Mirror of https://github.com/bahdotsh/wrkflw.git (synced 2026-01-06 12:26:32 +01:00)

Compare commits: 20 commits, v0.4.0...bahdotsh/v
| Author | SHA1 | Date | |
|---|---|---|---|
| | b4a73a3cde | | |
| | 4802e686de | | |
| | 64621375cb | | |
| | cff8e3f4bd | | |
| | 4251e6469d | | |
| | 2ba3dbe65b | | |
| | 7edc6b3645 | | |
| | 93f18d0327 | | |
| | faee4717e1 | | |
| | 22389736c3 | | |
| | 699c9250f2 | | |
| | 48e944a4cc | | |
| | d5d1904d0a | | |
| | 00fa569add | | |
| | a97398f949 | | |
| | e73b0df520 | | |
| | 9f51e26eb3 | | |
| | 3a88b33c83 | | |
| | 3a9f4f1101 | | |
| | 470132c5bf | | |
.github/test_organization.md (vendored, 90 lines changed)

@@ -1,90 +0,0 @@
# Test Organization for wrkflw

Following Rust best practices, we have reorganized the tests in this project to improve maintainability and clarity.

## Test Structure

Tests are now organized as follows:

### 1. Unit Tests

Unit tests remain in the source files using the `#[cfg(test)]` attribute. These tests are designed to test individual functions and small units of code in isolation.

Example:
```rust
// In src/matrix.rs
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_function() {
        // Test code here
    }
}
```

### 2. Integration Tests

Integration tests have been moved to the `tests/` directory. These tests import and test the public API of the crate, ensuring that different components work together correctly.

- `tests/matrix_test.rs` - Tests for matrix expansion functionality
- `tests/reusable_workflow_test.rs` - Tests for reusable workflow validation

### 3. End-to-End Tests

End-to-end tests are also located in the `tests/` directory. These tests simulate real-world usage scenarios and often involve external dependencies like Docker.

- `tests/cleanup_test.rs` - Tests for cleanup functionality with Docker containers, networks, etc.

## Running Tests

You can run all tests using:
```bash
cargo test
```

To run only unit tests:
```bash
cargo test --lib
```

To run only integration tests:
```bash
cargo test --test matrix_test --test reusable_workflow_test
```

To run only end-to-end tests:
```bash
cargo test --test cleanup_test
```

To run a specific test:
```bash
cargo test test_name
```

## CI Configuration

Our CI workflow has been updated to run all types of tests separately, allowing for better isolation and clearer failure reporting:

```yaml
- name: Run unit tests
  run: cargo test --lib --verbose

- name: Run integration tests
  run: cargo test --test matrix_test --test reusable_workflow_test --verbose

- name: Run e2e tests (if Docker available)
  run: cargo test --test cleanup_test --verbose -- --skip docker --skip processes
```

## Writing New Tests

When adding new tests:

1. For unit tests, add them to the relevant source file using `#[cfg(test)]`
2. For integration tests, add them to the `tests/` directory with a descriptive name like `feature_name_test.rs`
3. For end-to-end tests, also add them to the `tests/` directory with a descriptive name

Follow the existing patterns to ensure consistency.
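The deleted document above includes a unit-test example but only names its integration tests. For contrast, a minimal sketch of the shape an integration test in `tests/` takes; it is illustrative only, and the `cartesian` helper stands in for the crate's real matrix-expansion API, whose signature is not shown in this diff:

```rust
// tests/feature_name_test.rs (hypothetical); `cartesian` is a stand-in
// for the crate's actual public matrix-expansion API.
fn cartesian(a: &[&str], b: &[&str]) -> Vec<(String, String)> {
    a.iter()
        .flat_map(|x| b.iter().map(move |y| (x.to_string(), y.to_string())))
        .collect()
}

#[test]
fn matrix_expansion_produces_all_combinations() {
    let combos = cartesian(&["linux", "macos"], &["stable", "beta"]);
    assert_eq!(combos.len(), 4);
}
```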
.github/workflows/release.yml (vendored, 4 lines changed)

@@ -78,10 +78,6 @@ jobs:
          target: aarch64-apple-darwin
          artifact_name: wrkflw
          asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-macos-arm64
        - os: windows-latest
          target: x86_64-pc-windows-msvc
          artifact_name: wrkflw.exe
          asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-windows-x86_64

    steps:
      - name: Checkout code
.github/workflows/rust.yml (vendored, 43 lines changed)

@@ -1,43 +0,0 @@
name: Rust

on:
  workflow_dispatch:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - name: Build
        run: cargo build --verbose

  test-unit:
    needs: [build]
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run unit tests
        run: cargo test --lib --verbose

  test-integration:
    needs: [build]
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run integration tests
        run: cargo test --test matrix_test --test reusable_workflow_test --verbose

  test-e2e:
    needs: [build]
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run e2e tests (if Docker available)
        run: cargo test --test cleanup_test --verbose -- --skip docker --skip processes
.gitlab-ci.yml

@@ -2,38 +2,30 @@
# This pipeline will build and test the Rust project

stages:
  - lint
  - build
  - test
  - release
  - deploy

variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/.cargo
  RUST_VERSION: stable
  RUST_VERSION: "1.70.0"
  CARGO_TERM_COLOR: always

# Cache dependencies between jobs
# Cache settings
cache:
  key: "$CI_COMMIT_REF_SLUG"
  paths:
    - .cargo/
    - target/
  script:
    - echo "This is a placeholder - the cache directive doesn't need a script"

# Lint job - runs rustfmt and clippy
lint:
  stage: lint
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add rustfmt clippy
    - cargo fmt -- --check
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: never
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
  allow_failure: true

# Build job - builds the application
build:
@@ -43,17 +35,8 @@ build:
    - cargo build --verbose
  artifacts:
    paths:
      - target/debug/wrkflw
      - target/debug
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always

# Test job - runs unit and integration tests
test:
@@ -61,21 +44,12 @@ test:
  image: rust:${RUST_VERSION}
  script:
    - cargo test --verbose
  needs:
  dependencies:
    - build
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always

# Release job - creates a release build
release:
  stage: release
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo build --release --verbose
@@ -92,16 +66,35 @@ release:

# Custom job for documentation
docs:
  stage: release
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - target/doc/
  rules:
    - if: $CI_PIPELINE_SOURCE == "web" && $BUILD_DOCS == "true"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - when: never
      - public
  only:
    - main

format:
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add rustfmt
    - cargo fmt --check
  allow_failure: true

pages:
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - public
  only:
    - main
Cargo.lock (generated, 219 lines changed)

@@ -486,6 +486,46 @@ dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "evaluator"
version = "0.4.0"
dependencies = [
 "colored",
 "models",
 "serde_yaml",
 "validators",
]

[[package]]
name = "executor"
version = "0.4.0"
dependencies = [
 "async-trait",
 "bollard",
 "chrono",
 "dirs",
 "futures",
 "futures-util",
 "lazy_static",
 "logging",
 "matrix",
 "models",
 "num_cpus",
 "once_cell",
 "parser",
 "regex",
 "runtime",
 "serde",
 "serde_json",
 "serde_yaml",
 "tar",
 "tempfile",
 "thiserror",
 "tokio",
 "utils",
 "uuid",
]

[[package]]
name = "fancy-regex"
version = "0.11.0"
@@ -674,6 +714,35 @@ version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"

[[package]]
name = "github"
version = "0.4.0"
dependencies = [
 "lazy_static",
 "models",
 "regex",
 "reqwest",
 "serde",
 "serde_json",
 "serde_yaml",
 "thiserror",
]

[[package]]
name = "gitlab"
version = "0.4.0"
dependencies = [
 "lazy_static",
 "models",
 "regex",
 "reqwest",
 "serde",
 "serde_json",
 "serde_yaml",
 "thiserror",
 "urlencoding",
]

[[package]]
name = "h2"
version = "0.3.26"
@@ -1146,6 +1215,28 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"

[[package]]
name = "logging"
version = "0.4.0"
dependencies = [
 "chrono",
 "models",
 "once_cell",
 "serde",
 "serde_yaml",
]

[[package]]
name = "matrix"
version = "0.4.0"
dependencies = [
 "indexmap 2.8.0",
 "models",
 "serde",
 "serde_yaml",
 "thiserror",
]

[[package]]
name = "memchr"
version = "2.7.4"
@@ -1190,6 +1281,16 @@ dependencies = [
 "windows-sys 0.52.0",
]

[[package]]
name = "models"
version = "0.4.0"
dependencies = [
 "serde",
 "serde_json",
 "serde_yaml",
 "thiserror",
]

[[package]]
name = "native-tls"
version = "0.2.14"
@@ -1410,6 +1511,20 @@ dependencies = [
 "windows-targets 0.52.6",
]

[[package]]
name = "parser"
version = "0.4.0"
dependencies = [
 "jsonschema",
 "matrix",
 "models",
 "serde",
 "serde_json",
 "serde_yaml",
 "tempfile",
 "thiserror",
]

[[package]]
name = "paste"
version = "1.0.15"
@@ -1616,6 +1731,23 @@ dependencies = [
 "winreg",
]

[[package]]
name = "runtime"
version = "0.4.0"
dependencies = [
 "async-trait",
 "futures",
 "logging",
 "models",
 "once_cell",
 "serde",
 "serde_yaml",
 "tempfile",
 "tokio",
 "utils",
 "which",
]

[[package]]
name = "rustc-demangle"
version = "0.1.24"
@@ -1669,6 +1801,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"

[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
 "winapi-util",
]

[[package]]
name = "schannel"
version = "0.1.27"
@@ -2102,6 +2243,28 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"

[[package]]
name = "ui"
version = "0.4.0"
dependencies = [
 "chrono",
 "crossterm 0.26.1",
 "evaluator",
 "executor",
 "futures",
 "github",
 "logging",
 "models",
 "ratatui",
 "regex",
 "reqwest",
 "serde",
 "serde_json",
 "serde_yaml",
 "tokio",
 "utils",
]

[[package]]
name = "unicode-ident"
version = "1.0.18"
@@ -2161,6 +2324,16 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

[[package]]
name = "utils"
version = "0.4.0"
dependencies = [
 "models",
 "nix",
 "serde",
 "serde_yaml",
]

[[package]]
name = "uuid"
version = "1.16.0"
@@ -2170,6 +2343,16 @@ dependencies = [
 "getrandom 0.3.2",
]

[[package]]
name = "validators"
version = "0.4.0"
dependencies = [
 "matrix",
 "models",
 "serde",
 "serde_yaml",
]

[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -2182,6 +2365,16 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"

[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
 "same-file",
 "winapi-util",
]

[[package]]
name = "want"
version = "0.3.1"
@@ -2315,6 +2508,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"

[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
@@ -2519,38 +2721,47 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
name = "wrkflw"
version = "0.4.0"
dependencies = [
 "async-trait",
 "bollard",
 "chrono",
 "clap",
 "colored",
 "crossterm 0.26.1",
 "dirs",
 "evaluator",
 "executor",
 "futures",
 "futures-util",
 "github",
 "gitlab",
 "indexmap 2.8.0",
 "itertools",
 "jsonschema",
 "lazy_static",
 "libc",
 "log",
 "logging",
 "matrix",
 "models",
 "nix",
 "num_cpus",
 "once_cell",
 "parser",
 "ratatui",
 "rayon",
 "regex",
 "reqwest",
 "runtime",
 "serde",
 "serde_json",
 "serde_yaml",
 "tar",
 "tempfile",
 "thiserror",
 "tokio",
 "ui",
 "urlencoding",
 "utils",
 "uuid",
 "which",
 "validators",
 "walkdir",
]

[[package]]
Cargo.toml (11 lines changed)

@@ -1,5 +1,10 @@
[package]
name = "wrkflw"
[workspace]
members = [
    "crates/*"
]
resolver = "2"

[workspace.package]
version = "0.4.0"
edition = "2021"
description = "A GitHub Actions workflow validator and executor"
@@ -10,7 +15,7 @@ keywords = ["workflows", "github", "local"]
categories = ["command-line-utilities"]
license = "MIT"

[dependencies]
[workspace.dependencies]
clap = { version = "4.3", features = ["derive"] }
colored = "2.0"
serde = { version = "1.0", features = ["derive"] }
README.md (78 lines changed)

@@ -13,7 +13,7 @@ WRKFLW is a powerful command-line tool for validating and executing GitHub Actio
## Features

- **TUI Interface**: A full-featured terminal user interface for managing and monitoring workflow executions
- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files
- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files with proper exit codes for CI/CD integration
- **Execute Workflows Locally**: Run workflows directly on your machine using Docker containers
- **Emulation Mode**: Optional execution without Docker by emulating the container environment locally
- **Job Dependency Resolution**: Automatically determines the correct execution order based on job dependencies
@@ -77,8 +77,38 @@ wrkflw validate path/to/workflows

# Validate with verbose output
wrkflw validate --verbose path/to/workflow.yml

# Validate GitLab CI pipelines
wrkflw validate .gitlab-ci.yml --gitlab

# Disable exit codes for custom error handling (default: enabled)
wrkflw validate --no-exit-code path/to/workflow.yml
```

#### Exit Codes for CI/CD Integration

By default, `wrkflw validate` sets the exit code to `1` when validation fails, making it perfect for CI/CD pipelines and scripts:

```bash
# In CI/CD scripts - validation failure will cause the script to exit
if ! wrkflw validate; then
    echo "❌ Workflow validation failed!"
    exit 1
fi
echo "✅ All workflows are valid!"

# For custom error handling, disable exit codes
wrkflw validate --no-exit-code
if [ $? -eq 0 ]; then
    echo "Validation completed (check output for details)"
fi
```

**Exit Code Behavior:**
- `0`: All validations passed successfully
- `1`: One or more validation failures detected
- `2`: Command usage error (invalid arguments, file not found, etc.)

### Running Workflows in CLI Mode

```bash
@@ -90,6 +120,9 @@ wrkflw run --emulate .github/workflows/ci.yml

# Run with verbose output
wrkflw run --verbose .github/workflows/ci.yml

# Preserve failed containers for debugging
wrkflw run --preserve-containers-on-failure .github/workflows/ci.yml
```

### Using the TUI Interface
@@ -140,17 +173,25 @@ The terminal user interface provides an interactive way to manage workflows:

```bash
$ wrkflw validate .github/workflows/rust.yml
Validating GitHub workflow file: .github/workflows/rust.yml... Validating 1 workflow file(s)...
✅ Valid: .github/workflows/rust.yml

Validating workflows in: .github/workflows/rust.yml
============================================================
✅ Valid: rust.yml
------------------------------------------------------------
Summary: 1 valid, 0 invalid

Summary
============================================================
✅ 1 valid workflow file(s)
$ echo $?
0

All workflows are valid! 🎉
# Example with validation failure
$ wrkflw validate .github/workflows/invalid.yml
Validating GitHub workflow file: .github/workflows/invalid.yml... Validating 1 workflow file(s)...
❌ Invalid: .github/workflows/invalid.yml
1. Job 'test' is missing 'runs-on' field
2. Job 'test' is missing 'steps' section

Summary: 0 valid, 1 invalid

$ echo $?
1
```

### Running a Workflow
@@ -223,10 +264,27 @@ WRKFLW supports composite actions, which are actions made up of multiple steps.

WRKFLW automatically cleans up any Docker containers created during workflow execution, even if the process is interrupted with Ctrl+C.

For debugging failed workflows, you can preserve containers that fail by using the `--preserve-containers-on-failure` flag:

```bash
# Preserve failed containers for debugging
wrkflw run --preserve-containers-on-failure .github/workflows/build.yml

# Also available in TUI mode
wrkflw tui --preserve-containers-on-failure
```

When a container fails with this flag enabled, WRKFLW will:
- Keep the failed container running instead of removing it
- Log the container ID and provide inspection instructions
- Show a message like: `Preserving container abc123 for debugging (exit code: 1). Use 'docker exec -it abc123 bash' to inspect.`

This allows you to inspect the exact state of the container when the failure occurred, examine files, check environment variables, and debug issues more effectively.

## Limitations

### Supported Features
- ✅ Basic workflow syntax and validation (all YAML syntax checks, required fields, and structure)
- ✅ Basic workflow syntax and validation (all YAML syntax checks, required fields, and structure) with proper exit codes for CI/CD integration
- ✅ Job dependency resolution and parallel execution (all jobs with correct 'needs' relationships are executed in the right order, and independent jobs run in parallel)
- ✅ Matrix builds (supported for reasonable matrix sizes; very large matrices may be slow or resource-intensive)
- ✅ Environment variables and GitHub context (all standard GitHub Actions environment variables and context objects are emulated)
crates/README.md (new file, 97 lines)

@@ -0,0 +1,97 @@
# Wrkflw Crates

This directory contains the Rust crates that make up the Wrkflw project. The project has been restructured to use a workspace-based approach with individual crates for better modularity and maintainability.

## Crate Structure

- **wrkflw**: Main binary crate and entry point for the application
- **models**: Data models and structures used throughout the application
- **evaluator**: Workflow evaluation functionality
- **executor**: Workflow execution engine
- **github**: GitHub API integration
- **gitlab**: GitLab API integration
- **logging**: Logging functionality
- **matrix**: Matrix-based parallelization support
- **parser**: Workflow parsing functionality
- **runtime**: Runtime execution environment
- **ui**: User interface components
- **utils**: Utility functions
- **validators**: Validation functionality

## Dependencies

Each crate has its own `Cargo.toml` file that defines its dependencies. The root `Cargo.toml` file defines the workspace and shared dependencies.

## Build Instructions

To build the entire project:

```bash
cargo build
```

To build a specific crate:

```bash
cargo build -p <crate-name>
```

## Testing

To run tests for the entire project:

```bash
cargo test
```

To run tests for a specific crate:

```bash
cargo test -p <crate-name>
```

## Rust Best Practices

When contributing to wrkflw, please follow these Rust best practices:

### Code Organization

- Place modules in their respective crates to maintain separation of concerns
- Use `pub` selectively to expose only the necessary APIs
- Follow the Rust module system conventions (use `mod` and `pub mod` appropriately)

### Errors and Error Handling

- Prefer using the `thiserror` crate for defining custom error types (see the sketch after this list)
- Use the `?` operator for error propagation instead of match statements when appropriate
- Implement custom error types that provide context for the error
- Avoid using `.unwrap()` and `.expect()` in production code
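A minimal sketch of the `thiserror` pattern recommended above; the error type and function are illustrative, not taken from wrkflw's code:

```rust
use std::path::Path;

use thiserror::Error;

// Illustrative error type; not wrkflw's actual error enum.
#[derive(Debug, Error)]
pub enum ConfigError {
    #[error("failed to read config file: {0}")]
    Io(#[from] std::io::Error),
    #[error("invalid value for key `{key}`: {reason}")]
    Invalid { key: String, reason: String },
}

pub fn read_config(path: &Path) -> Result<String, ConfigError> {
    // `?` converts std::io::Error into ConfigError::Io via #[from]
    let text = std::fs::read_to_string(path)?;
    Ok(text)
}
```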
### Performance

- Profile code before optimizing using tools like `cargo flamegraph`
- Use `Arc` and `Mutex` judiciously for shared mutable state
- Leverage Rust's zero-cost abstractions (iterators, closures)
- Consider adding benchmark tests using the `criterion` crate for performance-critical code (sketched below)
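A hedged sketch of a `criterion` benchmark as suggested above; the benchmarked closure is a placeholder rather than a wrkflw function, and the file would live under `benches/` with a matching `[[bench]]` entry in `Cargo.toml`:

```rust
use criterion::{black_box, criterion_group, criterion_main, Criterion};

// Placeholder workload; substitute a real performance-critical function.
fn bench_sum(c: &mut Criterion) {
    c.bench_function("sum_0_to_1000", |b| {
        b.iter(|| black_box((0u64..1_000).sum::<u64>()))
    });
}

criterion_group!(benches, bench_sum);
criterion_main!(benches);
```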
### Security

- Validate all input, especially from external sources
- Avoid using `unsafe` code unless absolutely necessary
- Handle secrets securely using environment variables
- Check for integer overflows with `checked_` operations (example below)
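For the overflow item above, a small self-contained example of `checked_` arithmetic that surfaces overflow as an error instead of wrapping silently:

```rust
// Illustrative only: reject overflow rather than wrap around.
fn scale(bytes: u64, factor: u64) -> Result<u64, String> {
    bytes
        .checked_mul(factor)
        .ok_or_else(|| format!("overflow computing {} * {}", bytes, factor))
}
```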
### Testing

- Write unit tests for all public functions
- Use integration tests to verify crate-to-crate interactions
- Consider property-based testing for complex logic
- Structure tests with clear preparation, execution, and verification phases (see the sketch after this list)
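A sketch of the preparation/execution/verification structure named in the last item; the function under test is a placeholder:

```rust
// Placeholder function under test.
fn normalize(name: &str) -> String {
    name.trim().to_lowercase()
}

#[test]
fn normalize_trims_and_lowercases() {
    // Preparation
    let input = "  Build-Job  ";
    // Execution
    let got = normalize(input);
    // Verification
    assert_eq!(got, "build-job");
}
```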
### Tooling

- Run `cargo clippy` before committing changes to catch common mistakes
- Use `cargo fmt` to maintain consistent code formatting
- Enable compiler warnings with `#![warn(clippy::all)]`

For more detailed guidance, refer to the project's best practices documentation.
crates/evaluator/Cargo.toml (new file, 15 lines)

@@ -0,0 +1,15 @@
[package]
name = "evaluator"
version.workspace = true
edition.workspace = true
description = "Workflow evaluation for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
validators = { path = "../validators" }

# External dependencies
colored.workspace = true
serde_yaml.workspace = true
@@ -3,8 +3,8 @@ use serde_yaml::{self, Value};
use std::fs;
use std::path::Path;

use crate::models::ValidationResult;
use crate::validators::{validate_jobs, validate_triggers};
use models::ValidationResult;
use validators::{validate_jobs, validate_triggers};

pub fn evaluate_workflow_file(path: &Path, verbose: bool) -> Result<ValidationResult, String> {
    let content = fs::read_to_string(path).map_err(|e| format!("Failed to read file: {}", e))?;
crates/executor/Cargo.toml (new file, 35 lines)

@@ -0,0 +1,35 @@
[package]
name = "executor"
version.workspace = true
edition.workspace = true
description = "Workflow executor for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
parser = { path = "../parser" }
runtime = { path = "../runtime" }
logging = { path = "../logging" }
matrix = { path = "../matrix" }
utils = { path = "../utils" }

# External dependencies
async-trait.workspace = true
bollard.workspace = true
chrono.workspace = true
dirs.workspace = true
futures.workspace = true
futures-util.workspace = true
lazy_static.workspace = true
num_cpus.workspace = true
once_cell.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
tar.workspace = true
tempfile.workspace = true
thiserror.workspace = true
tokio.workspace = true
uuid.workspace = true
@@ -1,4 +1,4 @@
use crate::parser::workflow::WorkflowDefinition;
use parser::workflow::WorkflowDefinition;
use std::collections::{HashMap, HashSet};

pub fn resolve_dependencies(workflow: &WorkflowDefinition) -> Result<Vec<Vec<String>>, String> {
@@ -1,5 +1,3 @@
use crate::logging;
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
use async_trait::async_trait;
use bollard::{
    container::{Config, CreateContainerOptions},
@@ -8,10 +6,14 @@ use bollard::{
    Docker,
};
use futures_util::StreamExt;
use logging;
use once_cell::sync::Lazy;
use runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
use std::collections::HashMap;
use std::path::Path;
use std::sync::Mutex;
use utils;
use utils::fd;

static RUNNING_CONTAINERS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
static CREATED_NETWORKS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
@@ -22,36 +24,58 @@ static CUSTOMIZED_IMAGES: Lazy<Mutex<HashMap<String, String>>> =

pub struct DockerRuntime {
    docker: Docker,
    preserve_containers_on_failure: bool,
}

impl DockerRuntime {
    pub fn new() -> Result<Self, ContainerError> {
        Self::new_with_config(false)
    }

    pub fn new_with_config(preserve_containers_on_failure: bool) -> Result<Self, ContainerError> {
        let docker = Docker::connect_with_local_defaults().map_err(|e| {
            ContainerError::ContainerStart(format!("Failed to connect to Docker: {}", e))
        })?;

        Ok(DockerRuntime { docker })
        Ok(DockerRuntime {
            docker,
            preserve_containers_on_failure,
        })
    }

    // Add a method to store and retrieve customized images (e.g., with Python installed)
    #[allow(dead_code)]
    pub fn get_customized_image(base_image: &str, customization: &str) -> Option<String> {
        let key = format!("{}:{}", base_image, customization);
        let images = CUSTOMIZED_IMAGES.lock().unwrap();
        images.get(&key).cloned()
        match CUSTOMIZED_IMAGES.lock() {
            Ok(images) => images.get(&key).cloned(),
            Err(e) => {
                logging::error(&format!("Failed to acquire lock: {}", e));
                None
            }
        }
    }

    #[allow(dead_code)]
    pub fn set_customized_image(base_image: &str, customization: &str, new_image: &str) {
        let key = format!("{}:{}", base_image, customization);
        let mut images = CUSTOMIZED_IMAGES.lock().unwrap();
        images.insert(key, new_image.to_string());
        if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
            images.insert(key, new_image.to_string());
        }) {
            logging::error(&format!("Failed to acquire lock: {}", e));
        }
    }

    /// Find a customized image key by prefix
    #[allow(dead_code)]
    pub fn find_customized_image_key(image: &str, prefix: &str) -> Option<String> {
        let image_keys = CUSTOMIZED_IMAGES.lock().unwrap();
        let image_keys = match CUSTOMIZED_IMAGES.lock() {
            Ok(keys) => keys,
            Err(e) => {
                logging::error(&format!("Failed to acquire lock: {}", e));
                return None;
            }
        };

        // Look for any key that starts with the prefix
        for (key, _) in image_keys.iter() {
@@ -80,8 +104,13 @@ impl DockerRuntime {
            (lang, None) => lang.to_string(),
        };

        let images = CUSTOMIZED_IMAGES.lock().unwrap();
        images.get(&key).cloned()
        match CUSTOMIZED_IMAGES.lock() {
            Ok(images) => images.get(&key).cloned(),
            Err(e) => {
                logging::error(&format!("Failed to acquire lock: {}", e));
                None
            }
        }
    }

    /// Set a customized image with language-specific dependencies
@@ -102,8 +131,11 @@ impl DockerRuntime {
            (lang, None) => lang.to_string(),
        };

        let mut images = CUSTOMIZED_IMAGES.lock().unwrap();
        images.insert(key, new_image.to_string());
        if let Err(e) = CUSTOMIZED_IMAGES.lock().map(|mut images| {
            images.insert(key, new_image.to_string());
        }) {
            logging::error(&format!("Failed to acquire lock: {}", e));
        }
    }

    /// Prepare a language-specific environment
@@ -250,7 +282,7 @@ pub fn is_available() -> bool {
    // Spawn a thread with the timeout to prevent blocking the main thread
    let handle = std::thread::spawn(move || {
        // Use safe FD redirection utility to suppress Docker error messages
        match crate::utils::fd::with_stderr_to_null(|| {
        match fd::with_stderr_to_null(|| {
            // First, check if docker CLI is available as a quick test
            if cfg!(target_os = "linux") || cfg!(target_os = "macos") {
                // Try a simple docker version command with a short timeout
@@ -974,13 +1006,23 @@ impl DockerRuntime {
            logging::warning("Retrieving container logs timed out");
        }

        // Clean up container with a timeout
        let _ = tokio::time::timeout(
            std::time::Duration::from_secs(10),
            self.docker.remove_container(&container.id, None),
        )
        .await;
        untrack_container(&container.id);
        // Clean up container with a timeout, but preserve on failure if configured
        if exit_code == 0 || !self.preserve_containers_on_failure {
            let _ = tokio::time::timeout(
                std::time::Duration::from_secs(10),
                self.docker.remove_container(&container.id, None),
            )
            .await;
            untrack_container(&container.id);
        } else {
            // Container failed and we want to preserve it for debugging
            logging::info(&format!(
                "Preserving container {} for debugging (exit code: {}). Use 'docker exec -it {} bash' to inspect.",
                container.id, exit_code, container.id
            ));
            // Still untrack it from the automatic cleanup system to prevent it from being cleaned up later
            untrack_container(&container.id);
        }

        // Log detailed information about the command execution for debugging
        if exit_code != 0 {
@@ -1,3 +1,4 @@
#[allow(unused_imports)]
use bollard::Docker;
use futures::future;
use regex;
@@ -8,25 +9,70 @@ use std::path::Path;
use std::process::Command;
use thiserror::Error;

use crate::executor::dependency;
use crate::executor::docker;
use crate::executor::environment;
use crate::logging;
use crate::matrix::{self, MatrixCombination};
use crate::parser::workflow::{parse_workflow, ActionInfo, Job, WorkflowDefinition};
use crate::runtime::container::ContainerRuntime;
use crate::runtime::emulation::handle_special_action;
use crate::dependency;
use crate::docker;
use crate::environment;
use logging;
use matrix::MatrixCombination;
use models::gitlab::Pipeline;
use parser::gitlab::{self, parse_pipeline};
use parser::workflow::{self, parse_workflow, ActionInfo, Job, WorkflowDefinition};
use runtime::container::ContainerRuntime;
use runtime::emulation;

#[allow(unused_variables, unused_assignments)]
/// Execute a GitHub Actions workflow file locally
pub async fn execute_workflow(
    workflow_path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
    config: ExecutionConfig,
) -> Result<ExecutionResult, ExecutionError> {
    logging::info(&format!("Executing workflow: {}", workflow_path.display()));
    logging::info(&format!("Runtime: {:?}", runtime_type));
    logging::info(&format!("Runtime: {:?}", config.runtime_type));

    // Determine if this is a GitLab CI/CD pipeline or GitHub Actions workflow
    let is_gitlab = is_gitlab_pipeline(workflow_path);

    if is_gitlab {
        execute_gitlab_pipeline(workflow_path, config.clone()).await
    } else {
        execute_github_workflow(workflow_path, config.clone()).await
    }
}

/// Determine if a file is a GitLab CI/CD pipeline
fn is_gitlab_pipeline(path: &Path) -> bool {
    // Check the file name
    if let Some(file_name) = path.file_name() {
        if let Some(file_name_str) = file_name.to_str() {
            return file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml");
        }
    }

    // If file name check fails, try to read and determine by content
    if let Ok(content) = fs::read_to_string(path) {
        // GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
        if content.contains("stages:")
            || content.contains("before_script:")
            || content.contains("after_script:")
        {
            // Check for GitHub Actions specific keys that would indicate it's not GitLab
            if !content.contains("on:")
                && !content.contains("runs-on:")
                && !content.contains("uses:")
            {
                return true;
            }
        }
    }

    false
}

/// Execute a GitHub Actions workflow file locally
async fn execute_github_workflow(
    workflow_path: &Path,
    config: ExecutionConfig,
) -> Result<ExecutionResult, ExecutionError> {
    // 1. Parse workflow file
    let workflow = parse_workflow(workflow_path)?;

@@ -34,7 +80,10 @@ pub async fn execute_workflow(
    let execution_plan = dependency::resolve_dependencies(&workflow)?;

    // 3. Initialize appropriate runtime
    let runtime = initialize_runtime(runtime_type.clone())?;
    let runtime = initialize_runtime(
        config.runtime_type.clone(),
        config.preserve_containers_on_failure,
    )?;

    // Create a temporary workspace directory
    let workspace_dir = tempfile::tempdir()
@@ -46,7 +95,7 @@ pub async fn execute_workflow(
    // Add runtime mode to environment
    env_context.insert(
        "WRKFLW_RUNTIME_MODE".to_string(),
        if runtime_type == RuntimeType::Emulation {
        if config.runtime_type == RuntimeType::Emulation {
            "emulation".to_string()
        } else {
            "docker".to_string()
@@ -76,7 +125,7 @@ pub async fn execute_workflow(
        &workflow,
        runtime.as_ref(),
        &env_context,
        verbose,
        config.verbose,
    )
    .await?;

@@ -113,30 +162,225 @@ pub async fn execute_workflow(
    })
}

/// Execute a GitLab CI/CD pipeline locally
async fn execute_gitlab_pipeline(
    pipeline_path: &Path,
    config: ExecutionConfig,
) -> Result<ExecutionResult, ExecutionError> {
    logging::info("Executing GitLab CI/CD pipeline");

    // 1. Parse the GitLab pipeline file
    let pipeline = parse_pipeline(pipeline_path)
        .map_err(|e| ExecutionError::Parse(format!("Failed to parse GitLab pipeline: {}", e)))?;

    // 2. Convert the GitLab pipeline to a format compatible with the workflow executor
    let workflow = gitlab::convert_to_workflow_format(&pipeline);

    // 3. Resolve job dependencies based on stages
    let execution_plan = resolve_gitlab_dependencies(&pipeline, &workflow)?;

    // 4. Initialize appropriate runtime
    let runtime = initialize_runtime(
        config.runtime_type.clone(),
        config.preserve_containers_on_failure,
    )?;

    // Create a temporary workspace directory
    let workspace_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create workspace: {}", e)))?;

    // 5. Set up GitLab-like environment
    let mut env_context = create_gitlab_context(&pipeline, workspace_dir.path());

    // Add runtime mode to environment
    env_context.insert(
        "WRKFLW_RUNTIME_MODE".to_string(),
        if config.runtime_type == RuntimeType::Emulation {
            "emulation".to_string()
        } else {
            "docker".to_string()
        },
    );

    // Setup environment files
    environment::setup_github_environment_files(workspace_dir.path()).map_err(|e| {
        ExecutionError::Execution(format!("Failed to setup environment files: {}", e))
    })?;

    // 6. Execute jobs according to the plan
    let mut results = Vec::new();
    let mut has_failures = false;
    let mut failure_details = String::new();

    for job_batch in execution_plan {
        // Execute jobs in parallel if they don't depend on each other
        let job_results = execute_job_batch(
            &job_batch,
            &workflow,
            runtime.as_ref(),
            &env_context,
            config.verbose,
        )
        .await?;

        // Check for job failures and collect details
        for job_result in &job_results {
            if job_result.status == JobStatus::Failure {
                has_failures = true;
                failure_details.push_str(&format!("\n❌ Job failed: {}\n", job_result.name));

                // Add step details for failed jobs
                for step in &job_result.steps {
                    if step.status == StepStatus::Failure {
                        failure_details.push_str(&format!(" ❌ {}: {}\n", step.name, step.output));
                    }
                }
            }
        }

        results.extend(job_results);
    }

    // If there were failures, add detailed failure information to the result
    if has_failures {
        logging::error(&format!("Pipeline execution failed:{}", failure_details));
    }

    Ok(ExecutionResult {
        jobs: results,
        failure_details: if has_failures {
            Some(failure_details)
        } else {
            None
        },
    })
}

/// Create an environment context for GitLab CI/CD pipeline execution
fn create_gitlab_context(pipeline: &Pipeline, workspace_dir: &Path) -> HashMap<String, String> {
    let mut env_context = HashMap::new();

    // Add GitLab CI/CD environment variables
    env_context.insert("CI".to_string(), "true".to_string());
    env_context.insert("GITLAB_CI".to_string(), "true".to_string());

    // Add custom environment variable to indicate use in wrkflw
    env_context.insert("WRKFLW_CI".to_string(), "true".to_string());

    // Add workspace directory
    env_context.insert(
        "CI_PROJECT_DIR".to_string(),
        workspace_dir.to_string_lossy().to_string(),
    );

    // Also add the workspace as the GitHub workspace for compatibility with emulation runtime
    env_context.insert(
        "GITHUB_WORKSPACE".to_string(),
        workspace_dir.to_string_lossy().to_string(),
    );

    // Add global variables from the pipeline
    if let Some(variables) = &pipeline.variables {
        for (key, value) in variables {
            env_context.insert(key.clone(), value.clone());
        }
    }

    env_context
}

/// Resolve GitLab CI/CD pipeline dependencies
fn resolve_gitlab_dependencies(
    pipeline: &Pipeline,
    workflow: &WorkflowDefinition,
) -> Result<Vec<Vec<String>>, ExecutionError> {
    // For GitLab CI/CD pipelines, jobs within the same stage can run in parallel,
    // but jobs in different stages run sequentially

    // Get stages from the pipeline or create a default one
    let stages = match &pipeline.stages {
        Some(defined_stages) => defined_stages.clone(),
        None => vec![
            "build".to_string(),
            "test".to_string(),
            "deploy".to_string(),
        ],
    };

    // Create an execution plan based on stages
    let mut execution_plan = Vec::new();

    // For each stage, collect the jobs that belong to it
    for stage in stages {
        let mut stage_jobs = Vec::new();

        for (job_name, job) in &pipeline.jobs {
            // Skip template jobs
            if let Some(true) = job.template {
                continue;
            }

            // Get the job's stage, or assume "test" if not specified
            let default_stage = "test".to_string();
            let job_stage = job.stage.as_ref().unwrap_or(&default_stage);

            // If the job belongs to the current stage, add it to the batch
            if job_stage == &stage {
                stage_jobs.push(job_name.clone());
            }
        }

        if !stage_jobs.is_empty() {
            execution_plan.push(stage_jobs);
        }
    }

    // Also create a batch for jobs without a stage
    let mut stageless_jobs = Vec::new();

    for (job_name, job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        if job.stage.is_none() {
            stageless_jobs.push(job_name.clone());
        }
    }

    if !stageless_jobs.is_empty() {
        execution_plan.push(stageless_jobs);
    }

    Ok(execution_plan)
}

// Determine if Docker is available or fall back to emulation
fn initialize_runtime(
    runtime_type: RuntimeType,
    preserve_containers_on_failure: bool,
) -> Result<Box<dyn ContainerRuntime>, ExecutionError> {
    match runtime_type {
        RuntimeType::Docker => {
            if docker::is_available() {
                // Handle the Result returned by DockerRuntime::new()
                match docker::DockerRuntime::new() {
                match docker::DockerRuntime::new_with_config(preserve_containers_on_failure) {
                    Ok(docker_runtime) => Ok(Box::new(docker_runtime)),
                    Err(e) => {
                        logging::error(&format!(
                            "Failed to initialize Docker runtime: {}, falling back to emulation mode",
                            e
                        ));
                        Ok(Box::new(crate::runtime::emulation::EmulationRuntime::new()))
                        Ok(Box::new(emulation::EmulationRuntime::new()))
                    }
                }
            } else {
                logging::error("Docker not available, falling back to emulation mode");
                Ok(Box::new(crate::runtime::emulation::EmulationRuntime::new()))
                Ok(Box::new(emulation::EmulationRuntime::new()))
            }
        }
        RuntimeType::Emulation => Ok(Box::new(crate::runtime::emulation::EmulationRuntime::new())),
        RuntimeType::Emulation => Ok(Box::new(emulation::EmulationRuntime::new())),
    }
}

@@ -146,6 +390,13 @@ pub enum RuntimeType {
    Emulation,
}

#[derive(Debug, Clone)]
pub struct ExecutionConfig {
    pub runtime_type: RuntimeType,
    pub verbose: bool,
    pub preserve_containers_on_failure: bool,
}

pub struct ExecutionResult {
    pub jobs: Vec<JobResult>,
    pub failure_details: Option<String>,
@@ -302,6 +553,24 @@ async fn execute_job_with_matrix(
        ExecutionError::Execution(format!("Job '{}' not found in workflow", job_name))
    })?;

    // Evaluate job condition if present
    if let Some(if_condition) = &job.if_condition {
        let should_run = evaluate_job_condition(if_condition, env_context, workflow);
        if !should_run {
            logging::info(&format!(
                "⏭️ Skipping job '{}' due to condition: {}",
                job_name, if_condition
            ));
            // Return a skipped job result
            return Ok(vec![JobResult {
                name: job_name.to_string(),
                status: JobStatus::Skipped,
                steps: Vec::new(),
                logs: String::new(),
            }]);
        }
    }

    // Check if this is a matrix job
    if let Some(matrix_config) = &job.matrix {
        // Expand the matrix into combinations
@@ -379,219 +648,16 @@ async fn execute_job(ctx: JobExecutionContext<'_>) -> Result<JobResult, Executio
    let job_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create job directory: {}", e)))?;

    // Try to get a Docker client if using Docker and services exist
    let docker_client = if !job.services.is_empty() {
        match Docker::connect_with_local_defaults() {
            Ok(client) => Some(client),
            Err(e) => {
                logging::error(&format!("Failed to connect to Docker: {}", e));
                None
            }
        }
    } else {
        None
    };

    // Create a Docker network for this job if we have services
    let network_id = if !job.services.is_empty() && docker_client.is_some() {
        let docker = match docker_client.as_ref() {
            Some(client) => client,
            None => {
                return Err(ExecutionError::Runtime(
                    "Docker client is required but not available".to_string(),
                ));
            }
        };
        match docker::create_job_network(docker).await {
            Ok(id) => {
                logging::info(&format!(
                    "Created network {} for job '{}'",
                    id, ctx.job_name
                ));
                Some(id)
            }
            Err(e) => {
                logging::error(&format!(
                    "Failed to create network for job '{}': {}",
                    ctx.job_name, e
                ));
                return Err(ExecutionError::Runtime(format!(
                    "Failed to create network: {}",
                    e
                )));
            }
        }
    } else {
        None
    };

    // Start service containers if any
    let mut service_containers = Vec::new();

    if !job.services.is_empty() {
        if docker_client.is_none() {
            logging::error("Services are only supported with Docker runtime");
            return Err(ExecutionError::Runtime(
                "Services require Docker runtime".to_string(),
            ));
        }

        logging::info(&format!(
            "Starting {} service containers for job '{}'",
            job.services.len(),
            ctx.job_name
        ));

        let docker = match docker_client.as_ref() {
            Some(client) => client,
            None => {
                return Err(ExecutionError::Runtime(
                    "Docker client is required but not available".to_string(),
                ));
            }
        };

        #[allow(unused_variables, unused_assignments)]
        for (service_name, service_config) in &job.services {
            logging::info(&format!(
                "Starting service '{}' with image '{}'",
                service_name, service_config.image
            ));

            // Prepare container configuration
            let container_name = format!("wrkflw-service-{}-{}", ctx.job_name, service_name);

            // Map ports if specified
            let mut port_bindings = HashMap::new();
            if let Some(ports) = &service_config.ports {
                for port_spec in ports {
                    // Parse port spec like "8080:80"
                    let parts: Vec<&str> = port_spec.split(':').collect();
                    if parts.len() == 2 {
                        let host_port = parts[0];
                        let container_port = parts[1];

                        let port_binding = bollard::models::PortBinding {
                            host_ip: Some("0.0.0.0".to_string()),
                            host_port: Some(host_port.to_string()),
                        };

                        let key = format!("{}/tcp", container_port);
                        port_bindings.insert(key, Some(vec![port_binding]));
                    }
                }
            }

            // Convert environment variables
            let env_vars: Vec<String> = service_config
                .env
                .iter()
                .map(|(k, v)| format!("{}={}", k, v))
                .collect();

            // Create container options
            let create_opts = bollard::container::CreateContainerOptions {
                name: container_name,
                platform: None,
            };

            // Host configuration
            let host_config = bollard::models::HostConfig {
                port_bindings: Some(port_bindings),
                network_mode: network_id.clone(),
                ..Default::default()
            };

            // Container configuration
            let config = bollard::container::Config {
                image: Some(service_config.image.clone()),
                env: Some(env_vars),
                host_config: Some(host_config),
                ..Default::default()
            };

            // Log the network connection
            if network_id.is_some() {
                logging::info(&format!(
                    "Service '{}' connected to network via host_config",
                    service_name
                ));
            }

            match docker.create_container(Some(create_opts), config).await {
                Ok(response) => {
                    let container_id = response.id;

                    // Track the container for cleanup
                    docker::track_container(&container_id);
                    service_containers.push(container_id.clone());

                    // Start the container
                    match docker.start_container::<String>(&container_id, None).await {
                        Ok(_) => {
                            logging::info(&format!("Started service container: {}", container_id));

                            // Add service address to environment
                            job_env.insert(
                                format!("{}_HOST", service_name.to_uppercase()),
                                service_name.clone(),
                            );

                            job_logs.push_str(&format!(
                                "Started service '{}' with container ID: {}\n",
                                service_name, container_id
                            ));
                        }
                        Err(e) => {
                            let error_msg = format!(
                                "Failed to start service container '{}': {}",
                                service_name, e
                            );
                            logging::error(&error_msg);

                            // Clean up the created container
                            let _ = docker.remove_container(&container_id, None).await;

                            // Clean up network if created
                            if let Some(net_id) = &network_id {
                                let _ = docker.remove_network(net_id).await;
                                docker::untrack_network(net_id);
                            }

                            return Err(ExecutionError::Runtime(error_msg));
                        }
                    }
                }
                Err(e) => {
                    let error_msg = format!(
                        "Failed to create service container '{}': {}",
                        service_name, e
                    );
                    logging::error(&error_msg);

                    // Clean up network if created
                    if let Some(net_id) = &network_id {
                        let _ = docker.remove_network(net_id).await;
                        docker::untrack_network(net_id);
                    }

                    return Err(ExecutionError::Runtime(error_msg));
                }
            }
        }

        // Give services a moment to start up
        tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
    }

    // Prepare the runner environment
    let runner_image = get_runner_image(&job.runs_on);
    prepare_runner_image(&runner_image, ctx.runtime, ctx.verbose).await?;

    // Copy project files to workspace
    // Get the current project directory
    let current_dir = std::env::current_dir().map_err(|e| {
        ExecutionError::Execution(format!("Failed to get current directory: {}", e))
    })?;

    // Copy project files to the job workspace directory
    logging::info(&format!(
        "Copying project files to job workspace: {}",
        job_dir.path().display()
    ));
    copy_directory_contents(&current_dir, job_dir.path())?;

    logging::info(&format!("Executing job: {}", ctx.job_name));
@@ -607,7 +673,7 @@ async fn execute_job(ctx: JobExecutionContext<'_>) -> Result<JobResult, Executio
        working_dir: job_dir.path(),
        runtime: ctx.runtime,
        workflow: ctx.workflow,
        runner_image: &runner_image,
        runner_image: &get_runner_image(&job.runs_on),
        verbose: ctx.verbose,
        matrix_combination: &None,
    })
@@ -656,50 +722,6 @@ async fn execute_job(ctx: JobExecutionContext<'_>) -> Result<JobResult, Executio
        }
    }

    // Clean up service containers
    if !service_containers.is_empty() && docker_client.is_some() {
        let docker = match docker_client.as_ref() {
            Some(client) => client,
            None => {
                return Err(ExecutionError::Runtime(
                    "Docker client is required but not available".to_string(),
                ));
            }
        };

        for container_id in &service_containers {
            logging::info(&format!("Stopping service container: {}", container_id));

            let _ = docker.stop_container(container_id, None).await;
            let _ = docker.remove_container(container_id, None).await;

            // Untrack container since we've explicitly removed it
            docker::untrack_container(container_id);
        }
    }

    // Clean up network if created
    if let Some(net_id) = &network_id {
        if docker_client.is_some() {
            let docker = match docker_client.as_ref() {
                Some(client) => client,
                None => {
                    return Err(ExecutionError::Runtime(
                        "Docker client is required but not available".to_string(),
                    ));
                }
            };

            logging::info(&format!("Removing network: {}", net_id));
            if let Err(e) = docker.remove_network(net_id).await {
                logging::error(&format!("Failed to remove network {}: {}", net_id, e));
            }

            // Untrack network since we've explicitly removed it
            docker::untrack_network(net_id);
        }
    }

    Ok(JobResult {
        name: ctx.job_name.to_string(),
        status: if job_success {
@@ -822,14 +844,16 @@ async fn execute_matrix_job(
    let job_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create job directory: {}", e)))?;

    // Prepare the runner
    let runner_image = get_runner_image(&job_template.runs_on);
    prepare_runner_image(&runner_image, runtime, verbose).await?;

    // Copy project files to workspace
    // Get the current project directory
    let current_dir = std::env::current_dir().map_err(|e| {
        ExecutionError::Execution(format!("Failed to get current directory: {}", e))
    })?;

    // Copy project files to the job workspace directory
    logging::info(&format!(
        "Copying project files to job workspace: {}",
        job_dir.path().display()
    ));
    copy_directory_contents(&current_dir, job_dir.path())?;

    let job_success = if job_template.steps.is_empty() {
@@ -845,7 +869,7 @@ async fn execute_matrix_job(
        working_dir: job_dir.path(),
        runtime,
        workflow,
        runner_image: &runner_image,
        runner_image: &get_runner_image(&job_template.runs_on),
        verbose,
        matrix_combination: &Some(combination.values.clone()),
    })
@@ -907,7 +931,7 @@ async fn execute_matrix_job(

// Before the execute_step function, add this struct
struct StepExecutionContext<'a> {
    step: &'a crate::parser::workflow::Step,
    step: &'a workflow::Step,
    step_idx: usize,
    job_env: &'a HashMap<String, String>,
    working_dir: &'a Path,
@@ -1183,7 +1207,7 @@ async fn execute_step(ctx: StepExecutionContext<'_>) -> Result<StepResult, Execu
        }
    } else {
        // For GitHub actions, check if we have special handling
        if let Err(e) = handle_special_action(uses).await {
        if let Err(e) = emulation::handle_special_action(uses).await {
            // Log error but continue
            println!(" Warning: Special action handling failed: {}", e);
        }
@@ -1425,7 +1449,12 @@ async fn execute_step(ctx: StepExecutionContext<'_>) -> Result<StepResult, Execu
        } else {
            StepStatus::Failure
        },
        output: output_text,
        output: format!(
            "Exit code: {}
{}
{}",
            output.exit_code, output.stdout, output.stderr
|
||||
),
|
||||
}
|
||||
} else {
|
||||
StepResult {
|
||||
@@ -1538,9 +1567,11 @@ async fn execute_step(ctx: StepExecutionContext<'_>) -> Result<StepResult, Execu
|
||||
output,
|
||||
}
|
||||
} else {
|
||||
return Err(ExecutionError::Execution(
|
||||
"Step must have either 'uses' or 'run' field".to_string(),
|
||||
));
|
||||
return Ok(StepResult {
|
||||
name: step_name,
|
||||
status: StepStatus::Skipped,
|
||||
output: "Step has neither 'uses' nor 'run'".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
Ok(step_result)
|
||||
@@ -1678,6 +1709,7 @@ fn get_runner_image(runs_on: &str) -> String {
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
async fn prepare_runner_image(
|
||||
image: &str,
|
||||
runtime: &dyn ContainerRuntime,
|
||||
@@ -1707,7 +1739,7 @@ async fn prepare_runner_image(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Extract language and version information from an image name
|
||||
#[allow(dead_code)]
|
||||
fn extract_language_info(image: &str) -> Option<(&'static str, Option<&str>)> {
|
||||
let image_lower = image.to_lowercase();
|
||||
|
||||
@@ -1730,7 +1762,7 @@ fn extract_language_info(image: &str) -> Option<(&'static str, Option<&str>)> {
|
||||
}
|
||||
|
||||
async fn execute_composite_action(
|
||||
step: &crate::parser::workflow::Step,
|
||||
step: &workflow::Step,
|
||||
action_path: &Path,
|
||||
job_env: &HashMap<String, String>,
|
||||
working_dir: &Path,
|
||||
@@ -1825,7 +1857,7 @@ async fn execute_composite_action(
|
||||
job_env: &action_env,
|
||||
working_dir,
|
||||
runtime,
|
||||
workflow: &crate::parser::workflow::WorkflowDefinition {
|
||||
workflow: &workflow::WorkflowDefinition {
|
||||
name: "Composite Action".to_string(),
|
||||
on: vec![],
|
||||
on_raw: serde_yaml::Value::Null,
|
||||
@@ -1907,9 +1939,7 @@ async fn execute_composite_action(
|
||||
}
|
||||
|
||||
// Helper function to convert YAML step to our Step struct
|
||||
fn convert_yaml_to_step(
|
||||
step_yaml: &serde_yaml::Value,
|
||||
) -> Result<crate::parser::workflow::Step, String> {
|
||||
fn convert_yaml_to_step(step_yaml: &serde_yaml::Value) -> Result<workflow::Step, String> {
|
||||
// Extract step properties
|
||||
let name = step_yaml
|
||||
.get("name")
|
||||
@@ -1961,7 +1991,7 @@ fn convert_yaml_to_step(
|
||||
// Extract continue_on_error
|
||||
let continue_on_error = step_yaml.get("continue-on-error").and_then(|v| v.as_bool());
|
||||
|
||||
Ok(crate::parser::workflow::Step {
|
||||
Ok(workflow::Step {
|
||||
name,
|
||||
uses,
|
||||
run: final_run,
|
||||
@@ -1970,3 +2000,47 @@ fn convert_yaml_to_step(
|
||||
continue_on_error,
|
||||
})
|
||||
}
|
||||
|
||||
/// Evaluate a job condition expression
|
||||
/// This is a simplified implementation that handles basic GitHub Actions expressions
|
||||
fn evaluate_job_condition(
|
||||
condition: &str,
|
||||
env_context: &HashMap<String, String>,
|
||||
workflow: &WorkflowDefinition,
|
||||
) -> bool {
|
||||
logging::debug(&format!("Evaluating condition: {}", condition));
|
||||
|
||||
// For now, implement basic pattern matching for common conditions
|
||||
// TODO: Implement a full GitHub Actions expression evaluator
|
||||
|
||||
// Handle simple boolean conditions
|
||||
if condition == "true" {
|
||||
return true;
|
||||
}
|
||||
if condition == "false" {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Handle github.event.pull_request.draft == false
|
||||
if condition.contains("github.event.pull_request.draft == false") {
|
||||
// For local execution, assume this is always true (not a draft)
|
||||
return true;
|
||||
}
|
||||
|
||||
// Handle needs.jobname.outputs.outputname == 'value' patterns
|
||||
if condition.contains("needs.") && condition.contains(".outputs.") {
|
||||
// For now, simulate that outputs are available but empty
|
||||
// This means conditions like needs.changes.outputs.source-code == 'true' will be false
|
||||
logging::debug(
|
||||
"Evaluating needs.outputs condition - defaulting to false for local execution",
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Default to true for unknown conditions to avoid breaking workflows
|
||||
logging::warning(&format!(
|
||||
"Unknown condition pattern: '{}' - defaulting to true",
|
||||
condition
|
||||
));
|
||||
true
|
||||
}
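A quick sketch of how this fallback logic behaves (hypothetical test: the empty env map and the `make_test_workflow` helper constructing a minimal `WorkflowDefinition` are illustrative, not part of the diff):

```rust
#[cfg(test)]
mod condition_tests {
    use super::*;
    use std::collections::HashMap;

    #[test]
    fn test_condition_fallbacks() {
        let env = HashMap::new();
        let wf = make_test_workflow(); // hypothetical helper building a minimal WorkflowDefinition

        // Literal booleans are handled directly
        assert!(evaluate_job_condition("true", &env, &wf));
        assert!(!evaluate_job_condition("false", &env, &wf));

        // Draft-PR checks are assumed true for local execution
        assert!(evaluate_job_condition(
            "github.event.pull_request.draft == false",
            &env,
            &wf
        ));

        // needs.*.outputs.* comparisons default to false
        assert!(!evaluate_job_condition(
            "needs.changes.outputs.source-code == 'true'",
            &env,
            &wf
        ));

        // Unknown patterns default to true to avoid breaking workflows
        assert!(evaluate_job_condition(
            "github.ref == 'refs/heads/main'",
            &env,
            &wf
        ));
    }
}
```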
@@ -1,6 +1,6 @@
use crate::matrix::MatrixCombination;
use crate::parser::workflow::WorkflowDefinition;
use chrono::Utc;
use matrix::MatrixCombination;
use parser::workflow::WorkflowDefinition;
use serde_yaml::Value;
use std::{collections::HashMap, fs, io, path::Path};

@@ -1,3 +1,5 @@
// executor crate

#![allow(unused_variables, unused_assignments)]

pub mod dependency;
@@ -8,4 +10,6 @@ pub mod substitution;

// Re-export public items
pub use docker::cleanup_resources;
pub use engine::{execute_workflow, JobResult, JobStatus, RuntimeType, StepResult, StepStatus};
pub use engine::{
    execute_workflow, ExecutionConfig, JobResult, JobStatus, RuntimeType, StepResult, StepStatus,
};
19
crates/github/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
[package]
name = "github"
version.workspace = true
edition.workspace = true
description = "github functionality for wrkflw"
license.workspace = true

[dependencies]
# Add other crate dependencies as needed
models = { path = "../models" }

# External dependencies from workspace
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
reqwest.workspace = true
thiserror.workspace = true
lazy_static.workspace = true
regex.workspace = true
@@ -1,6 +1,9 @@
// github crate

use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header;
use serde_json::{self};
use std::collections::HashMap;
use std::fs;
use std::path::Path;
20
crates/gitlab/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
[package]
name = "gitlab"
version.workspace = true
edition.workspace = true
description = "gitlab functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
lazy_static.workspace = true
regex.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true
urlencoding.workspace = true
@@ -1,3 +1,5 @@
// gitlab crate

use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header;
16
crates/logging/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
[package]
name = "logging"
version.workspace = true
edition.workspace = true
description = "logging functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
chrono.workspace = true
once_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
16
crates/matrix/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
[package]
name = "matrix"
version.workspace = true
edition.workspace = true
description = "matrix functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
indexmap.workspace = true
serde.workspace = true
serde_yaml.workspace = true
thiserror.workspace = true
@@ -1,3 +1,5 @@
// matrix crate

use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use serde_yaml::Value;
12
crates/models/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
[package]
name = "models"
version.workspace = true
edition.workspace = true
description = "Data models for wrkflw"
license.workspace = true

[dependencies]
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true
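These manifests rely on `version.workspace`, `edition.workspace`, and `*.workspace = true` dependency keys, which resolve against `[workspace.package]` and `[workspace.dependencies]` tables in the repository root. A minimal sketch of the root `Cargo.toml` these keys assume (the member glob and version numbers here are illustrative, not the actual file):

```toml
[workspace]
members = ["crates/*"]

[workspace.package]
version = "0.4.0"
edition = "2021"
license = "MIT"

[workspace.dependencies]
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.9"
serde_json = "1"
thiserror = "1"
```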
338
crates/models/src/lib.rs
Normal file
@@ -0,0 +1,338 @@
pub struct ValidationResult {
    pub is_valid: bool,
    pub issues: Vec<String>,
}

impl Default for ValidationResult {
    fn default() -> Self {
        Self::new()
    }
}

impl ValidationResult {
    pub fn new() -> Self {
        ValidationResult {
            is_valid: true,
            issues: Vec::new(),
        }
    }

    pub fn add_issue(&mut self, issue: String) {
        self.is_valid = false;
        self.issues.push(issue);
    }
}

// GitLab pipeline models
pub mod gitlab {
    use serde::{Deserialize, Serialize};
    use std::collections::HashMap;

    /// Represents a GitLab CI/CD pipeline configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Pipeline {
        /// Default image for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Global variables available to all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Pipeline stages in execution order
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stages: Option<Vec<String>>,

        /// Default before_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Default after_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// Job definitions (name => job)
        #[serde(flatten)]
        pub jobs: HashMap<String, Job>,

        /// Workflow rules for the pipeline
        #[serde(skip_serializing_if = "Option::is_none")]
        pub workflow: Option<Workflow>,

        /// Includes for pipeline configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub include: Option<Vec<Include>>,
    }

    /// A job in a GitLab CI/CD pipeline
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Job {
        /// The stage this job belongs to
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stage: Option<String>,

        /// Docker image to use for this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Script commands to run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub script: Option<Vec<String>>,

        /// Commands to run before the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Commands to run after the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// When to run the job (on_success, on_failure, always, manual)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,

        /// Allow job failure
        #[serde(skip_serializing_if = "Option::is_none")]
        pub allow_failure: Option<bool>,

        /// Services to run alongside the job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub services: Option<Vec<Service>>,

        /// Tags to define which runners can execute this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub tags: Option<Vec<String>>,

        /// Job-specific variables
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Job dependencies
        #[serde(skip_serializing_if = "Option::is_none")]
        pub dependencies: Option<Vec<String>>,

        /// Artifacts to store after job execution
        #[serde(skip_serializing_if = "Option::is_none")]
        pub artifacts: Option<Artifacts>,

        /// Cache configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub cache: Option<Cache>,

        /// Rules for when this job should run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub rules: Option<Vec<Rule>>,

        /// Only run on specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub only: Option<Only>,

        /// Exclude specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub except: Option<Except>,

        /// Retry configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub retry: Option<Retry>,

        /// Timeout for the job in seconds
        #[serde(skip_serializing_if = "Option::is_none")]
        pub timeout: Option<String>,

        /// Mark job as parallel and specify instance count
        #[serde(skip_serializing_if = "Option::is_none")]
        pub parallel: Option<usize>,

        /// Flag to indicate this is a template job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub template: Option<bool>,

        /// List of jobs this job extends from
        #[serde(skip_serializing_if = "Option::is_none")]
        pub extends: Option<Vec<String>>,
    }

    /// Docker image configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Image {
        /// Simple image name as string
        Simple(String),
        /// Detailed image configuration
        Detailed {
            /// Image name
            name: String,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Service container to run alongside a job
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Service {
        /// Simple service name as string
        Simple(String),
        /// Detailed service configuration
        Detailed {
            /// Service name/image
            name: String,
            /// Command to run in the service container
            #[serde(skip_serializing_if = "Option::is_none")]
            command: Option<Vec<String>>,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Artifacts configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Artifacts {
        /// Paths to include as artifacts
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// Artifact expiration duration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub expire_in: Option<String>,
        /// When to upload artifacts (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
    }

    /// Cache configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Cache {
        /// Cache key
        #[serde(skip_serializing_if = "Option::is_none")]
        pub key: Option<String>,
        /// Paths to cache
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// When to save cache (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Cache policy
        #[serde(skip_serializing_if = "Option::is_none")]
        pub policy: Option<String>,
    }

    /// Rule for conditional job execution
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Rule {
        /// If condition expression
        #[serde(skip_serializing_if = "Option::is_none")]
        pub if_: Option<String>,
        /// When to run if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Variables to set if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,
    }

    /// Only/except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Only {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to include
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to include
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to include
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to include
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Except {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that don't trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Workflow configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Workflow {
        /// Rules for when to run the pipeline
        pub rules: Vec<Rule>,
    }

    /// Retry configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Retry {
        /// Simple max attempts
        MaxAttempts(u32),
        /// Detailed retry configuration
        Detailed {
            /// Maximum retry attempts
            max: u32,
            /// When to retry
            #[serde(skip_serializing_if = "Option::is_none")]
            when: Option<Vec<String>>,
        },
    }

    /// Include configuration for external pipeline files
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Include {
        /// Simple string include
        Local(String),
        /// Detailed include configuration
        Detailed {
            /// Local file path
            #[serde(skip_serializing_if = "Option::is_none")]
            local: Option<String>,
            /// Remote file URL
            #[serde(skip_serializing_if = "Option::is_none")]
            remote: Option<String>,
            /// Include from project
            #[serde(skip_serializing_if = "Option::is_none")]
            project: Option<String>,
            /// Include specific file from project
            #[serde(skip_serializing_if = "Option::is_none")]
            file: Option<String>,
            /// Include template
            #[serde(skip_serializing_if = "Option::is_none")]
            template: Option<String>,
            /// Ref to use when including from project
            #[serde(skip_serializing_if = "Option::is_none")]
            ref_: Option<String>,
        },
    }
}
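Because `Pipeline.jobs` uses `#[serde(flatten)]`, any top-level key that is not one of the named fields deserializes as a job. A minimal sketch of loading a pipeline into these models (illustrative only; `serde_yaml` is already a dependency of the crate):

```rust
use models::gitlab::Pipeline;

fn main() -> Result<(), serde_yaml::Error> {
    let yaml = r#"
stages:
  - build
build_job:
  stage: build
  script:
    - make build
"#;
    // `stages` is consumed by the named struct field; the leftover
    // `build_job` key lands in the flattened `jobs` map.
    let pipeline: Pipeline = serde_yaml::from_str(yaml)?;
    assert_eq!(pipeline.stages.as_ref().unwrap().len(), 1);
    assert!(pipeline.jobs.contains_key("build_job"));
    Ok(())
}
```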
21
crates/parser/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
[package]
name = "parser"
version.workspace = true
edition.workspace = true
description = "Parser functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
matrix = { path = "../matrix" }

# External dependencies
jsonschema.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true

[dev-dependencies]
tempfile = "3.7"
275
crates/parser/src/gitlab.rs
Normal file
@@ -0,0 +1,275 @@
use crate::schema::{SchemaType, SchemaValidator};
use crate::workflow;
use models::gitlab::Pipeline;
use models::ValidationResult;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum GitlabParserError {
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("YAML parsing error: {0}")]
    YamlError(#[from] serde_yaml::Error),

    #[error("Invalid pipeline structure: {0}")]
    InvalidStructure(String),

    #[error("Schema validation error: {0}")]
    SchemaValidationError(String),
}

/// Parse a GitLab CI/CD pipeline file
pub fn parse_pipeline(pipeline_path: &Path) -> Result<Pipeline, GitlabParserError> {
    // Read the pipeline file
    let pipeline_content = fs::read_to_string(pipeline_path)?;

    // Validate against schema
    let validator = SchemaValidator::new().map_err(GitlabParserError::SchemaValidationError)?;

    validator
        .validate_with_specific_schema(&pipeline_content, SchemaType::GitLab)
        .map_err(GitlabParserError::SchemaValidationError)?;

    // Parse the pipeline YAML
    let pipeline: Pipeline = serde_yaml::from_str(&pipeline_content)?;

    // Return the parsed pipeline
    Ok(pipeline)
}

/// Validate the basic structure of a GitLab CI/CD pipeline
pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Check for at least one job
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Check for script in jobs
    for (job_name, job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }
    }

    // Check that referenced stages are defined
    if let Some(stages) = &pipeline.stages {
        for (job_name, job) in &pipeline.jobs {
            if let Some(stage) = &job.stage {
                if !stages.contains(stage) {
                    result.add_issue(format!(
                        "Job '{}' references undefined stage '{}'",
                        job_name, stage
                    ));
                }
            }
        }
    }

    // Check that job dependencies exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !pipeline.jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                }
            }
        }
    }

    // Check that job extensions exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if !pipeline.jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                }
            }
        }
    }

    result
}
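A small sketch of what these checks report for an invalid pipeline (illustrative test; the job names and script are made up):

```rust
#[test]
fn reports_undefined_stage() {
    // A job referencing a stage missing from `stages` should add an
    // issue and flip `is_valid` to false.
    let pipeline: Pipeline = serde_yaml::from_str(
        "stages: [build]\ndeploy_job:\n  stage: deploy\n  script: [./deploy.sh]\n",
    )
    .unwrap();

    let result = validate_pipeline_structure(&pipeline);
    assert!(!result.is_valid);
    assert!(result
        .issues
        .iter()
        .any(|i| i.contains("undefined stage 'deploy'")));
}
```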

/// Convert a GitLab CI/CD pipeline to a format compatible with the workflow executor
pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition {
    // Create a new workflow with required fields
    let mut workflow = workflow::WorkflowDefinition {
        name: "Converted GitLab CI Pipeline".to_string(),
        on: vec!["push".to_string()], // Default trigger
        on_raw: serde_yaml::Value::String("push".to_string()),
        jobs: HashMap::new(),
    };

    // Convert each GitLab job to a GitHub Actions job
    for (job_name, gitlab_job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = gitlab_job.template {
            continue;
        }

        // Create a new job
        let mut job = workflow::Job {
            runs_on: "ubuntu-latest".to_string(), // Default runner
            needs: None,
            steps: Vec::new(),
            env: HashMap::new(),
            matrix: None,
            services: HashMap::new(),
            if_condition: None,
            outputs: None,
            permissions: None,
        };

        // Add job-specific environment variables
        if let Some(variables) = &gitlab_job.variables {
            job.env.extend(variables.clone());
        }

        // Add global variables if they exist
        if let Some(variables) = &pipeline.variables {
            // Only add if not already defined at job level
            for (key, value) in variables {
                job.env.entry(key.clone()).or_insert_with(|| value.clone());
            }
        }

        // Convert before_script to steps if it exists
        if let Some(before_script) = &gitlab_job.before_script {
            for (i, cmd) in before_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Before script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert main script to steps
        if let Some(script) = &gitlab_job.script {
            for (i, cmd) in script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Run script line {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert after_script to steps if it exists
        if let Some(after_script) = &gitlab_job.after_script {
            for (i, cmd) in after_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("After script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: Some(true), // After script should continue even if previous steps fail
                };
                job.steps.push(step);
            }
        }

        // Add services if they exist
        if let Some(services) = &gitlab_job.services {
            for (i, service) in services.iter().enumerate() {
                let service_name = format!("service-{}", i);
                let service_image = match service {
                    models::gitlab::Service::Simple(name) => name.clone(),
                    models::gitlab::Service::Detailed { name, .. } => name.clone(),
                };

                let service = workflow::Service {
                    image: service_image,
                    ports: None,
                    env: HashMap::new(),
                    volumes: None,
                    options: None,
                };

                job.services.insert(service_name, service);
            }
        }

        // Add the job to the workflow
        workflow.jobs.insert(job_name.clone(), job);
    }

    workflow
}
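For instance, a GitLab job with one `before_script` line and two `script` lines converts to a three-step GitHub-style job on the default runner (sketch; the job name and commands are made up):

```rust
#[test]
fn converts_scripts_to_steps() {
    let pipeline: Pipeline = serde_yaml::from_str(
        "test_job:\n  before_script: [apt-get update]\n  script: [make, make test]\n",
    )
    .unwrap();

    let wf = convert_to_workflow_format(&pipeline);
    let job = &wf.jobs["test_job"];

    assert_eq!(job.runs_on, "ubuntu-latest");
    assert_eq!(job.steps.len(), 3);
    assert_eq!(job.steps[0].name.as_deref(), Some("Before script 1"));
    assert_eq!(job.steps[1].name.as_deref(), Some("Run script line 1"));
}
```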

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;
    use tempfile::NamedTempFile;

    #[test]
    fn test_parse_simple_pipeline() {
        // Create a temporary file with a simple GitLab CI/CD pipeline
        let mut file = NamedTempFile::new().unwrap();
        let content = r#"
stages:
  - build
  - test

build_job:
  stage: build
  script:
    - echo "Building..."
    - make build

test_job:
  stage: test
  script:
    - echo "Testing..."
    - make test
"#;
        fs::write(&file, content).unwrap();

        // Parse the pipeline
        let pipeline = parse_pipeline(&file.path()).unwrap();

        // Validate basic structure
        assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2);
        assert_eq!(pipeline.jobs.len(), 2);

        // Check job contents
        let build_job = pipeline.jobs.get("build_job").unwrap();
        assert_eq!(build_job.stage.as_ref().unwrap(), "build");
        assert_eq!(build_job.script.as_ref().unwrap().len(), 2);

        let test_job = pipeline.jobs.get("test_job").unwrap();
        assert_eq!(test_job.stage.as_ref().unwrap(), "test");
        assert_eq!(test_job.script.as_ref().unwrap().len(), 2);
    }
}
@@ -1,2 +1,5 @@
// parser crate

pub mod gitlab;
pub mod schema;
pub mod workflow;
111
crates/parser/src/schema.rs
Normal file
@@ -0,0 +1,111 @@
use jsonschema::JSONSchema;
use serde_json::Value;
use std::fs;
use std::path::Path;

const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../../schemas/github-workflow.json");
const GITLAB_CI_SCHEMA: &str = include_str!("../../../schemas/gitlab-ci.json");

#[derive(Debug, Clone, Copy)]
pub enum SchemaType {
    GitHub,
    GitLab,
}

pub struct SchemaValidator {
    github_schema: JSONSchema,
    gitlab_schema: JSONSchema,
}

impl SchemaValidator {
    pub fn new() -> Result<Self, String> {
        let github_schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
            .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;

        let gitlab_schema_json: Value = serde_json::from_str(GITLAB_CI_SCHEMA)
            .map_err(|e| format!("Failed to parse GitLab CI schema: {}", e))?;

        let github_schema = JSONSchema::compile(&github_schema_json)
            .map_err(|e| format!("Failed to compile GitHub JSON schema: {}", e))?;

        let gitlab_schema = JSONSchema::compile(&gitlab_schema_json)
            .map_err(|e| format!("Failed to compile GitLab JSON schema: {}", e))?;

        Ok(Self {
            github_schema,
            gitlab_schema,
        })
    }

    pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
        // Determine the schema type based on the filename
        let schema_type = if workflow_path.file_name().is_some_and(|name| {
            let name_str = name.to_string_lossy();
            name_str.ends_with(".gitlab-ci.yml") || name_str.ends_with(".gitlab-ci.yaml")
        }) {
            SchemaType::GitLab
        } else {
            SchemaType::GitHub
        };

        // Read the workflow file
        let content = fs::read_to_string(workflow_path)
            .map_err(|e| format!("Failed to read workflow file: {}", e))?;

        // Parse YAML to JSON Value
        let workflow_json: Value = serde_yaml::from_str(&content)
            .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;

        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }

    pub fn validate_with_specific_schema(
        &self,
        content: &str,
        schema_type: SchemaType,
    ) -> Result<(), String> {
        // Parse YAML to JSON Value
        let workflow_json: Value =
            serde_yaml::from_str(content).map_err(|e| format!("Failed to parse YAML: {}", e))?;

        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }
}
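Usage is two calls: build the validator, which compiles both embedded JSON schemas once, then validate content against a specific schema. A sketch (the `check` wrapper is hypothetical):

```rust
use parser::schema::{SchemaType, SchemaValidator};

fn check(content: &str) -> Result<(), String> {
    // Compiling the schemas is the expensive step; reuse the
    // validator rather than rebuilding it per file.
    let validator = SchemaValidator::new()?;
    validator.validate_with_specific_schema(content, SchemaType::GitHub)
}
```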
@@ -1,11 +1,31 @@
use crate::matrix::MatrixConfig;
use serde::{Deserialize, Serialize};
use matrix::MatrixConfig;
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::Path;

use super::schema::SchemaValidator;

// Custom deserializer for needs field that handles both string and array formats
fn deserialize_needs<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
where
    D: Deserializer<'de>,
{
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum StringOrVec {
        String(String),
        Vec(Vec<String>),
    }

    let value = Option::<StringOrVec>::deserialize(deserializer)?;
    match value {
        Some(StringOrVec::String(s)) => Ok(Some(vec![s])),
        Some(StringOrVec::Vec(v)) => Ok(Some(v)),
        None => Ok(None),
    }
}
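This lets both YAML spellings of `needs` deserialize to `Option<Vec<String>>`. A sketch of the two accepted forms (the `NeedsOnly` wrapper struct is hypothetical, defined here only to exercise the deserializer):

```rust
#[derive(serde::Deserialize)]
struct NeedsOnly {
    #[serde(default, deserialize_with = "deserialize_needs")]
    needs: Option<Vec<String>>,
}

#[test]
fn needs_accepts_string_or_list() {
    // Scalar form normalizes to a one-element vector
    let single: NeedsOnly = serde_yaml::from_str("needs: build").unwrap();
    assert_eq!(single.needs, Some(vec!["build".to_string()]));

    // List form passes through unchanged
    let list: NeedsOnly = serde_yaml::from_str("needs: [build, lint]").unwrap();
    assert_eq!(list.needs, Some(vec!["build".into(), "lint".into()]));

    // Omitting the key yields None via #[serde(default)]
    let absent: NeedsOnly = serde_yaml::from_str("{}").unwrap();
    assert_eq!(absent.needs, None);
}
```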

#[derive(Debug, Deserialize, Serialize)]
pub struct WorkflowDefinition {
    pub name: String,
@@ -20,7 +40,7 @@ pub struct WorkflowDefinition {
pub struct Job {
    #[serde(rename = "runs-on")]
    pub runs_on: String,
    #[serde(default)]
    #[serde(default, deserialize_with = "deserialize_needs")]
    pub needs: Option<Vec<String>>,
    pub steps: Vec<Step>,
    #[serde(default)]
@@ -29,6 +49,12 @@ pub struct Job {
    pub matrix: Option<MatrixConfig>,
    #[serde(default)]
    pub services: HashMap<String, Service>,
    #[serde(default, rename = "if")]
    pub if_condition: Option<String>,
    #[serde(default)]
    pub outputs: Option<HashMap<String, String>>,
    #[serde(default)]
    pub permissions: Option<HashMap<String, String>>,
}

#[derive(Debug, Deserialize, Serialize)]
22
crates/runtime/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
[package]
name = "runtime"
version.workspace = true
edition.workspace = true
description = "Runtime environment for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
logging = { path = "../logging", version = "0.4.0" }

# External dependencies
async-trait.workspace = true
once_cell = "1.19"
serde.workspace = true
serde_yaml.workspace = true
tempfile = "3.9"
tokio.workspace = true
futures = "0.3"
utils = { path = "../utils", version = "0.4.0" }
which = "4.4"
@@ -1,6 +1,6 @@
use crate::logging;
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
use crate::container::{ContainerError, ContainerOutput, ContainerRuntime};
use async_trait::async_trait;
use logging;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::fs;
@@ -8,6 +8,7 @@ use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Mutex;
use tempfile::TempDir;
use which;

// Global collection of resources to clean up
static EMULATION_WORKSPACES: Lazy<Mutex<Vec<PathBuf>>> = Lazy::new(|| Mutex::new(Vec::new()));
@@ -160,29 +161,189 @@ impl ContainerRuntime for EmulationRuntime {
            command_str.push_str(part);
        }

        // Log the command being executed
        // Log more detailed debugging information
        logging::info(&format!("Executing command in container: {}", command_str));
        logging::info(&format!("Working directory: {}", working_dir.display()));
        logging::info(&format!("Command length: {}", command.len()));

        // Special handling for Rust/Cargo actions
        if command_str.contains("rust") || command_str.contains("cargo") {
            logging::debug(&format!("Executing Rust command: {}", command_str));
        if command.is_empty() {
            return Err(ContainerError::ContainerExecution(
                "Empty command array".to_string(),
            ));
        }

            let mut cmd = Command::new("cargo");
            let parts = command_str.split_whitespace().collect::<Vec<&str>>();
        // Print each command part separately for debugging
        for (i, part) in command.iter().enumerate() {
            logging::info(&format!("Command part {}: '{}'", i, part));
        }

            let current_dir = working_dir.to_str().unwrap_or(".");
            cmd.current_dir(current_dir);
        // Log environment variables
        logging::info("Environment variables:");
        for (key, value) in env_vars {
            logging::info(&format!(" {}={}", key, value));
        }

        // Find actual working directory - determine if we should use the current directory instead
        let actual_working_dir: PathBuf = if !working_dir.exists() {
            // Look for GITHUB_WORKSPACE or CI_PROJECT_DIR in env_vars
            let mut workspace_path = None;
            for (key, value) in env_vars {
                if *key == "GITHUB_WORKSPACE" || *key == "CI_PROJECT_DIR" {
                    workspace_path = Some(PathBuf::from(value));
                    break;
                }
            }

            // If found, use that as the working directory
            if let Some(path) = workspace_path {
                if path.exists() {
                    logging::info(&format!(
                        "Using environment-defined workspace: {}",
                        path.display()
                    ));
                    path
                } else {
                    // Fallback to current directory
                    let current_dir =
                        std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
                    logging::info(&format!(
                        "Using current directory: {}",
                        current_dir.display()
                    ));
                    current_dir
                }
            } else {
                // Fallback to current directory
                let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
                logging::info(&format!(
                    "Using current directory: {}",
                    current_dir.display()
                ));
                current_dir
            }
        } else {
            working_dir.to_path_buf()
        };

        logging::info(&format!(
            "Using actual working directory: {}",
            actual_working_dir.display()
        ));

        // Check if path contains the command (for shell script execution)
        let command_path = which::which(command[0]);
        match &command_path {
            Ok(path) => logging::info(&format!("Found command at: {}", path.display())),
            Err(e) => logging::error(&format!(
                "Command not found in PATH: {} - Error: {}",
                command[0], e
            )),
        }

        // First, check if this is a simple shell command (like echo)
        if command_str.starts_with("echo ")
            || command_str.starts_with("cp ")
            || command_str.starts_with("mkdir ")
            || command_str.starts_with("mv ")
        {
            logging::info("Executing as shell command");
            // Execute as a shell command
            let mut cmd = Command::new("sh");
            cmd.arg("-c");
            cmd.arg(&command_str);
            cmd.current_dir(&actual_working_dir);

            // Add environment variables
            for (key, value) in env_vars {
                cmd.env(key, value);
            }

            match cmd.output() {
                Ok(output_result) => {
                    let exit_code = output_result.status.code().unwrap_or(-1);
                    let output = String::from_utf8_lossy(&output_result.stdout).to_string();
                    let error = String::from_utf8_lossy(&output_result.stderr).to_string();

                    logging::debug(&format!(
                        "Shell command completed with exit code: {}",
                        exit_code
                    ));

                    if exit_code != 0 {
                        let mut error_details = format!(
                            "Command failed with exit code: {}\nCommand: {}\n\nError output:\n{}",
                            exit_code, command_str, error
                        );

                        // Add environment variables to error details
                        error_details.push_str("\n\nEnvironment variables:\n");
                        for (key, value) in env_vars {
                            if key.starts_with("GITHUB_") || key.starts_with("CI_") {
                                error_details.push_str(&format!("{}={}\n", key, value));
                            }
                        }

                        return Err(ContainerError::ContainerExecution(error_details));
                    }

                    return Ok(ContainerOutput {
                        stdout: output,
                        stderr: error,
                        exit_code,
                    });
                }
                Err(e) => {
                    return Err(ContainerError::ContainerExecution(format!(
                        "Failed to execute command: {}\nError: {}",
                        command_str, e
                    )));
                }
            }
        }

        // Special handling for Rust/Cargo commands
        if command_str.starts_with("cargo ") || command_str.starts_with("rustup ") {
            let parts: Vec<&str> = command_str.split_whitespace().collect();
            if parts.is_empty() {
                return Err(ContainerError::ContainerExecution(
                    "Empty command".to_string(),
                ));
            }

            let mut cmd = Command::new(parts[0]);

            // Always use the current directory for cargo/rust commands rather than the temporary directory
            let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
            logging::info(&format!(
                "Using project directory for Rust command: {}",
                current_dir.display()
            ));
            cmd.current_dir(&current_dir);

            // Add environment variables
            for (key, value) in env_vars {
                // Don't use the CI_PROJECT_DIR for CARGO_HOME, use the actual project directory
                if *key == "CARGO_HOME" && value.contains("${CI_PROJECT_DIR}") {
                    let cargo_home =
                        value.replace("${CI_PROJECT_DIR}", &current_dir.to_string_lossy());
                    logging::info(&format!("Setting CARGO_HOME to: {}", cargo_home));
                    cmd.env(key, cargo_home);
                } else {
                    cmd.env(key, value);
                }
            }

            // Add command arguments
            if parts.len() > 1 {
                cmd.args(&parts[1..]);
            }

            logging::debug(&format!(
                "Executing Rust command: {} in {}",
                command_str,
                current_dir.display()
            ));

            match cmd.output() {
                Ok(output_result) => {
                    let exit_code = output_result.status.code().unwrap_or(-1);
@@ -200,7 +361,11 @@ impl ContainerRuntime for EmulationRuntime {
                    // Add environment variables to error details
                    error_details.push_str("\n\nEnvironment variables:\n");
                    for (key, value) in env_vars {
                        if key.starts_with("GITHUB_") || key.starts_with("RUST") {
                        if key.starts_with("GITHUB_")
                            || key.starts_with("RUST")
                            || key.starts_with("CARGO")
                            || key.starts_with("CI_")
                        {
                            error_details.push_str(&format!("{}={}\n", key, value));
                        }
                    }
@@ -223,11 +388,11 @@ impl ContainerRuntime for EmulationRuntime {
            }
        }

        // For other commands, use a shell
        // For other commands, use a shell as fallback
        let mut cmd = Command::new("sh");
        cmd.arg("-c");
        cmd.arg(&command_str);
        cmd.current_dir(working_dir.to_str().unwrap_or("."));
        cmd.current_dir(&actual_working_dir);

        // Add environment variables
        for (key, value) in env_vars {
@@ -251,7 +416,7 @@ impl ContainerRuntime for EmulationRuntime {
            // Add environment variables to error details
            error_details.push_str("\n\nEnvironment variables:\n");
            for (key, value) in env_vars {
                if key.starts_with("GITHUB_") {
                if key.starts_with("GITHUB_") || key.starts_with("CI_") {
                    error_details.push_str(&format!("{}={}\n", key, value));
                }
            }
@@ -1,2 +1,4 @@
// runtime crate

pub mod container;
pub mod emulation;
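A condensed sketch of the command routing the emulation hunks above implement (names and signature simplified; this is not the actual method): shell built-ins and unrecognized commands run through `sh -c` in the resolved working directory, while `cargo`/`rustup` invocations are spawned directly in the project directory instead of the temporary workspace.

```rust
use std::process::Command;

// Simplified routing, assuming the working directory has already
// been resolved the way the emulation runtime does above.
fn route(command_str: &str) -> Command {
    if command_str.starts_with("cargo ") || command_str.starts_with("rustup ") {
        // Spawn cargo/rustup directly in the project directory
        let parts: Vec<&str> = command_str.split_whitespace().collect();
        let mut cmd = Command::new(parts[0]);
        cmd.args(&parts[1..]);
        cmd.current_dir(std::env::current_dir().unwrap());
        cmd
    } else {
        // echo/cp/mkdir/mv and everything else fall back to the shell
        let mut cmd = Command::new("sh");
        cmd.arg("-c").arg(command_str);
        cmd
    }
}
```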
27
crates/ui/Cargo.toml
Normal file
@@ -0,0 +1,27 @@
[package]
name = "ui"
version.workspace = true
edition.workspace = true
description = "user interface functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
evaluator = { path = "../evaluator" }
executor = { path = "../executor" }
logging = { path = "../logging" }
utils = { path = "../utils" }
github = { path = "../github" }

# External dependencies
chrono.workspace = true
crossterm.workspace = true
ratatui.workspace = true
serde.workspace = true
serde_yaml.workspace = true
tokio.workspace = true
serde_json.workspace = true
reqwest = { workspace = true, features = ["json"] }
regex.workspace = true
futures.workspace = true
462
crates/ui/src/app/mod.rs
Normal file
@@ -0,0 +1,462 @@
// App module for UI state and main TUI entry point
mod state;

use crate::handlers::workflow::start_next_workflow_execution;
use crate::models::{ExecutionResultMsg, Workflow, WorkflowStatus};
use crate::utils::load_workflows;
use crate::views::render_ui;
use chrono::Local;
use crossterm::{
    event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyModifiers},
    execute,
    terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};
use executor::RuntimeType;
use ratatui::{backend::CrosstermBackend, Terminal};
use std::io::{self, stdout};
use std::path::PathBuf;
use std::sync::mpsc;
use std::time::{Duration, Instant};

pub use state::App;

// Main entry point for the TUI interface
#[allow(clippy::ptr_arg)]
pub async fn run_wrkflw_tui(
    path: Option<&PathBuf>,
    runtime_type: RuntimeType,
    verbose: bool,
    preserve_containers_on_failure: bool,
) -> io::Result<()> {
    // Terminal setup
    enable_raw_mode()?;
    let mut stdout = stdout();
    execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;

    // Set up channel for async communication
    let (tx, rx): (
        mpsc::Sender<ExecutionResultMsg>,
        mpsc::Receiver<ExecutionResultMsg>,
    ) = mpsc::channel();

    // Initialize app state
    let mut app = App::new(
        runtime_type.clone(),
        tx.clone(),
        preserve_containers_on_failure,
    );

    if app.validation_mode {
        app.logs.push("Starting in validation mode".to_string());
        logging::info("Starting in validation mode");
    }

    // Load workflows
    let dir_path = match path {
        Some(path) if path.is_dir() => path.clone(),
        Some(path) if path.is_file() => {
            // Single workflow file
            let name = path
                .file_name()
                .unwrap_or_default()
                .to_string_lossy()
                .into_owned();

            app.workflows = vec![Workflow {
                name: name.clone(),
                path: path.clone(),
                selected: true,
                status: WorkflowStatus::NotStarted,
                execution_details: None,
            }];

            // Queue the single workflow for execution
            app.execution_queue = vec![0];
            app.start_execution();

            // Return parent dir or current dir if no parent
            path.parent()
                .map(|p| p.to_path_buf())
                .unwrap_or_else(|| PathBuf::from("."))
        }
        _ => PathBuf::from(".github/workflows"),
    };

    // Only load directory if we haven't already loaded a single file
    if app.workflows.is_empty() {
        app.workflows = load_workflows(&dir_path);
    }

    // Run the main event loop
    let tx_clone = tx.clone();

    // Run the event loop
    let result = run_tui_event_loop(&mut terminal, &mut app, &tx_clone, &rx, verbose);

    // Clean up terminal
    disable_raw_mode()?;
    execute!(
        terminal.backend_mut(),
        LeaveAlternateScreen,
        DisableMouseCapture
    )?;
    terminal.show_cursor()?;

    match result {
        Ok(_) => Ok(()),
        Err(e) => {
            // If the TUI fails to initialize or crashes, fall back to CLI mode
            logging::error(&format!("Failed to start UI: {}", e));

            // Only for 'tui' command should we fall back to CLI mode for files
            // For other commands, return the error
            if let Some(path) = path {
                if path.is_file() {
                    logging::error("Falling back to CLI mode...");
                    crate::handlers::workflow::execute_workflow_cli(path, runtime_type, verbose)
                        .await
                } else if path.is_dir() {
                    crate::handlers::workflow::validate_workflow(path, verbose)
                } else {
                    Err(e)
                }
            } else {
                Err(e)
            }
        }
    }
}

// Helper function to run the main event loop
fn run_tui_event_loop(
    terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    tx_clone: &mpsc::Sender<ExecutionResultMsg>,
    rx: &mpsc::Receiver<ExecutionResultMsg>,
    verbose: bool,
) -> io::Result<()> {
    // Max time to wait for events - keep this short to ensure UI responsiveness
    let event_poll_timeout = Duration::from_millis(50);

    // Set up a dedicated tick timer
    let tick_rate = app.tick_rate;
    let mut last_tick = Instant::now();

    loop {
        // Always redraw the UI on each loop iteration to keep it responsive
        terminal.draw(|f| {
            render_ui(f, app);
        })?;

        // Update the UI on every tick
        if last_tick.elapsed() >= tick_rate {
            app.tick();
            app.update_running_workflow_progress();
            last_tick = Instant::now();
        }

        // Non-blocking check for execution results
        if let Ok((workflow_idx, result)) = rx.try_recv() {
            app.process_execution_result(workflow_idx, result);
            app.current_execution = None;

            // Get next workflow to execute using our helper function
            start_next_workflow_execution(app, tx_clone, verbose);
        }

        // Start execution if we have a queued workflow and nothing is currently running
        if app.running && app.current_execution.is_none() && !app.execution_queue.is_empty() {
            start_next_workflow_execution(app, tx_clone, verbose);
        }

        // Handle key events with a short timeout
        if event::poll(event_poll_timeout)? {
            if let Event::Key(key) = event::read()? {
                // Handle search input first if we're in search mode and logs tab
                if app.selected_tab == 2 && app.log_search_active {
                    app.handle_log_search_input(key.code);
                    continue;
                }

                match key.code {
                    KeyCode::Char('q') => {
                        // Exit and clean up
                        break Ok(());
                    }
                    KeyCode::Esc => {
                        if app.detailed_view {
                            app.detailed_view = false;
                        } else if app.show_help {
                            app.show_help = false;
                        } else {
                            // Exit and clean up
                            break Ok(());
                        }
                    }
                    KeyCode::Tab => {
                        // Cycle through tabs
                        app.switch_tab((app.selected_tab + 1) % 4);
                    }
                    KeyCode::BackTab => {
                        // Cycle through tabs backwards
                        app.switch_tab((app.selected_tab + 3) % 4);
                    }
                    KeyCode::Char('1') | KeyCode::Char('w') => app.switch_tab(0),
                    KeyCode::Char('2') | KeyCode::Char('x') => app.switch_tab(1),
                    KeyCode::Char('3') | KeyCode::Char('l') => app.switch_tab(2),
                    KeyCode::Char('4') | KeyCode::Char('h') => app.switch_tab(3),
                    KeyCode::Up | KeyCode::Char('k') => {
                        if app.selected_tab == 2 {
                            if !app.log_search_matches.is_empty() {
                                app.previous_search_match();
                            } else {
                                app.scroll_logs_up();
                            }
                        } else if app.selected_tab == 0 {
                            app.previous_workflow();
                        } else if app.selected_tab == 1 {
                            if app.detailed_view {
                                app.previous_step();
                            } else {
                                app.previous_job();
                            }
                        }
                    }
                    KeyCode::Down | KeyCode::Char('j') => {
                        if app.selected_tab == 2 {
                            if !app.log_search_matches.is_empty() {
                                app.next_search_match();
                            } else {
                                app.scroll_logs_down();
                            }
                        } else if app.selected_tab == 0 {
                            app.next_workflow();
                        } else if app.selected_tab == 1 {
                            if app.detailed_view {
                                app.next_step();
                            } else {
                                app.next_job();
                            }
                        }
                    }
                    KeyCode::Char(' ') => {
                        if app.selected_tab == 0 && !app.running {
                            app.toggle_selected();
                        }
                    }
                    KeyCode::Enter => {
                        match app.selected_tab {
                            0 => {
                                // In workflows tab, Enter runs the selected workflow
                                if !app.running {
                                    if let Some(idx) = app.workflow_list_state.selected() {
                                        app.workflows[idx].selected = true;
                                        app.queue_selected_for_execution();
                                        app.start_execution();
                                    }
                                }
                            }
                            1 => {
                                // In execution tab, Enter shows job details
                                app.toggle_detailed_view();
                            }
                            _ => {}
                        }
                    }
                    KeyCode::Char('r') => {
                        // Check if shift is pressed - this might be receiving the reset command
                        if key.modifiers.contains(KeyModifiers::SHIFT) {
                            let timestamp = Local::now().format("%H:%M:%S").to_string();
                            app.logs.push(format!(
                                "[{}] DEBUG: Shift+r detected - this should be uppercase R",
                                timestamp
                            ));
                            logging::info(
                                "Shift+r detected as lowercase - this should be uppercase R",
                            );

                            if !app.running {
                                // Reset workflow status with Shift+r
                                app.logs.push(format!(
                                    "[{}] Attempting to reset workflow status via Shift+r...",
                                    timestamp
                                ));
                                app.reset_workflow_status();

                                // Force redraw to update UI immediately
                                terminal.draw(|f| {
                                    render_ui(f, app);
                                })?;
                            }
                        } else if !app.running {
                            app.queue_selected_for_execution();
                            app.start_execution();
                        }
                    }
                    KeyCode::Char('a') => {
                        if !app.running {
                            // Select all workflows
                            for workflow in &mut app.workflows {
                                workflow.selected = true;
                            }
                        }
                    }
                    KeyCode::Char('e') => {
                        if !app.running {
                            app.toggle_emulation_mode();
                        }
                    }
                    KeyCode::Char('v') => {
                        if !app.running {
                            app.toggle_validation_mode();
                        }
                    }
                    KeyCode::Char('n') => {
                        if app.selected_tab == 2 && !app.log_search_query.is_empty() {
                            app.next_search_match();
                        } else if app.selected_tab == 0 && !app.running {
                            // Deselect all workflows
                            for workflow in &mut app.workflows {
                                workflow.selected = false;
                            }
                        }
                    }
                    KeyCode::Char('R') => {
                        let timestamp = Local::now().format("%H:%M:%S").to_string();
                        app.logs.push(format!(
|
||||
"[{}] DEBUG: Reset key 'Shift+R' pressed",
|
||||
timestamp
|
||||
));
|
||||
logging::info("Reset key 'Shift+R' pressed");
|
||||
|
||||
if !app.running {
|
||||
// Reset workflow status
|
||||
app.logs.push(format!(
|
||||
"[{}] Attempting to reset workflow status...",
|
||||
timestamp
|
||||
));
|
||||
app.reset_workflow_status();
|
||||
|
||||
// Force redraw to update UI immediately
|
||||
terminal.draw(|f| {
|
||||
render_ui(f, app);
|
||||
})?;
|
||||
} else {
|
||||
app.logs.push(format!(
|
||||
"[{}] Cannot reset workflow while another operation is running",
|
||||
timestamp
|
||||
));
|
||||
}
|
||||
}
|
||||
KeyCode::Char('?') => {
|
||||
// Toggle help overlay
|
||||
app.show_help = !app.show_help;
|
||||
}
|
||||
KeyCode::Char('t') => {
|
||||
// Only trigger workflow if not already running and we're in the workflows tab
|
||||
if !app.running && app.selected_tab == 0 {
|
||||
if let Some(selected_idx) = app.workflow_list_state.selected() {
|
||||
if selected_idx < app.workflows.len() {
|
||||
let workflow = &app.workflows[selected_idx];
|
||||
if workflow.status == WorkflowStatus::NotStarted {
|
||||
app.trigger_selected_workflow();
|
||||
} else if workflow.status == WorkflowStatus::Running {
|
||||
app.logs.push(format!(
|
||||
"Workflow '{}' is already running",
|
||||
workflow.name
|
||||
));
|
||||
logging::warning(&format!(
|
||||
"Workflow '{}' is already running",
|
||||
workflow.name
|
||||
));
|
||||
} else {
|
||||
// First, get all the data we need from the workflow
|
||||
let workflow_name = workflow.name.clone();
|
||||
let status_text = match workflow.status {
|
||||
WorkflowStatus::Success => "Success",
|
||||
WorkflowStatus::Failed => "Failed",
|
||||
WorkflowStatus::Skipped => "Skipped",
|
||||
_ => "current",
|
||||
};
|
||||
let needs_reset_hint = workflow.status
|
||||
== WorkflowStatus::Success
|
||||
|| workflow.status == WorkflowStatus::Failed
|
||||
|| workflow.status == WorkflowStatus::Skipped;
|
||||
|
||||
// Now set the status message (mutable borrow)
|
||||
app.set_status_message(format!(
|
||||
"Cannot trigger workflow '{}' in {} state. Press Shift+R to reset.",
|
||||
workflow_name,
|
||||
status_text
|
||||
));
|
||||
|
||||
// Add log entries
|
||||
app.logs.push(format!(
|
||||
"Cannot trigger workflow '{}' in {} state",
|
||||
workflow_name, status_text
|
||||
));
|
||||
|
||||
// Add hint about using reset
|
||||
if needs_reset_hint {
|
||||
let timestamp =
|
||||
Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs.push(format!(
|
||||
"[{}] Hint: Press 'Shift+R' to reset the workflow status and allow triggering",
|
||||
timestamp
|
||||
));
|
||||
}
|
||||
|
||||
logging::warning(&format!(
|
||||
"Cannot trigger workflow in {} state",
|
||||
status_text
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
app.logs.push("No workflow selected to trigger".to_string());
|
||||
logging::warning("No workflow selected to trigger");
|
||||
}
|
||||
} else if app.running {
|
||||
app.logs.push(
|
||||
"Cannot trigger workflow while another operation is in progress"
|
||||
.to_string(),
|
||||
);
|
||||
logging::warning(
|
||||
"Cannot trigger workflow while another operation is in progress",
|
||||
);
|
||||
} else if app.selected_tab != 0 {
|
||||
app.logs
|
||||
.push("Switch to Workflows tab to trigger a workflow".to_string());
|
||||
logging::warning("Switch to Workflows tab to trigger a workflow");
|
||||
// For better UX, we could also automatically switch to the Workflows tab here
|
||||
app.switch_tab(0);
|
||||
}
|
||||
}
|
||||
KeyCode::Char('s') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.toggle_log_search();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('f') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.toggle_log_filter();
|
||||
}
|
||||
}
|
||||
KeyCode::Char('c') => {
|
||||
if app.selected_tab == 2 {
|
||||
app.clear_log_search_and_filter();
|
||||
}
|
||||
}
|
||||
KeyCode::Char(c) => {
|
||||
if app.selected_tab == 2 && app.log_search_active {
|
||||
app.handle_log_search_input(KeyCode::Char(c));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
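The loop above stays responsive by combining two non-blocking primitives: `Receiver::try_recv` to drain execution results and `event::poll` with a 50 ms timeout for keyboard input, redrawing on every iteration. A minimal, std-only sketch of the same drain-then-tick shape (the message type is simplified to `String` purely for illustration):

```rust
use std::sync::mpsc;
use std::time::{Duration, Instant};

fn main() {
    let (tx, rx) = mpsc::channel::<String>();
    std::thread::spawn(move || {
        std::thread::sleep(Duration::from_millis(100));
        let _ = tx.send("job finished".to_string());
    });

    let tick_rate = Duration::from_millis(250);
    let mut last_tick = Instant::now();
    loop {
        // Drain results without blocking, like rx.try_recv() in the loop above
        if let Ok(msg) = rx.try_recv() {
            println!("received: {}", msg);
            break;
        }
        if last_tick.elapsed() >= tick_rate {
            // periodic UI work (tick/progress updates) would go here
            last_tick = Instant::now();
        }
        std::thread::sleep(Duration::from_millis(10));
    }
}
```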
900
crates/ui/src/app/state.rs
Normal file
@@ -0,0 +1,900 @@
// App state for the UI
use crate::models::{
    ExecutionResultMsg, JobExecution, LogFilterLevel, StepExecution, Workflow, WorkflowExecution,
    WorkflowStatus,
};
use chrono::Local;
use crossterm::event::KeyCode;
use executor::{JobStatus, RuntimeType, StepStatus};
use ratatui::widgets::{ListState, TableState};
use std::sync::mpsc;
use std::time::{Duration, Instant};

/// Application state
pub struct App {
    pub workflows: Vec<Workflow>,
    pub workflow_list_state: ListState,
    pub selected_tab: usize,
    pub running: bool,
    pub show_help: bool,
    pub runtime_type: RuntimeType,
    pub validation_mode: bool,
    pub preserve_containers_on_failure: bool,
    pub execution_queue: Vec<usize>, // Indices of workflows to execute
    pub current_execution: Option<usize>,
    pub logs: Vec<String>,            // Overall execution logs
    pub log_scroll: usize,            // Scrolling position for logs
    pub job_list_state: ListState,    // For viewing job details
    pub detailed_view: bool,          // Whether we're in detailed view mode
    pub step_list_state: ListState,   // For selecting steps in detailed view
    pub step_table_state: TableState, // For the steps table in detailed view
    pub last_tick: Instant,           // For UI animations and updates
    pub tick_rate: Duration,          // How often to update the UI
    pub tx: mpsc::Sender<ExecutionResultMsg>, // Channel for async communication
    pub status_message: Option<String>, // Temporary status message to display
    pub status_message_time: Option<Instant>, // When the message was set

    // Search and filter functionality
    pub log_search_query: String, // Current search query for logs
    pub log_search_active: bool,  // Whether search input is active
    pub log_filter_level: Option<LogFilterLevel>, // Current log level filter
    pub log_search_matches: Vec<usize>, // Indices of logs that match the search
    pub log_search_match_idx: usize, // Current match index for navigation
}

impl App {
    pub fn new(
        runtime_type: RuntimeType,
        tx: mpsc::Sender<ExecutionResultMsg>,
        preserve_containers_on_failure: bool,
    ) -> App {
        let mut workflow_list_state = ListState::default();
        workflow_list_state.select(Some(0));

        let mut job_list_state = ListState::default();
        job_list_state.select(Some(0));

        let mut step_list_state = ListState::default();
        step_list_state.select(Some(0));

        let mut step_table_state = TableState::default();
        step_table_state.select(Some(0));

        // Check Docker availability if Docker runtime is selected
        let mut initial_logs = Vec::new();
        let runtime_type = match runtime_type {
            RuntimeType::Docker => {
                // Use a timeout for the Docker availability check to prevent hanging
                let is_docker_available = match std::panic::catch_unwind(|| {
                    // Use a very short timeout to prevent blocking the UI
                    let result = std::thread::scope(|s| {
                        let handle = s.spawn(|| {
                            utils::fd::with_stderr_to_null(executor::docker::is_available)
                                .unwrap_or(false)
                        });

                        // Set a short timeout for the thread
                        let start = std::time::Instant::now();
                        let timeout = std::time::Duration::from_secs(1);

                        while start.elapsed() < timeout {
                            if handle.is_finished() {
                                return handle.join().unwrap_or(false);
                            }
                            std::thread::sleep(std::time::Duration::from_millis(10));
                        }

                        // If we reach here, the check took too long
                        logging::warning(
                            "Docker availability check timed out, falling back to emulation mode",
                        );
                        false
                    });
                    result
                }) {
                    Ok(result) => result,
                    Err(_) => {
                        logging::warning("Docker availability check failed with panic, falling back to emulation mode");
                        false
                    }
                };

                if !is_docker_available {
                    initial_logs.push(
                        "Docker is not available or unresponsive. Using emulation mode instead."
                            .to_string(),
                    );
                    logging::warning(
                        "Docker is not available or unresponsive. Using emulation mode instead.",
                    );
                    RuntimeType::Emulation
                } else {
                    logging::info("Docker is available, using Docker runtime");
                    RuntimeType::Docker
                }
            }
            RuntimeType::Emulation => RuntimeType::Emulation,
        };

        App {
            workflows: Vec::new(),
            workflow_list_state,
            selected_tab: 0,
            running: false,
            show_help: false,
            runtime_type,
            validation_mode: false,
            preserve_containers_on_failure,
            execution_queue: Vec::new(),
            current_execution: None,
            logs: initial_logs,
            log_scroll: 0,
            job_list_state,
            detailed_view: false,
            step_list_state,
            step_table_state,
            last_tick: Instant::now(),
            tick_rate: Duration::from_millis(250), // Update 4 times per second
            tx,
            status_message: None,
            status_message_time: None,

            // Search and filter functionality
            log_search_query: String::new(),
            log_search_active: false,
            log_filter_level: Some(LogFilterLevel::All),
            log_search_matches: Vec::new(),
            log_search_match_idx: 0,
        }
    }
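One caveat on the availability check in `App::new`: `std::thread::scope` joins every unfinished spawned thread before it returns, so the polling loop's one-second timeout only short-circuits the happy path; if the probe truly hangs, the scope still blocks at exit. A sketch of an alternative shape (not the crate's code) that genuinely bounds the wait by detaching the probe thread and using `recv_timeout`:

```rust
use std::sync::mpsc;
use std::time::Duration;

// Stand-in for the real availability probe (an assumption for this sketch).
fn probe() -> bool {
    std::thread::sleep(Duration::from_millis(50));
    true
}

fn main() {
    let (tx, rx) = mpsc::channel();
    std::thread::spawn(move || {
        let _ = tx.send(probe());
    });
    // recv_timeout bounds the wait; a hung probe thread is simply abandoned.
    let available = rx.recv_timeout(Duration::from_secs(1)).unwrap_or(false);
    println!("available: {}", available);
}
```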

    // Toggle workflow selection
    pub fn toggle_selected(&mut self) {
        if let Some(idx) = self.workflow_list_state.selected() {
            if idx < self.workflows.len() {
                self.workflows[idx].selected = !self.workflows[idx].selected;
            }
        }
    }

    pub fn toggle_emulation_mode(&mut self) {
        self.runtime_type = match self.runtime_type {
            RuntimeType::Docker => RuntimeType::Emulation,
            RuntimeType::Emulation => RuntimeType::Docker,
        };
        self.logs
            .push(format!("Switched to {} mode", self.runtime_type_name()));
    }

    pub fn toggle_validation_mode(&mut self) {
        self.validation_mode = !self.validation_mode;
        let mode = if self.validation_mode {
            "validation"
        } else {
            "normal"
        };
        let timestamp = Local::now().format("%H:%M:%S").to_string();
        self.logs
            .push(format!("[{}] Switched to {} mode", timestamp, mode));
        logging::info(&format!("Switched to {} mode", mode));
    }

    pub fn runtime_type_name(&self) -> &str {
        match self.runtime_type {
            RuntimeType::Docker => "Docker",
            RuntimeType::Emulation => "Emulation",
        }
    }

    // Move cursor up in the workflow list
    pub fn previous_workflow(&mut self) {
        if self.workflows.is_empty() {
            return;
        }

        let i = match self.workflow_list_state.selected() {
            Some(i) => {
                if i == 0 {
                    self.workflows.len() - 1
                } else {
                    i - 1
                }
            }
            None => 0,
        };
        self.workflow_list_state.select(Some(i));
    }

    // Move cursor down in the workflow list
    pub fn next_workflow(&mut self) {
        if self.workflows.is_empty() {
            return;
        }

        let i = match self.workflow_list_state.selected() {
            Some(i) => {
                if i >= self.workflows.len() - 1 {
                    0
                } else {
                    i + 1
                }
            }
            None => 0,
        };
        self.workflow_list_state.select(Some(i));
    }

    // Move cursor up in the job list
    pub fn previous_job(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected());

        if let Some(workflow_idx) = current_workflow_idx {
            if workflow_idx >= self.workflows.len() {
                return;
            }

            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if execution.jobs.is_empty() {
                    return;
                }

                let i = match self.job_list_state.selected() {
                    Some(i) => {
                        if i == 0 {
                            execution.jobs.len() - 1
                        } else {
                            i - 1
                        }
                    }
                    None => 0,
                };
                self.job_list_state.select(Some(i));

                // Reset step selection when changing jobs
                self.step_list_state.select(Some(0));
            }
        }
    }

    // Move cursor down in the job list
    pub fn next_job(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if workflow_idx >= self.workflows.len() {
                return;
            }

            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if execution.jobs.is_empty() {
                    return;
                }

                let i = match self.job_list_state.selected() {
                    Some(i) => {
                        if i >= execution.jobs.len() - 1 {
                            0
                        } else {
                            i + 1
                        }
                    }
                    None => 0,
                };
                self.job_list_state.select(Some(i));

                // Reset step selection when changing jobs
                self.step_list_state.select(Some(0));
            }
        }
    }

    // Move cursor up in step list
    pub fn previous_step(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if let Some(job_idx) = self.job_list_state.selected() {
                    if job_idx < execution.jobs.len() {
                        let steps = &execution.jobs[job_idx].steps;
                        if steps.is_empty() {
                            return;
                        }

                        let i = match self.step_list_state.selected() {
                            Some(i) => {
                                if i == 0 {
                                    steps.len() - 1
                                } else {
                                    i - 1
                                }
                            }
                            None => 0,
                        };
                        self.step_list_state.select(Some(i));
                        // Update the table state to match
                        self.step_table_state.select(Some(i));
                    }
                }
            }
        }
    }

    // Move cursor down in step list
    pub fn next_step(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if let Some(job_idx) = self.job_list_state.selected() {
                    if job_idx < execution.jobs.len() {
                        let steps = &execution.jobs[job_idx].steps;
                        if steps.is_empty() {
                            return;
                        }

                        let i = match self.step_list_state.selected() {
                            Some(i) => {
                                if i >= steps.len() - 1 {
                                    0
                                } else {
                                    i + 1
                                }
                            }
                            None => 0,
                        };
                        self.step_list_state.select(Some(i));
                        // Update the table state to match
                        self.step_table_state.select(Some(i));
                    }
                }
            }
        }
    }
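All of the `previous_*`/`next_*` methods above share the same wrap-around arithmetic over an `Option<usize>` selection. A condensed, std-only sketch of that logic (the names are illustrative, not the crate's API):

```rust
// Wrap-around "move down" over a list of `len` items, mirroring next_workflow.
fn next_index(selected: Option<usize>, len: usize) -> Option<usize> {
    if len == 0 {
        return None;
    }
    Some(match selected {
        Some(i) if i >= len - 1 => 0, // at the end: wrap to the top
        Some(i) => i + 1,
        None => 0, // nothing selected yet: start at the top
    })
}

fn main() {
    let items = ["ci.yml", "release.yml", "docs.yml"];
    let sel = next_index(Some(2), items.len()); // wraps to 0
    assert_eq!(sel, Some(0));
    println!("selected: {}", items[sel.unwrap()]);
}
```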

    // Change the tab
    pub fn switch_tab(&mut self, tab: usize) {
        self.selected_tab = tab;
    }

    // Queue selected workflows for execution
    pub fn queue_selected_for_execution(&mut self) {
        if let Some(idx) = self.workflow_list_state.selected() {
            if idx < self.workflows.len() && !self.execution_queue.contains(&idx) {
                self.execution_queue.push(idx);
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Added '{}' to execution queue. Press 'Enter' to start.",
                    timestamp, self.workflows[idx].name
                ));
            }
        }
    }

    // Start workflow execution process
    pub fn start_execution(&mut self) {
        // Only start if we have workflows in queue and nothing is currently running
        if !self.execution_queue.is_empty() && self.current_execution.is_none() {
            self.running = true;

            // Log only once at the beginning - don't initialize execution details here
            // since that will happen in start_next_workflow_execution
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            self.logs
                .push(format!("[{}] Starting workflow execution...", timestamp));
            logging::info("Starting workflow execution...");
        }
    }

    // Process execution results and update UI
    pub fn process_execution_result(
        &mut self,
        workflow_idx: usize,
        result: Result<(Vec<executor::JobResult>, ()), String>,
    ) {
        if workflow_idx >= self.workflows.len() {
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            self.logs.push(format!(
                "[{}] Error: Invalid workflow index received",
                timestamp
            ));
            logging::error("Invalid workflow index received in process_execution_result");
            return;
        }

        let workflow = &mut self.workflows[workflow_idx];

        // Ensure execution details exist
        if workflow.execution_details.is_none() {
            workflow.execution_details = Some(WorkflowExecution {
                jobs: Vec::new(),
                start_time: Local::now(),
                end_time: Some(Local::now()),
                logs: Vec::new(),
                progress: 1.0,
            });
        }

        // Update execution details with end time
        if let Some(execution_details) = &mut workflow.execution_details {
            execution_details.end_time = Some(Local::now());

            match &result {
                Ok((jobs, _)) => {
                    let timestamp = Local::now().format("%H:%M:%S").to_string();
                    execution_details
                        .logs
                        .push(format!("[{}] Operation completed successfully.", timestamp));
                    execution_details.progress = 1.0;

                    // Convert executor::JobResult to our JobExecution struct
                    execution_details.jobs = jobs
                        .iter()
                        .map(|job_result| JobExecution {
                            name: job_result.name.clone(),
                            status: match job_result.status {
                                executor::JobStatus::Success => JobStatus::Success,
                                executor::JobStatus::Failure => JobStatus::Failure,
                                executor::JobStatus::Skipped => JobStatus::Skipped,
                            },
                            steps: job_result
                                .steps
                                .iter()
                                .map(|step_result| StepExecution {
                                    name: step_result.name.clone(),
                                    status: match step_result.status {
                                        executor::StepStatus::Success => StepStatus::Success,
                                        executor::StepStatus::Failure => StepStatus::Failure,
                                        executor::StepStatus::Skipped => StepStatus::Skipped,
                                    },
                                    output: step_result.output.clone(),
                                })
                                .collect::<Vec<StepExecution>>(),
                            logs: vec![job_result.logs.clone()],
                        })
                        .collect::<Vec<JobExecution>>();
                }
                Err(e) => {
                    let timestamp = Local::now().format("%H:%M:%S").to_string();
                    execution_details
                        .logs
                        .push(format!("[{}] Error: {}", timestamp, e));
                    execution_details.progress = 1.0;

                    // Create a dummy job with the error information so users can see details
                    execution_details.jobs = vec![JobExecution {
                        name: "Workflow Execution".to_string(),
                        status: JobStatus::Failure,
                        steps: vec![StepExecution {
                            name: "Execution Error".to_string(),
                            status: StepStatus::Failure,
                            output: format!("Error: {}\n\nThis error prevented the workflow from executing properly.", e),
                        }],
                        logs: vec![format!("Workflow execution error: {}", e)],
                    }];
                }
            }
        }

        match result {
            Ok(_) => {
                workflow.status = WorkflowStatus::Success;
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Workflow '{}' completed successfully!",
                    timestamp, workflow.name
                ));
                logging::info(&format!(
                    "[{}] Workflow '{}' completed successfully!",
                    timestamp, workflow.name
                ));
            }
            Err(e) => {
                workflow.status = WorkflowStatus::Failed;
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Workflow '{}' failed: {}",
                    timestamp, workflow.name, e
                ));
                logging::error(&format!(
                    "[{}] Workflow '{}' failed: {}",
                    timestamp, workflow.name, e
                ));
            }
        }

        // Only clear current_execution if it matches the processed workflow
        if let Some(current_idx) = self.current_execution {
            if current_idx == workflow_idx {
                self.current_execution = None;
            }
        }
    }

    // Get next workflow for execution
    pub fn get_next_workflow_to_execute(&mut self) -> Option<usize> {
        if self.execution_queue.is_empty() {
            return None;
        }

        let next = self.execution_queue.remove(0);
        self.workflows[next].status = WorkflowStatus::Running;
        self.current_execution = Some(next);
        self.logs
            .push(format!("Executing workflow: {}", self.workflows[next].name));
        logging::info(&format!(
            "Executing workflow: {}",
            self.workflows[next].name
        ));

        // Initialize execution details
        self.workflows[next].execution_details = Some(WorkflowExecution {
            jobs: Vec::new(),
            start_time: Local::now(),
            end_time: None,
            logs: vec!["Execution started".to_string()],
            progress: 0.0, // Just started
        });

        Some(next)
    }

    // Toggle detailed view mode
    pub fn toggle_detailed_view(&mut self) {
        self.detailed_view = !self.detailed_view;

        // When entering detailed view, make sure step selection is initialized
        if self.detailed_view {
            // Ensure the step_table_state matches the step_list_state
            if let Some(step_idx) = self.step_list_state.selected() {
                self.step_table_state.select(Some(step_idx));
            } else {
                // Initialize both to the first item if nothing is selected
                self.step_list_state.select(Some(0));
                self.step_table_state.select(Some(0));
            }

            // Also ensure job_list_state has a selection
            if self.job_list_state.selected().is_none() {
                self.job_list_state.select(Some(0));
            }
        }
    }

    // Function to handle keyboard input for log search
    pub fn handle_log_search_input(&mut self, key: KeyCode) {
        match key {
            KeyCode::Esc => {
                self.log_search_active = false;
                self.log_search_query.clear();
                self.log_search_matches.clear();
            }
            KeyCode::Backspace => {
                self.log_search_query.pop();
                self.update_log_search_matches();
            }
            KeyCode::Enter => {
                self.log_search_active = false;
                // Keep the search query and matches
            }
            KeyCode::Char(c) => {
                self.log_search_query.push(c);
                self.update_log_search_matches();
            }
            _ => {}
        }
    }

    // Toggle log search mode
    pub fn toggle_log_search(&mut self) {
        self.log_search_active = !self.log_search_active;
        if !self.log_search_active {
            // Don't clear the query, this allows toggling the search UI while keeping the filter
        } else {
            // When activating search, update matches
            self.update_log_search_matches();
        }
    }

    // Toggle log filter
    pub fn toggle_log_filter(&mut self) {
        self.log_filter_level = match &self.log_filter_level {
            None => Some(LogFilterLevel::Info),
            Some(level) => Some(level.next()),
        };

        // Update search matches when filter changes
        self.update_log_search_matches();
    }

    // Clear log search and filter
    pub fn clear_log_search_and_filter(&mut self) {
        self.log_search_query.clear();
        self.log_filter_level = None;
        self.log_search_matches.clear();
        self.log_search_match_idx = 0;
    }

    // Update matches based on current search and filter
    pub fn update_log_search_matches(&mut self) {
        self.log_search_matches.clear();
        self.log_search_match_idx = 0;

        // Get all logs (app logs + system logs)
        let mut all_logs = Vec::new();
        for log in &self.logs {
            all_logs.push(log.clone());
        }
        for log in logging::get_logs() {
            all_logs.push(log.clone());
        }

        // Apply filter and search
        for (idx, log) in all_logs.iter().enumerate() {
            let passes_filter = match &self.log_filter_level {
                None => true,
                Some(level) => level.matches(log),
            };

            let matches_search = if self.log_search_query.is_empty() {
                true
            } else {
                log.to_lowercase()
                    .contains(&self.log_search_query.to_lowercase())
            };

            if passes_filter && matches_search {
                self.log_search_matches.push(idx);
            }
        }

        // Jump to first match and provide feedback
        if !self.log_search_matches.is_empty() {
            // Jump to the first match
            if let Some(&idx) = self.log_search_matches.first() {
                self.log_scroll = idx;

                if !self.log_search_query.is_empty() {
                    self.set_status_message(format!(
                        "Found {} matches for '{}'",
                        self.log_search_matches.len(),
                        self.log_search_query
                    ));
                }
            }
        } else if !self.log_search_query.is_empty() {
            // No matches found
            self.set_status_message(format!("No matches found for '{}'", self.log_search_query));
        }
    }
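The matching rule in `update_log_search_matches` is a conjunction: a log line is kept when it passes the level filter and contains the query case-insensitively. A condensed, runnable sketch of that predicate (the `contains("ERROR")` check stands in for `LogFilterLevel::matches`, which is an assumption here):

```rust
fn main() {
    let logs = vec![
        "[12:00:01] INFO: started".to_string(),
        "[12:00:02] ERROR: boom".to_string(),
    ];
    let query = "error";

    let mut matches = Vec::new();
    for (idx, log) in logs.iter().enumerate() {
        // Stand-in for LogFilterLevel::matches (illustrative only)
        let passes_filter = log.contains("ERROR");
        // Case-insensitive substring search, as in the method above
        let matches_search = log.to_lowercase().contains(&query.to_lowercase());
        if passes_filter && matches_search {
            matches.push(idx);
        }
    }
    assert_eq!(matches, vec![1]);
}
```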

    // Navigate to next search match
    pub fn next_search_match(&mut self) {
        if !self.log_search_matches.is_empty() {
            self.log_search_match_idx =
                (self.log_search_match_idx + 1) % self.log_search_matches.len();
            if let Some(&idx) = self.log_search_matches.get(self.log_search_match_idx) {
                self.log_scroll = idx;

                // Set status message showing which match we're on
                self.set_status_message(format!(
                    "Search match {}/{} for '{}'",
                    self.log_search_match_idx + 1,
                    self.log_search_matches.len(),
                    self.log_search_query
                ));
            }
        }
    }

    // Navigate to previous search match
    pub fn previous_search_match(&mut self) {
        if !self.log_search_matches.is_empty() {
            self.log_search_match_idx = if self.log_search_match_idx == 0 {
                self.log_search_matches.len() - 1
            } else {
                self.log_search_match_idx - 1
            };
            if let Some(&idx) = self.log_search_matches.get(self.log_search_match_idx) {
                self.log_scroll = idx;

                // Set status message showing which match we're on
                self.set_status_message(format!(
                    "Search match {}/{} for '{}'",
                    self.log_search_match_idx + 1,
                    self.log_search_matches.len(),
                    self.log_search_query
                ));
            }
        }
    }

    // Scroll logs up
    pub fn scroll_logs_up(&mut self) {
        self.log_scroll = self.log_scroll.saturating_sub(1);
    }

    // Scroll logs down
    pub fn scroll_logs_down(&mut self) {
        // Get total log count including system logs
        let total_logs = self.logs.len() + logging::get_logs().len();
        if total_logs > 0 {
            self.log_scroll = (self.log_scroll + 1).min(total_logs - 1);
        }
    }

    // Update progress for running workflows
    pub fn update_running_workflow_progress(&mut self) {
        if let Some(idx) = self.current_execution {
            if let Some(execution) = &mut self.workflows[idx].execution_details {
                if execution.end_time.is_none() {
                    // Gradually increase progress for visual feedback
                    execution.progress = (execution.progress + 0.01).min(0.95);
                }
            }
        }
    }

    // Set a temporary status message to be displayed in the UI
    pub fn set_status_message(&mut self, message: String) {
        self.status_message = Some(message);
        self.status_message_time = Some(Instant::now());
    }

    // Check if tick should happen
    pub fn tick(&mut self) -> bool {
        let now = Instant::now();

        // Check if we should clear a status message (after 3 seconds)
        if let Some(message_time) = self.status_message_time {
            if now.duration_since(message_time).as_secs() >= 3 {
                self.status_message = None;
                self.status_message_time = None;
            }
        }

        if now.duration_since(self.last_tick) >= self.tick_rate {
            self.last_tick = now;
            true
        } else {
            false
        }
    }

    // Trigger the selected workflow
    pub fn trigger_selected_workflow(&mut self) {
        if let Some(selected_idx) = self.workflow_list_state.selected() {
            if selected_idx < self.workflows.len() {
                let workflow = &self.workflows[selected_idx];

                if workflow.name.is_empty() {
                    let timestamp = Local::now().format("%H:%M:%S").to_string();
                    self.logs
                        .push(format!("[{}] Error: Invalid workflow selection", timestamp));
                    logging::error("Invalid workflow selection in trigger_selected_workflow");
                    return;
                }

                // Set up background task to execute the workflow via GitHub Actions REST API
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Triggering workflow: {}",
                    timestamp, workflow.name
                ));
                logging::info(&format!("Triggering workflow: {}", workflow.name));

                // Clone necessary values for the async task
                let workflow_name = workflow.name.clone();
                let tx_clone = self.tx.clone();

                // Set this tab as the current execution to ensure it shows in the Execution tab
                self.current_execution = Some(selected_idx);

                // Switch to execution tab for better user feedback
                self.selected_tab = 1; // Switch to Execution tab manually to avoid the borrowing issue

                // Create a thread instead of using tokio runtime directly since send() is not async
                std::thread::spawn(move || {
                    // Create a runtime for the thread
                    let rt = match tokio::runtime::Runtime::new() {
                        Ok(runtime) => runtime,
                        Err(e) => {
                            let _ = tx_clone.send((
                                selected_idx,
                                Err(format!("Failed to create Tokio runtime: {}", e)),
                            ));
                            return;
                        }
                    };

                    // Execute the GitHub Actions trigger API call
                    let result = rt.block_on(async {
                        crate::handlers::workflow::execute_curl_trigger(&workflow_name, None).await
                    });

                    // Send the result back to the main thread
                    if let Err(e) = tx_clone.send((selected_idx, result)) {
                        logging::error(&format!("Error sending trigger result: {}", e));
                    }
                });
            } else {
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs
                    .push(format!("[{}] No workflow selected to trigger", timestamp));
                logging::warning("No workflow selected to trigger");
            }
        } else {
            self.logs
                .push("No workflow selected to trigger".to_string());
            logging::warning("No workflow selected to trigger");
        }
    }

    // Reset a workflow's status to NotStarted
    pub fn reset_workflow_status(&mut self) {
        // Log whether a selection exists
        if self.workflow_list_state.selected().is_none() {
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            self.logs.push(format!(
                "[{}] Debug: No workflow selected for reset",
                timestamp
            ));
            logging::warning("No workflow selected for reset");
            return;
        }

        if let Some(idx) = self.workflow_list_state.selected() {
            if idx < self.workflows.len() {
                let workflow = &mut self.workflows[idx];
                // Log before status
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Debug: Attempting to reset workflow '{}' from {:?} state",
                    timestamp, workflow.name, workflow.status
                ));

                // Debug: Reset unconditionally for testing
                // if workflow.status != WorkflowStatus::Running {
                let old_status = match workflow.status {
                    WorkflowStatus::Success => "Success",
                    WorkflowStatus::Failed => "Failed",
                    WorkflowStatus::Skipped => "Skipped",
                    WorkflowStatus::NotStarted => "NotStarted",
                    WorkflowStatus::Running => "Running",
                };

                // Store workflow name for the success message
                let workflow_name = workflow.name.clone();

                // Reset regardless of current status (for debugging)
                workflow.status = WorkflowStatus::NotStarted;
                // Clear execution details to reset all state
                workflow.execution_details = None;

                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Reset workflow '{}' from {} state to NotStarted - status is now {:?}",
                    timestamp, workflow.name, old_status, workflow.status
                ));
                logging::info(&format!(
                    "Reset workflow '{}' from {} state to NotStarted - status is now {:?}",
                    workflow.name, old_status, workflow.status
                ));

                // Set a success status message
                self.set_status_message(format!("✅ Workflow '{}' has been reset!", workflow_name));
            }
        }
    }
}
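The status-message lifecycle above is worth pulling out: `set_status_message` stamps an `Instant`, and `tick()` clears the message once three seconds have elapsed. A minimal, runnable sketch of just that expiry logic:

```rust
use std::time::{Duration, Instant};

// Minimal model of the 3-second status-message expiry inside tick().
struct Status {
    message: Option<String>,
    set_at: Option<Instant>,
}

impl Status {
    fn tick(&mut self) {
        if let Some(t) = self.set_at {
            if t.elapsed() >= Duration::from_secs(3) {
                self.message = None;
                self.set_at = None;
            }
        }
    }
}

fn main() {
    let mut s = Status {
        message: Some("✅ reset!".to_string()),
        set_at: Some(Instant::now()),
    };
    s.tick(); // called well within 3 seconds: message is retained
    assert!(s.message.is_some());
}
```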
53
crates/ui/src/components/button.rs
Normal file
@@ -0,0 +1,53 @@
// Button component
use ratatui::{
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::Paragraph,
};

/// A simple button component for the TUI
pub struct Button {
    pub label: String,
    pub is_selected: bool,
    pub is_active: bool,
}

impl Button {
    /// Create a new button
    pub fn new(label: &str) -> Self {
        Button {
            label: label.to_string(),
            is_selected: false,
            is_active: true,
        }
    }

    /// Set selected state
    pub fn selected(mut self, is_selected: bool) -> Self {
        self.is_selected = is_selected;
        self
    }

    /// Set active state
    pub fn active(mut self, is_active: bool) -> Self {
        self.is_active = is_active;
        self
    }

    /// Render the button
    pub fn render(&self) -> Paragraph<'_> {
        let (fg, bg) = match (self.is_selected, self.is_active) {
            (true, true) => (Color::Black, Color::Yellow),
            (true, false) => (Color::Black, Color::DarkGray),
            (false, true) => (Color::White, Color::Blue),
            (false, false) => (Color::DarkGray, Color::Black),
        };

        let style = Style::default().fg(fg).bg(bg).add_modifier(Modifier::BOLD);

        Paragraph::new(Line::from(vec![Span::styled(
            format!(" {} ", self.label),
            style,
        )]))
    }
}
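A usage sketch for the consuming-builder API above. This assumes the surrounding crate context: `f` and `area` would come from a ratatui `terminal.draw(|f| { ... })` closure, and `app_running` is an illustrative flag, not a field of this component:

```rust
// Inside a terminal.draw(|f| { ... }) closure (sketch, not crate code):
let run_button = Button::new("Run")
    .selected(true)        // highlighted as the focused control
    .active(!app_running); // greyed out while a workflow is executing
f.render_widget(run_button.render(), area);
```

Each setter takes `mut self` and returns `Self`, so the chain moves the button through each call rather than mutating it in place.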
60
crates/ui/src/components/checkbox.rs
Normal file
@@ -0,0 +1,60 @@
// Checkbox component
use ratatui::{
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::Paragraph,
};

/// A simple checkbox component for the TUI
pub struct Checkbox {
    pub label: String,
    pub is_checked: bool,
    pub is_selected: bool,
}

impl Checkbox {
    /// Create a new checkbox
    pub fn new(label: &str) -> Self {
        Checkbox {
            label: label.to_string(),
            is_checked: false,
            is_selected: false,
        }
    }

    /// Set checked state
    pub fn checked(mut self, is_checked: bool) -> Self {
        self.is_checked = is_checked;
        self
    }

    /// Set selected state
    pub fn selected(mut self, is_selected: bool) -> Self {
        self.is_selected = is_selected;
        self
    }

    /// Toggle checked state
    pub fn toggle(&mut self) {
        self.is_checked = !self.is_checked;
    }

    /// Render the checkbox
    pub fn render(&self) -> Paragraph<'_> {
        let checkbox = if self.is_checked { "[✓]" } else { "[ ]" };

        let style = if self.is_selected {
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD)
        } else {
            Style::default().fg(Color::White)
        };

        Paragraph::new(Line::from(vec![
            Span::styled(checkbox, style),
            Span::raw(" "),
            Span::styled(&self.label, style),
        ]))
    }
}
12
crates/ui/src/components/mod.rs
Normal file
@@ -0,0 +1,12 @@
// UI Components
mod button;
mod checkbox;
mod progress_bar;

// Re-export components for easier access
pub use button::Button;
pub use checkbox::Checkbox;
pub use progress_bar::ProgressBar;

// This module will contain smaller reusable UI elements that
// can be shared between different views of the application.
53
crates/ui/src/components/progress_bar.rs
Normal file
@@ -0,0 +1,53 @@
// Progress bar component
use ratatui::{
    style::{Color, Style},
    widgets::Gauge,
};

/// A simple progress bar component for the TUI
pub struct ProgressBar {
    pub progress: f64,
    pub label: Option<String>,
    pub color: Color,
}

impl ProgressBar {
    /// Create a new progress bar
    pub fn new(progress: f64) -> Self {
        ProgressBar {
            progress: progress.clamp(0.0, 1.0),
            label: None,
            color: Color::Blue,
        }
    }

    /// Set label
    pub fn label(mut self, label: &str) -> Self {
        self.label = Some(label.to_string());
        self
    }

    /// Set color
    pub fn color(mut self, color: Color) -> Self {
        self.color = color;
        self
    }

    /// Update progress value
    pub fn update(&mut self, progress: f64) {
        self.progress = progress.clamp(0.0, 1.0);
    }

    /// Render the progress bar
    pub fn render(&self) -> Gauge<'_> {
        let label = match &self.label {
            Some(lbl) => format!("{} {:.0}%", lbl, self.progress * 100.0),
            None => format!("{:.0}%", self.progress * 100.0),
        };

        Gauge::default()
            .gauge_style(Style::default().fg(self.color).bg(Color::Black))
            .label(label)
            .ratio(self.progress)
    }
}
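The three components share one design: plain structs with builder-style setters and a `render()` that produces a ratatui widget, leaving layout to the caller. A combined sketch of how a draw closure might use Checkbox and ProgressBar together (again assuming crate context; `f` and the `rows` layout chunks are illustrative):

```rust
// Inside a terminal.draw(|f| { ... }) closure (sketch, not crate code):
let docker_mode = Checkbox::new("Docker runtime")
    .checked(true)
    .selected(false);
f.render_widget(docker_mode.render(), rows[0]);

let progress = ProgressBar::new(0.42) // clamped to [0.0, 1.0] by the constructor
    .label("Executing")
    .color(Color::Green);
f.render_widget(progress.render(), rows[1]);
```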
3
crates/ui/src/handlers/mod.rs
Normal file
@@ -0,0 +1,3 @@
// Handlers for the UI

pub mod workflow;
528
crates/ui/src/handlers/workflow.rs
Normal file
@@ -0,0 +1,528 @@
// Workflow handlers
use crate::app::App;
use crate::models::{ExecutionResultMsg, WorkflowExecution, WorkflowStatus};
use chrono::Local;
use evaluator::evaluate_workflow_file;
use executor::{self, JobStatus, RuntimeType, StepStatus};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::thread;

// Validate a workflow or directory containing workflows
pub fn validate_workflow(path: &Path, verbose: bool) -> io::Result<()> {
    let mut workflows = Vec::new();

    if path.is_dir() {
        let entries = std::fs::read_dir(path)?;

        for entry in entries {
            let entry = entry?;
            let entry_path = entry.path();

            if entry_path.is_file() && utils::is_workflow_file(&entry_path) {
                workflows.push(entry_path);
            }
        }
    } else if path.is_file() {
        workflows.push(PathBuf::from(path));
    } else {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            format!("Path does not exist: {}", path.display()),
        ));
    }

    let mut valid_count = 0;
    let mut invalid_count = 0;

    println!("Validating {} workflow file(s)...", workflows.len());

    for workflow_path in workflows {
        match evaluate_workflow_file(&workflow_path, verbose) {
            Ok(result) => {
                if result.is_valid {
                    println!("✅ Valid: {}", workflow_path.display());
                    valid_count += 1;
                } else {
                    println!("❌ Invalid: {}", workflow_path.display());
                    for (i, issue) in result.issues.iter().enumerate() {
                        println!("  {}. {}", i + 1, issue);
                    }
                    invalid_count += 1;
                }
            }
            Err(e) => {
                println!("❌ Error processing {}: {}", workflow_path.display(), e);
                invalid_count += 1;
            }
        }
    }

    println!(
        "\nSummary: {} valid, {} invalid",
        valid_count, invalid_count
    );

    Ok(())
}
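Note that the directory branch uses a single `read_dir`, so only the top level of the directory is scanned, not subdirectories. A hypothetical call site (the path is a placeholder, and `validate_workflow` is assumed to be in scope from this module):

```rust
use std::path::Path;

fn main() -> std::io::Result<()> {
    // Validate every workflow file directly under .github/workflows, verbosely.
    validate_workflow(Path::new(".github/workflows"), true)
}
```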

// Execute a workflow through the CLI
pub async fn execute_workflow_cli(
    path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
) -> io::Result<()> {
    if !path.exists() {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            format!("Workflow file does not exist: {}", path.display()),
        ));
    }

    println!("Validating workflow...");
    match evaluate_workflow_file(path, false) {
        Ok(result) => {
            if !result.is_valid {
                println!("❌ Cannot execute invalid workflow: {}", path.display());
                for (i, issue) in result.issues.iter().enumerate() {
                    println!("  {}. {}", i + 1, issue);
                }
                return Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    "Workflow validation failed",
                ));
            }
        }
        Err(e) => {
            return Err(io::Error::other(format!(
                "Error validating workflow: {}",
                e
            )));
        }
    }

    // Check Docker availability if Docker runtime is selected
    let runtime_type = match runtime_type {
        RuntimeType::Docker => {
            if !executor::docker::is_available() {
                println!("⚠️ Docker is not available. Using emulation mode instead.");
                logging::warning("Docker is not available. Using emulation mode instead.");
                RuntimeType::Emulation
            } else {
                RuntimeType::Docker
            }
        }
        RuntimeType::Emulation => RuntimeType::Emulation,
    };

    println!("Executing workflow: {}", path.display());
    println!("Runtime mode: {:?}", runtime_type);

    // Log the start of the execution in debug mode with more details
    logging::debug(&format!(
        "Starting workflow execution: path={}, runtime={:?}, verbose={}",
        path.display(),
        runtime_type,
        verbose
    ));

    let config = executor::ExecutionConfig {
        runtime_type,
        verbose,
        preserve_containers_on_failure: false, // Default for this path
    };

    match executor::execute_workflow(path, config).await {
        Ok(result) => {
            println!("\nWorkflow execution results:");

            // Track if the workflow had any failures
            let mut any_job_failed = false;

            for job in &result.jobs {
                match job.status {
                    JobStatus::Success => {
                        println!("\n✅ Job succeeded: {}", job.name);
                    }
                    JobStatus::Failure => {
                        println!("\n❌ Job failed: {}", job.name);
                        any_job_failed = true;
                    }
                    JobStatus::Skipped => {
                        println!("\n⏭️ Job skipped: {}", job.name);
                    }
                }

                println!("-------------------------");

                // Log the job details for debug purposes
                logging::debug(&format!("Job: {}, Status: {:?}", job.name, job.status));

                for step in job.steps.iter() {
                    match step.status {
                        StepStatus::Success => {
                            println!("  ✅ {}", step.name);

                            // Check if this is a GitHub action output that should be hidden
                            let should_hide = std::env::var("WRKFLW_HIDE_ACTION_MESSAGES")
                                .map(|val| val == "true")
                                .unwrap_or(false)
                                && step.output.contains("Would execute GitHub action:");

                            // Only show output if not hidden and it's short
                            if !should_hide
                                && !step.output.trim().is_empty()
                                && step.output.lines().count() <= 3
                            {
                                // For short outputs, show directly
                                println!("    {}", step.output.trim());
                            }
                        }
                        StepStatus::Failure => {
                            println!("  ❌ {}", step.name);

                            // Ensure we capture and show exit code
                            if let Some(exit_code) = step
                                .output
                                .lines()
                                .find(|line| line.trim().starts_with("Exit code:"))
                                .map(|line| line.trim().to_string())
                            {
                                println!("    {}", exit_code);
                            }

                            // Show command/run details in debug mode
                            if logging::get_log_level() <= logging::LogLevel::Debug {
                                if let Some(cmd_output) = step
                                    .output
                                    .lines()
                                    .skip_while(|l| !l.trim().starts_with("$"))
                                    .take(1)
                                    .next()
                                {
                                    println!("    Command: {}", cmd_output.trim());
                                }
                            }

                            // Always show error output from failed steps, but keep it to a reasonable length
                            let output_lines: Vec<&str> = step
                                .output
                                .lines()
                                .filter(|line| !line.trim().starts_with("Exit code:"))
                                .collect();

                            if !output_lines.is_empty() {
                                println!("    Error output:");
                                for line in output_lines.iter().take(10) {
                                    println!("      {}", line.trim().replace('\n', "\n      "));
                                }

                                if output_lines.len() > 10 {
                                    println!(
                                        "      ... (and {} more lines)",
                                        output_lines.len() - 10
                                    );
                                    println!("      Use --debug to see full output");
                                }
                            }
                        }
                        StepStatus::Skipped => {
                            println!("  ⏭️ {} (skipped)", step.name);
                        }
                    }

                    // Always log the step details for debug purposes
                    logging::debug(&format!(
                        "Step: {}, Status: {:?}, Output length: {} lines",
                        step.name,
                        step.status,
                        step.output.lines().count()
                    ));

                    // In debug mode, log all step output
                    if logging::get_log_level() == logging::LogLevel::Debug
                        && !step.output.trim().is_empty()
                    {
                        logging::debug(&format!(
                            "Step output for '{}': \n{}",
                            step.name, step.output
                        ));
                    }
                }
            }

            if any_job_failed {
                println!("\n❌ Workflow completed with failures");
                // In the case of failure, we'll also inform the user about the debug option
                // if they're not already using it
                if logging::get_log_level() > logging::LogLevel::Debug {
                    println!("   Run with --debug for more detailed output");
                }
            } else {
                println!("\n✅ Workflow completed successfully!");
            }

            Ok(())
        }
        Err(e) => {
            println!("❌ Failed to execute workflow: {}", e);
            logging::error(&format!("Failed to execute workflow: {}", e));
            Err(io::Error::other(e))
        }
    }
}
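The failure branch extracts the `Exit code:` line from step output with a small iterator chain. A runnable, std-only sketch of exactly that extraction:

```rust
// Condensed sketch of the "Exit code:" line extraction used above.
fn main() {
    let output = "step output\nExit code: 2\nmore output";
    let exit_code = output
        .lines()
        .find(|line| line.trim().starts_with("Exit code:"))
        .map(|line| line.trim().to_string());
    assert_eq!(exit_code.as_deref(), Some("Exit code: 2"));
}
```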

// Helper function to trigger a workflow remotely via the GitHub REST API
// (named after an earlier curl-based implementation; it now uses reqwest)
pub async fn execute_curl_trigger(
    workflow_name: &str,
    branch: Option<&str>,
) -> Result<(Vec<executor::JobResult>, ()), String> {
    // Get GitHub token
    let token = std::env::var("GITHUB_TOKEN").map_err(|_| {
        "GitHub token not found. Please set GITHUB_TOKEN environment variable".to_string()
    })?;

    // Debug log to check if GITHUB_TOKEN is set
    match std::env::var("GITHUB_TOKEN") {
        // Log only a short prefix for security; token.get(..5) avoids the
        // panic that slicing &token[..5] would cause on tokens shorter than
        // five bytes.
        Ok(token) => logging::info(&format!(
            "GITHUB_TOKEN is set: {}",
            token.get(..5).unwrap_or("*")
        )),
        Err(_) => logging::error("GITHUB_TOKEN is not set"),
    }

    // Get repository information
    let repo_info =
        github::get_repo_info().map_err(|e| format!("Failed to get repository info: {}", e))?;

    // Determine branch to use
    let branch_ref = branch.unwrap_or(&repo_info.default_branch);

    // Extract just the workflow name from the path if it's a full path
    let workflow_name = if workflow_name.contains('/') {
        Path::new(workflow_name)
            .file_stem()
            .and_then(|s| s.to_str())
            .ok_or_else(|| "Invalid workflow name".to_string())?
    } else {
        workflow_name
    };

    logging::info(&format!("Using workflow name: {}", workflow_name));

    // Construct JSON payload
    let payload = serde_json::json!({
        "ref": branch_ref
    });

    // Construct API URL
    let url = format!(
        "https://api.github.com/repos/{}/{}/actions/workflows/{}.yml/dispatches",
        repo_info.owner, repo_info.repo, workflow_name
    );

    logging::info(&format!("Triggering workflow at URL: {}", url));

    // Create a reqwest client
    let client = reqwest::Client::new();

    // Send the request using reqwest
    let response = client
        .post(&url)
        .header("Authorization", format!("Bearer {}", token.trim()))
        .header("Accept", "application/vnd.github.v3+json")
        .header("Content-Type", "application/json")
        .header("User-Agent", "wrkflw-cli")
        .json(&payload)
        .send()
        .await
        .map_err(|e| format!("Failed to send request: {}", e))?;

    if !response.status().is_success() {
        let status = response.status().as_u16();
        let error_message = response
            .text()
            .await
            .unwrap_or_else(|_| format!("Unknown error (HTTP {})", status));

        return Err(format!("API error: {} - {}", status, error_message));
    }

    // Success message with URL to view the workflow
    let success_msg = format!(
        "Workflow triggered successfully. View it at: https://github.com/{}/{}/actions/workflows/{}.yml",
        repo_info.owner, repo_info.repo, workflow_name
    );

    // Create a job result structure
    let job_result = executor::JobResult {
        name: "GitHub Trigger".to_string(),
        status: executor::JobStatus::Success,
        steps: vec![executor::StepResult {
            name: "Remote Trigger".to_string(),
            status: executor::StepStatus::Success,
            output: success_msg,
        }],
        logs: "Workflow triggered remotely on GitHub".to_string(),
    };

    Ok((vec![job_result], ()))
}
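The endpoint used here is GitHub's documented `workflow_dispatch` API: `POST /repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches` with a JSON body naming the `ref` to run against. A self-contained sketch of the same call, assuming `reqwest` (with its `json` feature), `tokio`, and `serde_json` as dependencies — the same stack the handler above already uses; `OWNER`/`REPO` are placeholders:

```rust
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let token = std::env::var("GITHUB_TOKEN")?;
    // Placeholder repository slug; substitute your own owner/repo.
    let url = "https://api.github.com/repos/OWNER/REPO/actions/workflows/ci.yml/dispatches";

    let resp = reqwest::Client::new()
        .post(url)
        .header("Authorization", format!("Bearer {}", token.trim()))
        .header("Accept", "application/vnd.github.v3+json")
        .header("User-Agent", "wrkflw-cli")
        .json(&json!({ "ref": "main" }))
        .send()
        .await?;

    // GitHub returns 204 No Content on a successful dispatch.
    println!("HTTP {}", resp.status());
    Ok(())
}
```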
|
||||
// Extract common workflow execution logic to avoid duplication
|
||||
pub fn start_next_workflow_execution(
|
||||
app: &mut App,
|
||||
tx_clone: &mpsc::Sender<ExecutionResultMsg>,
|
||||
verbose: bool,
|
||||
) {
|
||||
if let Some(next_idx) = app.get_next_workflow_to_execute() {
|
||||
app.current_execution = Some(next_idx);
|
||||
let tx_clone_inner = tx_clone.clone();
|
||||
let workflow_path = app.workflows[next_idx].path.clone();
|
||||
|
||||
// Log whether verbose mode is enabled
|
||||
if verbose {
|
||||
app.logs
|
||||
.push("Verbose mode: Step outputs will be displayed in full".to_string());
|
||||
logging::info("Verbose mode: Step outputs will be displayed in full");
|
||||
} else {
|
||||
app.logs.push(
|
||||
"Standard mode: Only step status will be shown (use --verbose for full output)"
|
||||
.to_string(),
|
||||
);
|
||||
logging::info(
|
||||
"Standard mode: Only step status will be shown (use --verbose for full output)",
|
||||
);
|
||||
}
|
||||
|
||||
// Check Docker availability again if Docker runtime is selected
|
||||
let runtime_type = match app.runtime_type {
|
||||
RuntimeType::Docker => {
|
||||
// Use safe FD redirection to check Docker availability
|
||||
let is_docker_available =
|
||||
match utils::fd::with_stderr_to_null(executor::docker::is_available) {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
logging::debug(
|
||||
"Failed to redirect stderr when checking Docker availability.",
|
||||
);
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
if !is_docker_available {
|
||||
app.logs
|
||||
.push("Docker is not available. Using emulation mode instead.".to_string());
|
||||
logging::warning("Docker is not available. Using emulation mode instead.");
|
||||
RuntimeType::Emulation
|
||||
} else {
|
||||
RuntimeType::Docker
|
||||
}
|
||||
}
|
||||
RuntimeType::Emulation => RuntimeType::Emulation,
|
||||
};
|
||||
|
||||
let validation_mode = app.validation_mode;
|
||||
let preserve_containers_on_failure = app.preserve_containers_on_failure;
|
||||
|
||||
// Update workflow status and add execution details
|
||||
app.workflows[next_idx].status = WorkflowStatus::Running;
|
||||
|
||||
// Initialize execution details if not already done
|
||||
if app.workflows[next_idx].execution_details.is_none() {
|
||||
app.workflows[next_idx].execution_details = Some(WorkflowExecution {
|
||||
jobs: Vec::new(),
|
||||
start_time: Local::now(),
|
||||
end_time: None,
|
||||
logs: Vec::new(),
|
||||
progress: 0.0,
|
||||
});
|
||||
}
|
||||
|
||||
thread::spawn(move || {
|
||||
let rt = match tokio::runtime::Runtime::new() {
|
||||
Ok(runtime) => runtime,
|
||||
Err(e) => {
|
||||
let _ = tx_clone_inner.send((
|
||||
next_idx,
|
||||
Err(format!("Failed to create Tokio runtime: {}", e)),
|
||||
));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let result = rt.block_on(async {
|
||||
if validation_mode {
|
||||
// Perform validation instead of execution
|
||||
match evaluate_workflow_file(&workflow_path, verbose) {
|
||||
Ok(validation_result) => {
|
||||
// Create execution result based on validation
|
||||
let status = if validation_result.is_valid {
|
||||
executor::JobStatus::Success
|
||||
} else {
|
||||
executor::JobStatus::Failure
|
||||
};
|
||||
|
||||
// Create a synthetic job result for validation
|
||||
let jobs = vec![executor::JobResult {
|
||||
name: "Validation".to_string(),
|
||||
status,
|
||||
steps: vec![executor::StepResult {
|
||||
name: "Validator".to_string(),
|
||||
status: if validation_result.is_valid {
|
||||
executor::StepStatus::Success
|
||||
} else {
|
||||
executor::StepStatus::Failure
|
||||
},
|
||||
output: validation_result.issues.join("\n"),
|
||||
}],
|
||||
logs: format!(
|
||||
"Validation result: {}",
|
||||
if validation_result.is_valid {
|
||||
"PASSED"
|
||||
} else {
|
||||
"FAILED"
|
||||
}
|
||||
),
|
||||
}];
|
||||
|
||||
Ok((jobs, ()))
|
||||
}
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
} else {
|
||||
// Use safe FD redirection for execution
|
||||
let config = executor::ExecutionConfig {
|
||||
runtime_type,
|
||||
verbose,
|
||||
preserve_containers_on_failure,
|
||||
};
|
||||
|
||||
let execution_result = utils::fd::with_stderr_to_null(|| {
|
||||
futures::executor::block_on(async {
|
||||
executor::execute_workflow(&workflow_path, config).await
|
||||
})
|
||||
})
|
||||
.map_err(|e| format!("Failed to redirect stderr during execution: {}", e))?;
|
||||
|
||||
match execution_result {
|
||||
Ok(execution_result) => {
|
||||
// Send back the job results in a wrapped result
|
||||
Ok((execution_result.jobs, ()))
|
||||
}
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Only send if we get a valid result
|
||||
if let Err(e) = tx_clone_inner.send((next_idx, result)) {
|
||||
logging::error(&format!("Error sending execution result: {}", e));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
app.running = false;
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
app.logs
|
||||
.push(format!("[{}] All workflows completed execution", timestamp));
|
||||
logging::info("All workflows completed execution");
|
||||
}
|
||||
}
|
||||
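The sender half above pushes one `(index, result)` message per workflow through the channel. As a rough sketch of the receiving side (not part of the diff — the real loop lives in the TUI event handling), draining the channel without blocking the render loop could look like this; the simplified payload type is a stand-in for `executor::JobResult`:

```rust
use std::sync::mpsc;

// Simplified stand-in for the real (usize, Result<(Vec<JobResult>, ()), String>)
type Msg = (usize, Result<(Vec<String>, ()), String>);

fn drain_results(rx: &mpsc::Receiver<Msg>) {
    // try_recv() returns immediately, so the UI can keep redrawing between polls
    while let Ok((idx, result)) = rx.try_recv() {
        match result {
            Ok((jobs, ())) => println!("workflow {idx}: {} job(s) finished", jobs.len()),
            Err(err) => eprintln!("workflow {idx} failed: {err}"),
        }
    }
}
```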
crates/ui/src/lib.rs (new file)
@@ -0,0 +1,22 @@
// Modular UI crate for wrkflw
//
// This crate is organized into several modules:
// - app: Contains the main App state and TUI entry point
// - models: Contains the data structures for the UI
// - components: Contains reusable UI elements
// - handlers: Contains workflow handling logic
// - utils: Contains utility functions
// - views: Contains UI rendering code

// Re-export public modules
pub mod app;
pub mod components;
pub mod handlers;
pub mod models;
pub mod utils;
pub mod views;

// Re-export main entry points
pub use app::run_wrkflw_tui;
pub use handlers::workflow::execute_workflow_cli;
pub use handlers::workflow::validate_workflow;
crates/ui/src/models/mod.rs (new file)
@@ -0,0 +1,99 @@
// UI Models for wrkflw
use chrono::Local;
use executor::{JobStatus, StepStatus};
use std::path::PathBuf;

/// Type alias for the complex execution result type
pub type ExecutionResultMsg = (usize, Result<(Vec<executor::JobResult>, ()), String>);

/// Represents an individual workflow file
pub struct Workflow {
    pub name: String,
    pub path: PathBuf,
    pub selected: bool,
    pub status: WorkflowStatus,
    pub execution_details: Option<WorkflowExecution>,
}

/// Status of a workflow
#[derive(Debug, Clone, PartialEq)]
pub enum WorkflowStatus {
    NotStarted,
    Running,
    Success,
    Failed,
    Skipped,
}

/// Detailed execution information
pub struct WorkflowExecution {
    pub jobs: Vec<JobExecution>,
    pub start_time: chrono::DateTime<Local>,
    pub end_time: Option<chrono::DateTime<Local>>,
    pub logs: Vec<String>,
    pub progress: f64, // 0.0 - 1.0 for progress bar
}

/// Job execution details
pub struct JobExecution {
    pub name: String,
    pub status: JobStatus,
    pub steps: Vec<StepExecution>,
    pub logs: Vec<String>,
}

/// Step execution details
pub struct StepExecution {
    pub name: String,
    pub status: StepStatus,
    pub output: String,
}

/// Log filter levels
pub enum LogFilterLevel {
    Info,
    Warning,
    Error,
    Success,
    Trigger,
    All,
}

impl LogFilterLevel {
    pub fn matches(&self, log: &str) -> bool {
        match self {
            LogFilterLevel::Info => {
                log.contains("ℹ️") || (log.contains("INFO") && !log.contains("SUCCESS"))
            }
            LogFilterLevel::Warning => log.contains("⚠️") || log.contains("WARN"),
            LogFilterLevel::Error => log.contains("❌") || log.contains("ERROR"),
            LogFilterLevel::Success => log.contains("SUCCESS") || log.contains("success"),
            LogFilterLevel::Trigger => {
                log.contains("Triggering") || log.contains("triggered") || log.contains("TRIG")
            }
            LogFilterLevel::All => true,
        }
    }

    pub fn next(&self) -> Self {
        match self {
            LogFilterLevel::All => LogFilterLevel::Info,
            LogFilterLevel::Info => LogFilterLevel::Warning,
            LogFilterLevel::Warning => LogFilterLevel::Error,
            LogFilterLevel::Error => LogFilterLevel::Success,
            LogFilterLevel::Success => LogFilterLevel::Trigger,
            LogFilterLevel::Trigger => LogFilterLevel::All,
        }
    }

    pub fn to_string(&self) -> &str {
        match self {
            LogFilterLevel::All => "ALL",
            LogFilterLevel::Info => "INFO",
            LogFilterLevel::Warning => "WARNING",
            LogFilterLevel::Error => "ERROR",
            LogFilterLevel::Success => "SUCCESS",
            LogFilterLevel::Trigger => "TRIGGER",
        }
    }
}
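For orientation, here is how the two `LogFilterLevel` helpers above compose in practice (example only, not in the diff): `next()` cycles the filter one step and `matches()` does the substring classification.

```rust
fn demo_filter() {
    let mut filter = LogFilterLevel::All;
    filter = filter.next(); // All -> Info

    let line = "[12:00:01] INFO Loaded 3 workflows";
    assert!(filter.matches(line)); // contains "INFO" and no "SUCCESS"
    assert!(!LogFilterLevel::Error.matches(line)); // no "ERROR" or error emoji
}
```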
crates/ui/src/utils/mod.rs (new file)
@@ -0,0 +1,53 @@
// UI utilities
use crate::models::{Workflow, WorkflowStatus};
use std::path::{Path, PathBuf};
use utils::is_workflow_file;

/// Find and load all workflow files in a directory
pub fn load_workflows(dir_path: &Path) -> Vec<Workflow> {
    let mut workflows = Vec::new();

    // Default path is .github/workflows
    let default_workflows_dir = Path::new(".github").join("workflows");
    let is_default_dir = dir_path == default_workflows_dir || dir_path.ends_with("workflows");

    if let Ok(entries) = std::fs::read_dir(dir_path) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_file() && (is_workflow_file(&path) || !is_default_dir) {
                // Get just the base name without extension
                let name = path.file_stem().map_or_else(
                    || "[unknown]".to_string(),
                    |fname| fname.to_string_lossy().into_owned(),
                );

                workflows.push(Workflow {
                    name,
                    path,
                    selected: false,
                    status: WorkflowStatus::NotStarted,
                    execution_details: None,
                });
            }
        }
    }

    // Check for GitLab CI pipeline file in the root directory if we're in the default GitHub workflows dir
    if is_default_dir {
        // Look for .gitlab-ci.yml in the repository root
        let gitlab_ci_path = PathBuf::from(".gitlab-ci.yml");
        if gitlab_ci_path.exists() && gitlab_ci_path.is_file() {
            workflows.push(Workflow {
                name: "gitlab-ci".to_string(),
                path: gitlab_ci_path,
                selected: false,
                status: WorkflowStatus::NotStarted,
                execution_details: None,
            });
        }
    }

    // Sort workflows by name
    workflows.sort_by(|a, b| a.name.cmp(&b.name));
    workflows
}
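A call site for `load_workflows` might look like the following sketch (not in the diff); scanning the default directory also triggers the `.gitlab-ci.yml` root check above:

```rust
use std::path::Path;

fn list_workflows() {
    let workflows = load_workflows(Path::new(".github/workflows"));
    for wf in &workflows {
        println!("{:12} {}", wf.name, wf.path.display());
    }
}
```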
crates/ui/src/views/execution_tab.rs (new file)
@@ -0,0 +1,359 @@
// Execution tab rendering
use crate::app::App;
use crate::models::WorkflowStatus;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Gauge, List, ListItem, Paragraph},
    Frame,
};
use std::io;

// Render the execution tab
pub fn render_execution_tab(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Get the workflow index either from current_execution or selected workflow
    let current_workflow_idx = app
        .current_execution
        .or_else(|| app.workflow_list_state.selected())
        .filter(|&idx| idx < app.workflows.len());

    if let Some(idx) = current_workflow_idx {
        let workflow = &app.workflows[idx];

        // Split the area into sections
        let chunks = Layout::default()
            .direction(Direction::Vertical)
            .constraints(
                [
                    Constraint::Length(5), // Workflow info with progress bar
                    Constraint::Min(5),    // Jobs list or Remote execution info
                    Constraint::Length(7), // Execution info
                ]
                .as_ref(),
            )
            .margin(1)
            .split(area);

        // Workflow info section
        let status_text = match workflow.status {
            WorkflowStatus::NotStarted => "Not Started",
            WorkflowStatus::Running => "Running",
            WorkflowStatus::Success => "Success",
            WorkflowStatus::Failed => "Failed",
            WorkflowStatus::Skipped => "Skipped",
        };

        let status_style = match workflow.status {
            WorkflowStatus::NotStarted => Style::default().fg(Color::Gray),
            WorkflowStatus::Running => Style::default().fg(Color::Cyan),
            WorkflowStatus::Success => Style::default().fg(Color::Green),
            WorkflowStatus::Failed => Style::default().fg(Color::Red),
            WorkflowStatus::Skipped => Style::default().fg(Color::Yellow),
        };

        let mut workflow_info = vec![
            Line::from(vec![
                Span::styled("Workflow: ", Style::default().fg(Color::Blue)),
                Span::styled(
                    workflow.name.clone(),
                    Style::default()
                        .fg(Color::White)
                        .add_modifier(Modifier::BOLD),
                ),
            ]),
            Line::from(vec![
                Span::styled("Status: ", Style::default().fg(Color::Blue)),
                Span::styled(status_text, status_style),
            ]),
        ];

        // Add progress bar for running workflows or workflows with execution details
        if let Some(execution) = &workflow.execution_details {
            // Calculate progress
            let progress = execution.progress;

            // Add progress bar
            let gauge_color = match workflow.status {
                WorkflowStatus::Running => Color::Cyan,
                WorkflowStatus::Success => Color::Green,
                WorkflowStatus::Failed => Color::Red,
                _ => Color::Gray,
            };

            let progress_text = match workflow.status {
                WorkflowStatus::Running => format!("{:.0}%", progress * 100.0),
                WorkflowStatus::Success => "Completed".to_string(),
                WorkflowStatus::Failed => "Failed".to_string(),
                _ => "Not started".to_string(),
            };

            // Add empty line before progress bar
            workflow_info.push(Line::from(""));

            // Add the gauge widget to the paragraph data
            workflow_info.push(Line::from(vec![Span::styled(
                format!("Progress: {}", progress_text),
                Style::default().fg(Color::Blue),
            )]));

            let gauge = Gauge::default()
                .block(Block::default())
                .gauge_style(Style::default().fg(gauge_color).bg(Color::Black))
                .percent((progress * 100.0) as u16);

            // Render gauge separately after the paragraph
            let workflow_info_widget = Paragraph::new(workflow_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Workflow Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            let gauge_area = Rect {
                x: chunks[0].x + 2,
                y: chunks[0].y + 4,
                width: chunks[0].width - 4,
                height: 1,
            };

            f.render_widget(workflow_info_widget, chunks[0]);
            f.render_widget(gauge, gauge_area);

            // Jobs list section
            if execution.jobs.is_empty() {
                let placeholder = Paragraph::new("No jobs have started execution yet...")
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
                    )
                    .alignment(Alignment::Center);
                f.render_widget(placeholder, chunks[1]);
            } else {
                let job_items: Vec<ListItem> = execution
                    .jobs
                    .iter()
                    .map(|job| {
                        let status_symbol = match job.status {
                            executor::JobStatus::Success => "✅",
                            executor::JobStatus::Failure => "❌",
                            executor::JobStatus::Skipped => "⏭",
                        };

                        let status_style = match job.status {
                            executor::JobStatus::Success => Style::default().fg(Color::Green),
                            executor::JobStatus::Failure => Style::default().fg(Color::Red),
                            executor::JobStatus::Skipped => Style::default().fg(Color::Gray),
                        };

                        // Count completed and total steps
                        let total_steps = job.steps.len();
                        let completed_steps = job
                            .steps
                            .iter()
                            .filter(|s| {
                                s.status == executor::StepStatus::Success
                                    || s.status == executor::StepStatus::Failure
                            })
                            .count();

                        let steps_info = format!("[{}/{}]", completed_steps, total_steps);

                        ListItem::new(Line::from(vec![
                            Span::styled(status_symbol, status_style),
                            Span::raw(" "),
                            Span::styled(&job.name, Style::default().fg(Color::White)),
                            Span::raw(" "),
                            Span::styled(steps_info, Style::default().fg(Color::DarkGray)),
                        ]))
                    })
                    .collect();

                let jobs_list = List::new(job_items)
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
                    )
                    .highlight_style(
                        Style::default()
                            .bg(Color::DarkGray)
                            .add_modifier(Modifier::BOLD),
                    )
                    .highlight_symbol("» ");

                f.render_stateful_widget(jobs_list, chunks[1], &mut app.job_list_state);
            }

            // Execution info section
            let mut execution_info = Vec::new();

            execution_info.push(Line::from(vec![
                Span::styled("Started: ", Style::default().fg(Color::Blue)),
                Span::styled(
                    execution.start_time.format("%Y-%m-%d %H:%M:%S").to_string(),
                    Style::default().fg(Color::White),
                ),
            ]));

            if let Some(end_time) = execution.end_time {
                execution_info.push(Line::from(vec![
                    Span::styled("Finished: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        end_time.format("%Y-%m-%d %H:%M:%S").to_string(),
                        Style::default().fg(Color::White),
                    ),
                ]));

                // Calculate duration
                let duration = end_time.signed_duration_since(execution.start_time);
                execution_info.push(Line::from(vec![
                    Span::styled("Duration: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        format!(
                            "{}m {}s",
                            duration.num_minutes(),
                            duration.num_seconds() % 60
                        ),
                        Style::default().fg(Color::White),
                    ),
                ]));
            } else {
                // Show running time for active workflows
                let current_time = chrono::Local::now();
                let running_time = current_time.signed_duration_since(execution.start_time);
                execution_info.push(Line::from(vec![
                    Span::styled("Running for: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        format!(
                            "{}m {}s",
                            running_time.num_minutes(),
                            running_time.num_seconds() % 60
                        ),
                        Style::default().fg(Color::White),
                    ),
                ]));
            }

            // Add hint for Enter key to see details
            execution_info.push(Line::from(""));
            execution_info.push(Line::from(vec![
                Span::styled("Press ", Style::default().fg(Color::DarkGray)),
                Span::styled("Enter", Style::default().fg(Color::Yellow)),
                Span::styled(" to view job details", Style::default().fg(Color::DarkGray)),
            ]));

            let info_widget = Paragraph::new(execution_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Execution Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            f.render_widget(info_widget, chunks[2]);
        } else {
            // No workflow execution to display
            let workflow_info_widget = Paragraph::new(workflow_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Workflow Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            f.render_widget(workflow_info_widget, chunks[0]);

            // No execution details to display
            let placeholder = Paragraph::new(vec![
                Line::from(""),
                Line::from(vec![Span::styled(
                    "No execution data available.",
                    Style::default()
                        .fg(Color::Yellow)
                        .add_modifier(Modifier::BOLD),
                )]),
                Line::from(""),
                Line::from("Press 'Enter' to run this workflow."),
                Line::from(""),
            ])
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
            )
            .alignment(Alignment::Center);

            f.render_widget(placeholder, chunks[1]);

            // Execution information
            let info_widget = Paragraph::new(vec![
                Line::from(""),
                Line::from(vec![Span::styled(
                    "No execution has been started.",
                    Style::default().fg(Color::Yellow),
                )]),
                Line::from(""),
                Line::from("Press 'Enter' in the Workflows tab to run,"),
                Line::from("or 't' to trigger on GitHub."),
            ])
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Execution Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            )
            .alignment(Alignment::Center);

            f.render_widget(info_widget, chunks[2]);
        }
    } else {
        // No workflow execution to display
        let placeholder = Paragraph::new(vec![
            Line::from(""),
            Line::from(vec![Span::styled(
                "No workflow execution data available.",
                Style::default()
                    .fg(Color::Yellow)
                    .add_modifier(Modifier::BOLD),
            )]),
            Line::from(""),
            Line::from("Select workflows in the Workflows tab and press 'r' to run them."),
            Line::from(""),
            Line::from("Or press Enter on a selected workflow to run it directly."),
            Line::from(""),
            Line::from("You can also press 't' to trigger a workflow on GitHub remotely."),
        ])
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    " Execution ",
                    Style::default().fg(Color::Yellow),
                )),
        )
        .alignment(Alignment::Center);

        f.render_widget(placeholder, area);
    }
}
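One design choice worth noting above: ratatui has no built-in way to embed a `Gauge` inside a `Paragraph`, so the tab reserves a text row and then draws the gauge over it with a hand-built `Rect`. The same carving, extracted as a helper (a sketch, not in the diff; assumes the same ratatui version the crate already depends on):

```rust
use ratatui::layout::Rect;

/// One-row area inside a bordered block, `row_offset` rows below its top.
fn inner_row(outer: Rect, row_offset: u16) -> Rect {
    Rect {
        x: outer.x + 2,                       // step past border and padding
        y: outer.y + row_offset,              // the row reserved in the text
        width: outer.width.saturating_sub(4), // symmetric right-hand margin
        height: 1,
    }
}
```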
crates/ui/src/views/help_overlay.rs (new file)
@@ -0,0 +1,69 @@
// Help overlay rendering
use ratatui::{
    backend::CrosstermBackend,
    layout::Rect,
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Paragraph, Wrap},
    Frame,
};
use std::io;

// Render the help tab
pub fn render_help_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect) {
    let help_text = vec![
        Line::from(Span::styled(
            "Keyboard Controls",
            Style::default()
                .fg(Color::Cyan)
                .add_modifier(Modifier::BOLD),
        )),
        Line::from(""),
        Line::from(vec![
            Span::styled(
                "Tab",
                Style::default()
                    .fg(Color::Yellow)
                    .add_modifier(Modifier::BOLD),
            ),
            Span::raw(" - Switch between tabs"),
        ]),
        // More help text would follow...
    ];

    let help_widget = Paragraph::new(help_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(" Help ", Style::default().fg(Color::Yellow))),
        )
        .wrap(Wrap { trim: true });

    f.render_widget(help_widget, area);
}

// Render a help overlay
pub fn render_help_overlay(f: &mut Frame<CrosstermBackend<io::Stdout>>) {
    let size = f.size();

    // Create a slightly smaller centered modal
    let width = size.width.min(60);
    let height = size.height.min(20);
    let x = (size.width - width) / 2;
    let y = (size.height - height) / 2;

    let help_area = Rect {
        x,
        y,
        width,
        height,
    };

    // Create a clear background
    let clear = Block::default().style(Style::default().bg(Color::Black));
    f.render_widget(clear, size);

    // Render the help content
    render_help_tab(f, help_area);
}
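The centering arithmetic in `render_help_overlay` generalizes to any modal. As a sketch (not in the diff), the same math as a reusable helper:

```rust
use ratatui::layout::Rect;

fn centered_rect(area: Rect, max_w: u16, max_h: u16) -> Rect {
    let width = area.width.min(max_w);
    let height = area.height.min(max_h);
    Rect {
        x: area.x + (area.width - width) / 2, // cannot underflow: width <= area.width
        y: area.y + (area.height - height) / 2,
        width,
        height,
    }
}
```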
crates/ui/src/views/job_detail.rs (new file)
@@ -0,0 +1,201 @@
// Job detail view rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Paragraph, Row, Table},
    Frame,
};
use std::io;

// Render the job detail view
pub fn render_job_detail_view(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Get the workflow index either from current_execution or selected workflow
    let current_workflow_idx = app
        .current_execution
        .or_else(|| app.workflow_list_state.selected())
        .filter(|&idx| idx < app.workflows.len());

    if let Some(workflow_idx) = current_workflow_idx {
        // Only proceed if we have execution details
        if let Some(execution) = &app.workflows[workflow_idx].execution_details {
            // Only proceed if we have a valid job selection
            if let Some(job_idx) = app.job_list_state.selected() {
                if job_idx < execution.jobs.len() {
                    let job = &execution.jobs[job_idx];

                    // Split the area into sections
                    let chunks = Layout::default()
                        .direction(Direction::Vertical)
                        .constraints(
                            [
                                Constraint::Length(3), // Job title
                                Constraint::Min(5),    // Steps table
                                Constraint::Length(8), // Step details
                            ]
                            .as_ref(),
                        )
                        .margin(1)
                        .split(area);

                    // Job title section
                    let status_text = match job.status {
                        executor::JobStatus::Success => "Success",
                        executor::JobStatus::Failure => "Failed",
                        executor::JobStatus::Skipped => "Skipped",
                    };

                    let status_style = match job.status {
                        executor::JobStatus::Success => Style::default().fg(Color::Green),
                        executor::JobStatus::Failure => Style::default().fg(Color::Red),
                        executor::JobStatus::Skipped => Style::default().fg(Color::Yellow),
                    };

                    let job_title = Paragraph::new(vec![
                        Line::from(vec![
                            Span::styled("Job: ", Style::default().fg(Color::Blue)),
                            Span::styled(
                                job.name.clone(),
                                Style::default()
                                    .fg(Color::White)
                                    .add_modifier(Modifier::BOLD),
                            ),
                            Span::raw(" ("),
                            Span::styled(status_text, status_style),
                            Span::raw(")"),
                        ]),
                        Line::from(vec![
                            Span::styled("Steps: ", Style::default().fg(Color::Blue)),
                            Span::styled(
                                format!("{}", job.steps.len()),
                                Style::default().fg(Color::White),
                            ),
                        ]),
                    ])
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(
                                " Job Details ",
                                Style::default().fg(Color::Yellow),
                            )),
                    );

                    f.render_widget(job_title, chunks[0]);

                    // Steps section
                    let header_cells = ["Status", "Step Name"].iter().map(|h| {
                        ratatui::widgets::Cell::from(*h).style(Style::default().fg(Color::Yellow))
                    });

                    let header = Row::new(header_cells)
                        .style(Style::default().add_modifier(Modifier::BOLD))
                        .height(1);

                    let rows = job.steps.iter().map(|step| {
                        let status_symbol = match step.status {
                            executor::StepStatus::Success => "✅",
                            executor::StepStatus::Failure => "❌",
                            executor::StepStatus::Skipped => "⏭",
                        };

                        let status_style = match step.status {
                            executor::StepStatus::Success => Style::default().fg(Color::Green),
                            executor::StepStatus::Failure => Style::default().fg(Color::Red),
                            executor::StepStatus::Skipped => Style::default().fg(Color::Gray),
                        };

                        Row::new(vec![
                            ratatui::widgets::Cell::from(status_symbol).style(status_style),
                            ratatui::widgets::Cell::from(step.name.clone()),
                        ])
                    });

                    let steps_table = Table::new(rows)
                        .header(header)
                        .block(
                            Block::default()
                                .borders(Borders::ALL)
                                .border_type(BorderType::Rounded)
                                .title(Span::styled(" Steps ", Style::default().fg(Color::Yellow))),
                        )
                        .highlight_style(
                            Style::default()
                                .bg(Color::DarkGray)
                                .add_modifier(Modifier::BOLD),
                        )
                        .highlight_symbol("» ")
                        .widths(&[
                            Constraint::Length(8),      // Status icon column
                            Constraint::Percentage(92), // Name column
                        ]);

                    // We need to use the table state from the app
                    f.render_stateful_widget(steps_table, chunks[1], &mut app.step_table_state);

                    // Step detail section
                    if let Some(step_idx) = app.step_table_state.selected() {
                        if step_idx < job.steps.len() {
                            let step = &job.steps[step_idx];

                            // Show step output with proper styling
                            let status_text = match step.status {
                                executor::StepStatus::Success => "Success",
                                executor::StepStatus::Failure => "Failed",
                                executor::StepStatus::Skipped => "Skipped",
                            };

                            let status_style = match step.status {
                                executor::StepStatus::Success => Style::default().fg(Color::Green),
                                executor::StepStatus::Failure => Style::default().fg(Color::Red),
                                executor::StepStatus::Skipped => Style::default().fg(Color::Yellow),
                            };

                            let mut output_text = step.output.clone();
                            // Truncate if too long; back up to a char boundary so the
                            // slice cannot panic on multi-byte output (e.g. emoji)
                            if output_text.len() > 1000 {
                                let cut = (0..=1000)
                                    .rev()
                                    .find(|&i| output_text.is_char_boundary(i))
                                    .unwrap_or(0);
                                output_text = format!("{}... [truncated]", &output_text[..cut]);
                            }

                            let step_detail = Paragraph::new(vec![
                                Line::from(vec![
                                    Span::styled("Step: ", Style::default().fg(Color::Blue)),
                                    Span::styled(
                                        step.name.clone(),
                                        Style::default()
                                            .fg(Color::White)
                                            .add_modifier(Modifier::BOLD),
                                    ),
                                    Span::raw(" ("),
                                    Span::styled(status_text, status_style),
                                    Span::raw(")"),
                                ]),
                                Line::from(""),
                                Line::from(output_text),
                            ])
                            .block(
                                Block::default()
                                    .borders(Borders::ALL)
                                    .border_type(BorderType::Rounded)
                                    .title(Span::styled(
                                        " Step Output ",
                                        Style::default().fg(Color::Yellow),
                                    )),
                            )
                            .wrap(ratatui::widgets::Wrap { trim: false });

                            f.render_widget(step_detail, chunks[2]);
                        }
                    }
                }
            }
        }
    }
}
crates/ui/src/views/logs_tab.rs (new file)
@@ -0,0 +1,345 @@
// Logs tab rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
    Frame,
};
use std::io;

// Render the logs tab
pub fn render_logs_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    // Split the area into header, search bar (optionally shown), and log content
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints(
            [
                Constraint::Length(3), // Header with instructions
                Constraint::Length(
                    if app.log_search_active
                        || !app.log_search_query.is_empty()
                        || app.log_filter_level.is_some()
                    {
                        3
                    } else {
                        0
                    },
                ), // Search bar (optional)
                Constraint::Min(3), // Logs content
            ]
            .as_ref(),
        )
        .margin(1)
        .split(area);

    // Determine if search/filter bar should be shown
    let show_search_bar =
        app.log_search_active || !app.log_search_query.is_empty() || app.log_filter_level.is_some();

    // Render header with instructions
    let mut header_text = vec![
        Line::from(vec![Span::styled(
            "Execution and System Logs",
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD),
        )]),
        Line::from(vec![
            Span::styled("↑/↓", Style::default().fg(Color::Cyan)),
            Span::raw(" or "),
            Span::styled("j/k", Style::default().fg(Color::Cyan)),
            Span::raw(": Navigate logs/matches "),
            Span::styled("s", Style::default().fg(Color::Cyan)),
            Span::raw(": Search "),
            Span::styled("f", Style::default().fg(Color::Cyan)),
            Span::raw(": Filter "),
            Span::styled("Tab", Style::default().fg(Color::Cyan)),
            Span::raw(": Switch tabs"),
        ]),
    ];

    if show_search_bar {
        header_text.push(Line::from(vec![
            Span::styled("Enter", Style::default().fg(Color::Cyan)),
            Span::raw(": Apply search "),
            Span::styled("Esc", Style::default().fg(Color::Cyan)),
            Span::raw(": Clear search "),
            Span::styled("c", Style::default().fg(Color::Cyan)),
            Span::raw(": Clear all filters"),
        ]));
    }

    let header = Paragraph::new(header_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded),
        )
        .alignment(Alignment::Center);

    f.render_widget(header, chunks[0]);

    // Render search bar if active or has content
    if show_search_bar {
        let search_text = if app.log_search_active {
            format!("Search: {}█", app.log_search_query)
        } else {
            format!("Search: {}", app.log_search_query)
        };

        let filter_text = match &app.log_filter_level {
            Some(level) => format!("Filter: {}", level.to_string()),
            None => "No filter".to_string(),
        };

        let match_info = if !app.log_search_matches.is_empty() {
            format!(
                "Matches: {}/{}",
                app.log_search_match_idx + 1,
                app.log_search_matches.len()
            )
        } else if !app.log_search_query.is_empty() {
            "No matches".to_string()
        } else {
            "".to_string()
        };

        let search_info = Line::from(vec![
            Span::raw(search_text),
            Span::raw(" "),
            Span::styled(
                filter_text,
                Style::default().fg(match &app.log_filter_level {
                    Some(crate::models::LogFilterLevel::Error) => Color::Red,
                    Some(crate::models::LogFilterLevel::Warning) => Color::Yellow,
                    Some(crate::models::LogFilterLevel::Info) => Color::Cyan,
                    Some(crate::models::LogFilterLevel::Success) => Color::Green,
                    Some(crate::models::LogFilterLevel::Trigger) => Color::Magenta,
                    Some(crate::models::LogFilterLevel::All) | None => Color::Gray,
                }),
            ),
            Span::raw(" "),
            Span::styled(match_info, Style::default().fg(Color::Magenta)),
        ]);

        let search_block = Paragraph::new(search_info)
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Search & Filter ",
                        Style::default().fg(Color::Yellow),
                    )),
            )
            .alignment(Alignment::Left);

        f.render_widget(search_block, chunks[1]);
    }

    // Combine application logs with system logs
    let mut all_logs = Vec::new();

    // Now all logs should have timestamps in the format [HH:MM:SS]

    // Process app logs
    for log in &app.logs {
        all_logs.push(log.clone());
    }

    // Process system logs
    for log in logging::get_logs() {
        all_logs.push(log.clone());
    }

    // Filter logs based on search query and filter level
    let filtered_logs = if !app.log_search_query.is_empty() || app.log_filter_level.is_some() {
        all_logs
            .iter()
            .filter(|log| {
                let passes_filter = match &app.log_filter_level {
                    None => true,
                    Some(level) => level.matches(log),
                };

                let matches_search = if app.log_search_query.is_empty() {
                    true
                } else {
                    log.to_lowercase()
                        .contains(&app.log_search_query.to_lowercase())
                };

                passes_filter && matches_search
            })
            .cloned()
            .collect::<Vec<String>>()
    } else {
        all_logs.clone() // Clone to avoid moving all_logs
    };

    // Create a table for logs for better organization
    let header_cells = ["Time", "Type", "Message"]
        .iter()
        .map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));

    let header = Row::new(header_cells)
        .style(Style::default().add_modifier(Modifier::BOLD))
        .height(1);

    let rows = filtered_logs.iter().map(|log_line| {
        // Parse log line to extract timestamp, type and message

        // Extract timestamp from log format [HH:MM:SS]
        let timestamp = if log_line.starts_with('[') && log_line.contains(']') {
            let end = log_line.find(']').unwrap_or(0);
            if end > 1 {
                log_line[1..end].to_string()
            } else {
                "??:??:??".to_string() // Show placeholder for malformed logs
            }
        } else {
            "??:??:??".to_string() // Show placeholder for malformed logs
        };

        let (log_type, log_style, _) =
            if log_line.contains("Error") || log_line.contains("error") || log_line.contains("❌")
            {
                ("ERROR", Style::default().fg(Color::Red), log_line.as_str())
            } else if log_line.contains("Warning")
                || log_line.contains("warning")
                || log_line.contains("⚠️")
            {
                (
                    "WARN",
                    Style::default().fg(Color::Yellow),
                    log_line.as_str(),
                )
            } else if log_line.contains("Success")
                || log_line.contains("success")
                || log_line.contains("✅")
            {
                (
                    "SUCCESS",
                    Style::default().fg(Color::Green),
                    log_line.as_str(),
                )
            } else if log_line.contains("Running")
                || log_line.contains("running")
                || log_line.contains("⟳")
            {
                ("INFO", Style::default().fg(Color::Cyan), log_line.as_str())
            } else if log_line.contains("Triggering") || log_line.contains("triggered") {
                (
                    "TRIG",
                    Style::default().fg(Color::Magenta),
                    log_line.as_str(),
                )
            } else {
                ("INFO", Style::default().fg(Color::Gray), log_line.as_str())
            };

        // Extract content after timestamp
        let content = if log_line.starts_with('[') && log_line.contains(']') {
            let start = log_line.find(']').unwrap_or(0) + 1;
            log_line[start..].trim()
        } else {
            log_line.as_str()
        };

        // Highlight search matches in content if search is active
        let mut content_spans = Vec::new();
        if !app.log_search_query.is_empty() {
            let lowercase_content = content.to_lowercase();
            let lowercase_query = app.log_search_query.to_lowercase();

            if lowercase_content.contains(&lowercase_query) {
                let mut last_idx = 0;
                while let Some(idx) = lowercase_content[last_idx..].find(&lowercase_query) {
                    let real_idx = last_idx + idx;

                    // Add text before match
                    if real_idx > last_idx {
                        content_spans.push(Span::raw(content[last_idx..real_idx].to_string()));
                    }

                    // Add matched text with highlight
                    let match_end = real_idx + app.log_search_query.len();
                    content_spans.push(Span::styled(
                        content[real_idx..match_end].to_string(),
                        Style::default().bg(Color::Yellow).fg(Color::Black),
                    ));

                    last_idx = match_end;
                }

                // Add remaining text after last match
                if last_idx < content.len() {
                    content_spans.push(Span::raw(content[last_idx..].to_string()));
                }
            } else {
                content_spans.push(Span::raw(content));
            }
        } else {
            content_spans.push(Span::raw(content));
        }

        Row::new(vec![
            Cell::from(timestamp),
            Cell::from(log_type).style(log_style),
            Cell::from(Line::from(content_spans)),
        ])
    });

    let content_idx = if show_search_bar { 2 } else { 1 };

    let log_table = Table::new(rows)
        .header(header)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    format!(
                        " Logs ({}/{}) ",
                        if filtered_logs.is_empty() {
                            0
                        } else {
                            app.log_scroll + 1
                        },
                        filtered_logs.len()
                    ),
                    Style::default().fg(Color::Yellow),
                )),
        )
        .highlight_style(Style::default().bg(Color::DarkGray))
        .widths(&[
            Constraint::Length(10),     // Timestamp column
            Constraint::Length(7),      // Log type column
            Constraint::Percentage(80), // Message column
        ]);

    // We need to convert log_scroll index to a TableState
    let mut log_table_state = TableState::default();

    if !filtered_logs.is_empty() {
        // If we have search matches, use the match index as the selected row
        if !app.log_search_matches.is_empty() {
            // Make sure we're within bounds
            let _match_index = app
                .log_search_match_idx
                .min(app.log_search_matches.len() - 1);

            // This would involve more complex logic to go from search matches to the filtered logs
            // For simplicity in this placeholder, we'll just use the scroll position
            log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
        } else {
            // No search matches, use regular scroll position
            log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
        }
    }

    f.render_stateful_widget(log_table, chunks[content_idx], &mut log_table_state);
}
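The highlight loop above splits each log line around case-insensitive matches by searching a lowercased copy and applying the byte offsets back to the original. A standalone sketch of that splitting (not in the diff; like the original, reusing offsets across the lowercased copy is only safe when lowercasing does not change byte lengths, i.e. for ASCII queries and content):

```rust
/// Split `content` into (slice, is_match) pieces for highlighting.
fn split_matches<'a>(content: &'a str, query: &str) -> Vec<(&'a str, bool)> {
    let lq = query.to_lowercase();
    if lq.is_empty() {
        return vec![(content, false)];
    }
    let lc = content.to_lowercase();
    let mut parts = Vec::new();
    let mut last = 0;
    while let Some(i) = lc[last..].find(&lq) {
        let start = last + i;
        let end = start + query.len();
        if start > last {
            parts.push((&content[last..start], false)); // text before the match
        }
        parts.push((&content[start..end], true)); // the match itself
        last = end;
    }
    if last < content.len() {
        parts.push((&content[last..], false)); // trailing text
    }
    parts
}
```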
crates/ui/src/views/mod.rs (new file)
@@ -0,0 +1,57 @@
// UI Views module
mod execution_tab;
mod help_overlay;
mod job_detail;
mod logs_tab;
mod status_bar;
mod title_bar;
mod workflows_tab;

use crate::app::App;
use ratatui::{backend::CrosstermBackend, Frame};
use std::io;

// Main render function for the UI
pub fn render_ui(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &mut App) {
    // Check if help should be shown as an overlay
    if app.show_help {
        help_overlay::render_help_overlay(f);
        return;
    }

    let size = f.size();

    // Create main layout
    let main_chunks = ratatui::layout::Layout::default()
        .direction(ratatui::layout::Direction::Vertical)
        .constraints(
            [
                ratatui::layout::Constraint::Length(3), // Title bar and tabs
                ratatui::layout::Constraint::Min(5),    // Main content
                ratatui::layout::Constraint::Length(2), // Status bar
            ]
            .as_ref(),
        )
        .split(size);

    // Render title bar with tabs
    title_bar::render_title_bar(f, app, main_chunks[0]);

    // Render main content based on selected tab
    match app.selected_tab {
        0 => workflows_tab::render_workflows_tab(f, app, main_chunks[1]),
        1 => {
            if app.detailed_view {
                job_detail::render_job_detail_view(f, app, main_chunks[1])
            } else {
                execution_tab::render_execution_tab(f, app, main_chunks[1])
            }
        }
        2 => logs_tab::render_logs_tab(f, app, main_chunks[1]),
        3 => help_overlay::render_help_tab(f, main_chunks[1]),
        _ => {}
    }

    // Render status bar
    status_bar::render_status_bar(f, app, main_chunks[2]);
}
crates/ui/src/views/status_bar.rs (new file)
@@ -0,0 +1,166 @@
// Status bar rendering
use crate::app::App;
use executor::RuntimeType;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Rect},
    style::{Color, Style},
    text::{Line, Span},
    widgets::Paragraph,
    Frame,
};
use std::borrow::Cow;
use std::io;

// Render the status bar
pub fn render_status_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    // If we have a status message, show it instead of the normal status bar
    if let Some(message) = &app.status_message {
        // Determine if this is a success message (starts with ✅)
        let is_success = message.starts_with("✅");

        let status_message = Paragraph::new(Line::from(vec![Span::styled(
            format!(" {} ", message),
            Style::default()
                .bg(if is_success { Color::Green } else { Color::Red })
                .fg(Color::White)
                .add_modifier(ratatui::style::Modifier::BOLD),
        )]))
        .alignment(Alignment::Center);

        f.render_widget(status_message, area);
        return;
    }

    // Normal status bar
    let mut status_items = vec![];

    // Add mode info
    status_items.push(Span::styled(
        format!(" {} ", app.runtime_type_name()),
        Style::default()
            .bg(match app.runtime_type {
                RuntimeType::Docker => Color::Blue,
                RuntimeType::Emulation => Color::Magenta,
            })
            .fg(Color::White),
    ));

    // Add Docker status if relevant
    if app.runtime_type == RuntimeType::Docker {
        // Check Docker silently using safe FD redirection
        let is_docker_available =
            match utils::fd::with_stderr_to_null(executor::docker::is_available) {
                Ok(result) => result,
                Err(_) => {
                    logging::debug("Failed to redirect stderr when checking Docker availability.");
                    false
                }
            };

        status_items.push(Span::raw(" "));
        status_items.push(Span::styled(
            if is_docker_available {
                " Docker: Connected "
            } else {
                " Docker: Not Available "
            },
            Style::default()
                .bg(if is_docker_available {
                    Color::Green
                } else {
                    Color::Red
                })
                .fg(Color::White),
        ));
    }

    // Add validation/execution mode
    status_items.push(Span::raw(" "));
    status_items.push(Span::styled(
        format!(
            " {} ",
            if app.validation_mode {
                "Validation"
            } else {
                "Execution"
            }
        ),
        Style::default()
            .bg(if app.validation_mode {
                Color::Yellow
            } else {
                Color::Green
            })
            .fg(Color::Black),
    ));

    // Add context-specific help based on current tab.
    // Cow gives all arms one type: the static arms borrow, the logs arm
    // allocates, and nothing is leaked per frame.
    status_items.push(Span::raw(" "));
    let help_text: Cow<'static, str> = match app.selected_tab {
        0 => {
            if let Some(idx) = app.workflow_list_state.selected() {
                if idx < app.workflows.len() {
                    let workflow = &app.workflows[idx];
                    match workflow.status {
                        crate::models::WorkflowStatus::NotStarted => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [t] Trigger Workflow [Shift+R] Reset workflow".into(),
                        crate::models::WorkflowStatus::Running => "[Space] Toggle selection [Enter] Run selected [r] Run all selected (Workflow running...)".into(),
                        crate::models::WorkflowStatus::Success | crate::models::WorkflowStatus::Failed | crate::models::WorkflowStatus::Skipped => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [Shift+R] Reset workflow".into(),
                    }
                } else {
                    "[Space] Toggle selection [Enter] Run selected [r] Run all selected".into()
                }
            } else {
                "[Space] Toggle selection [Enter] Run selected [r] Run all selected".into()
            }
        }
        1 => {
            if app.detailed_view {
                "[Esc] Back to jobs [↑/↓] Navigate steps".into()
            } else {
                "[Enter] View details [↑/↓] Navigate jobs".into()
            }
        }
        2 => {
            // For logs tab, show scrolling instructions
            let log_count = app.logs.len() + logging::get_logs().len();
            if log_count > 0 {
                format!(
                    "[↑/↓] Scroll logs ({}/{}) [s] Search [f] Filter",
                    app.log_scroll + 1,
                    log_count
                )
                .into()
            } else {
                "[No logs to display]".into()
            }
        }
        3 => "[?] Toggle help overlay".into(),
        _ => "".into(),
    };
    status_items.push(Span::styled(
        format!(" {} ", help_text),
        Style::default().fg(Color::White),
    ));

    // Show keybindings for common actions
    status_items.push(Span::raw(" "));
    status_items.push(Span::styled(
        " [Tab] Switch tabs ",
        Style::default().fg(Color::White),
    ));
    status_items.push(Span::styled(
        " [?] Help ",
        Style::default().fg(Color::White),
    ));
    status_items.push(Span::styled(
        " [q] Quit ",
        Style::default().fg(Color::White),
    ));

    let status_bar = Paragraph::new(Line::from(status_items))
        .style(Style::default().bg(Color::DarkGray))
        .alignment(Alignment::Left);

    f.render_widget(status_bar, area);
}
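A note on the `Cow<'static, str>` used for `help_text` above: it gives every match arm a single owned-or-borrowed type, so the dynamic logs arm can allocate while the fixed hints stay zero-cost, and the allocation is freed normally each frame. In miniature:

```rust
use std::borrow::Cow;

fn label(log_count: usize) -> Cow<'static, str> {
    if log_count == 0 {
        Cow::Borrowed("[No logs to display]") // no allocation
    } else {
        Cow::Owned(format!("[{log_count} log lines]")) // allocated, dropped normally
    }
}
```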
crates/ui/src/views/title_bar.rs (new file)
@@ -0,0 +1,74 @@
// Title bar rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Tabs},
    Frame,
};
use std::io;

// Render the title bar with tabs
pub fn render_title_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    let titles = ["Workflows", "Execution", "Logs", "Help"];
    let tabs = Tabs::new(
        titles
            .iter()
            .enumerate()
            .map(|(i, t)| {
                if i == 1 {
                    // Special case for "Execution"
                    let e_part = &t[0..1]; // "E"
                    let x_part = &t[1..2]; // "x"
                    let rest = &t[2..]; // "ecution"
                    Line::from(vec![
                        Span::styled(e_part, Style::default().fg(Color::White)),
                        Span::styled(
                            x_part,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                } else {
                    // Original styling for other tabs
                    let (first, rest) = t.split_at(1);
                    Line::from(vec![
                        Span::styled(
                            first,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                }
            })
            .collect(),
    )
    .block(
        Block::default()
            .borders(Borders::ALL)
            .border_type(BorderType::Rounded)
            .title(Span::styled(
                " wrkflw ",
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD),
            ))
            .title_alignment(Alignment::Center),
    )
    .highlight_style(
        Style::default()
            .bg(Color::DarkGray)
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    )
    .select(app.selected_tab)
    .divider(Span::raw("|"));

    f.render_widget(tabs, area);
}
crates/ui/src/views/workflows_tab.rs (new file)
@@ -0,0 +1,131 @@
// Workflows tab rendering
use crate::app::App;
use crate::models::WorkflowStatus;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
    Frame,
};
use std::io;

// Render the workflow list tab
pub fn render_workflows_tab(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Create a more structured layout for the workflow tab
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints(
            [
                Constraint::Length(3), // Header with instructions
                Constraint::Min(5),    // Workflow list
            ]
            .as_ref(),
        )
        .margin(1)
        .split(area);

    // Render header with instructions
    let header_text = vec![
        Line::from(vec![Span::styled(
            "Available Workflows",
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD),
        )]),
        Line::from(vec![
            Span::styled("Space", Style::default().fg(Color::Cyan)),
            Span::raw(": Toggle selection "),
            Span::styled("Enter", Style::default().fg(Color::Cyan)),
            Span::raw(": Run "),
            Span::styled("t", Style::default().fg(Color::Cyan)),
            Span::raw(": Trigger remotely"),
        ]),
    ];

    let header = Paragraph::new(header_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded),
        )
        .alignment(Alignment::Center);

    f.render_widget(header, chunks[0]);

    // Create a table for workflows instead of a list for better organization
    let selected_style = Style::default()
        .bg(Color::DarkGray)
        .add_modifier(Modifier::BOLD);

    // Normal style definition removed as it was unused

    let header_cells = ["", "Status", "Workflow Name", "Path"]
        .iter()
        .map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));

    let header = Row::new(header_cells)
        .style(Style::default().add_modifier(Modifier::BOLD))
        .height(1);

    let rows = app.workflows.iter().map(|workflow| {
        // Create cells for each column
        let checkbox = if workflow.selected { "✓" } else { " " };

        let (status_symbol, status_style) = match workflow.status {
            WorkflowStatus::NotStarted => ("○", Style::default().fg(Color::Gray)),
            WorkflowStatus::Running => ("⟳", Style::default().fg(Color::Cyan)),
            WorkflowStatus::Success => ("✅", Style::default().fg(Color::Green)),
            WorkflowStatus::Failed => ("❌", Style::default().fg(Color::Red)),
            WorkflowStatus::Skipped => ("⏭", Style::default().fg(Color::Yellow)),
        };

        let path_display = workflow.path.to_string_lossy();
        let path_shortened = if path_display.len() > 30 {
            // Keep the path tail; nudge the cut forward to a char boundary
            // so slicing cannot panic on non-ASCII paths
            let mut start = path_display.len() - 30;
            while !path_display.is_char_boundary(start) {
                start += 1;
            }
            format!("...{}", &path_display[start..])
        } else {
            path_display.to_string()
        };

        Row::new(vec![
            Cell::from(checkbox).style(Style::default().fg(Color::Green)),
            Cell::from(status_symbol).style(status_style),
            Cell::from(workflow.name.clone()),
            Cell::from(path_shortened).style(Style::default().fg(Color::DarkGray)),
        ])
    });

    let workflows_table = Table::new(rows)
        .header(header)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    " Workflows ",
                    Style::default().fg(Color::Yellow),
                )),
        )
        .highlight_style(selected_style)
        .highlight_symbol("» ")
        .widths(&[
            Constraint::Length(3),      // Checkbox column
            Constraint::Length(4),      // Status icon column
            Constraint::Percentage(45), // Name column
            Constraint::Percentage(45), // Path column
        ]);

    // We need to convert ListState to TableState
    let mut table_state = TableState::default();
    table_state.select(app.workflow_list_state.selected());

    f.render_stateful_widget(workflows_table, chunks[1], &mut table_state);

    // Update the app list state to match the table state
    app.workflow_list_state.select(table_state.selected());
}
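The char-boundary scan used for the path column above generalizes; a reusable form (sketch, not in the diff):

```rust
/// Last `max_bytes` of `s`, nudged forward to a UTF-8 boundary so slicing is safe.
fn tail(s: &str, max_bytes: usize) -> &str {
    if s.len() <= max_bytes {
        return s;
    }
    let mut start = s.len() - max_bytes;
    while !s.is_char_boundary(start) {
        start += 1;
    }
    &s[start..]
}
```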
crates/utils/Cargo.toml (new file)
@@ -0,0 +1,15 @@
[package]
name = "utils"
version.workspace = true
edition.workspace = true
description = "utility functions for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
serde.workspace = true
serde_yaml.workspace = true
nix.workspace = true
@@ -1,6 +1,17 @@
|
||||
// utils crate
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
pub fn is_workflow_file(path: &Path) -> bool {
|
||||
// First, check for GitLab CI files by name
|
||||
if let Some(file_name) = path.file_name() {
|
||||
let file_name_str = file_name.to_string_lossy().to_lowercase();
|
||||
if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Then check for GitHub Actions workflows
|
||||
if let Some(ext) = path.extension() {
|
||||
if ext == "yml" || ext == "yaml" {
|
||||
// Check if the file is in a .github/workflows directory
|
||||
@@ -47,7 +58,7 @@ pub mod fd {
        // Duplicate the current stderr fd
        let stderr_backup = match dup(STDERR_FILENO) {
            Ok(fd) => fd,
-           Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
+           Err(e) => return Err(io::Error::other(e)),
        };

        // Open /dev/null
@@ -55,7 +66,7 @@ pub mod fd {
            Ok(fd) => fd,
            Err(e) => {
                let _ = close(stderr_backup); // Clean up on error
-               return Err(io::Error::new(io::ErrorKind::Other, e));
+               return Err(io::Error::other(e));
            }
        };

@@ -63,7 +74,7 @@ pub mod fd {
        if let Err(e) = dup2(null_fd, STDERR_FILENO) {
            let _ = close(stderr_backup); // Clean up on error
            let _ = close(null_fd);
-           return Err(io::Error::new(io::ErrorKind::Other, e));
+           return Err(io::Error::other(e));
        }

        Ok(RedirectedStderr {
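The three hunks above swap `io::Error::new(io::ErrorKind::Other, e)` for `io::Error::other(e)`, a shorthand constructor stabilized in Rust 1.74; the two forms are equivalent:

```rust
use std::io;

fn main() {
    // Both constructors produce an error of kind ErrorKind::Other;
    // io::Error::other is simply the shorter, newer spelling.
    let old_style = io::Error::new(io::ErrorKind::Other, "dup failed");
    let new_style = io::Error::other("dup failed");
    assert_eq!(old_style.kind(), new_style.kind());
    assert_eq!(old_style.to_string(), new_style.to_string());
}
```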
15 crates/validators/Cargo.toml (new file)
@@ -0,0 +1,15 @@
[package]
name = "validators"
version.workspace = true
edition.workspace = true
description = "validation functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
matrix = { path = "../matrix" }

# External dependencies
serde.workspace = true
serde_yaml.workspace = true
@@ -1,4 +1,4 @@
-use crate::models::ValidationResult;
+use models::ValidationResult;

pub fn validate_action_reference(
    action_ref: &str,
234 crates/validators/src/gitlab.rs (new file)
@@ -0,0 +1,234 @@
use models::gitlab::{Job, Pipeline};
use models::ValidationResult;
use std::collections::HashMap;

/// Validate a GitLab CI/CD pipeline
pub fn validate_gitlab_pipeline(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Basic structure validation
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Validate jobs
    validate_jobs(&pipeline.jobs, &mut result);

    // Validate stages if defined
    if let Some(stages) = &pipeline.stages {
        validate_stages(stages, &pipeline.jobs, &mut result);
    }

    // Validate dependencies
    validate_dependencies(&pipeline.jobs, &mut result);

    // Validate extends
    validate_extends(&pipeline.jobs, &mut result);

    // Validate artifacts
    validate_artifacts(&pipeline.jobs, &mut result);

    result
}

/// Validate GitLab CI/CD jobs
fn validate_jobs(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }

        // Check when value if present
        if let Some(when) = &job.when {
            match when.as_str() {
                "on_success" | "on_failure" | "always" | "manual" | "never" => {
                    // Valid when value
                }
                _ => {
                    result.add_issue(format!(
                        "Job '{}' has invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always, manual, never",
                        job_name, when
                    ));
                }
            }
        }

        // Check retry configuration
        if let Some(retry) = &job.retry {
            match retry {
                models::gitlab::Retry::MaxAttempts(attempts) => {
                    if *attempts > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, attempts
                        ));
                    }
                }
                models::gitlab::Retry::Detailed { max, when: _ } => {
                    if *max > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, max
                        ));
                    }
                }
            }
        }
    }
}

/// Validate GitLab CI/CD stages
fn validate_stages(stages: &[String], jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check that all jobs reference existing stages
    for (job_name, job) in jobs {
        if let Some(stage) = &job.stage {
            if !stages.contains(stage) {
                result.add_issue(format!(
                    "Job '{}' references undefined stage '{}'. Available stages are: {}",
                    job_name,
                    stage,
                    stages.join(", ")
                ));
            }
        }
    }

    // Check for unused stages
    for stage in stages {
        let used = jobs.values().any(|job| {
            if let Some(job_stage) = &job.stage {
                job_stage == stage
            } else {
                false
            }
        });

        if !used {
            result.add_issue(format!(
                "Stage '{}' is defined but not used by any job",
                stage
            ));
        }
    }
}

/// Validate GitLab CI/CD job dependencies
fn validate_dependencies(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                } else if job_name == dependency {
                    result.add_issue(format!("Job '{}' cannot depend on itself", job_name));
                }
            }
        }
    }
}

/// Validate GitLab CI/CD job extends
fn validate_extends(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check for circular extends
    for (job_name, job) in jobs {
        if let Some(extends) = &job.extends {
            // Check that all extended jobs exist
            for extend in extends {
                if !jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                    continue;
                }

                // Check for circular extends
                let mut visited = vec![job_name.clone()];
                check_circular_extends(extend, jobs, &mut visited, result);
            }
        }
    }
}

/// Helper function to detect circular extends
fn check_circular_extends(
    job_name: &str,
    jobs: &HashMap<String, Job>,
    visited: &mut Vec<String>,
    result: &mut ValidationResult,
) {
    visited.push(job_name.to_string());

    if let Some(job) = jobs.get(job_name) {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if visited.contains(&extend.to_string()) {
                    // Circular dependency detected
                    let cycle = visited
                        .iter()
                        .skip(visited.iter().position(|x| x == extend).unwrap())
                        .chain(std::iter::once(extend))
                        .cloned()
                        .collect::<Vec<_>>()
                        .join(" -> ");

                    result.add_issue(format!("Circular extends detected: {}", cycle));
                    return;
                }

                check_circular_extends(extend, jobs, visited, result);
            }
        }
    }

    visited.pop();
}

/// Validate GitLab CI/CD job artifacts
fn validate_artifacts(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(artifacts) = &job.artifacts {
            // Check that paths are specified
            if let Some(paths) = &artifacts.paths {
                if paths.is_empty() {
                    result.add_issue(format!(
                        "Job '{}' has artifacts section with empty paths",
                        job_name
                    ));
                }
            } else {
                result.add_issue(format!(
                    "Job '{}' has artifacts section without specifying paths",
                    job_name
                ));
            }

            // Check for valid 'when' value if present
            if let Some(when) = &artifacts.when {
                match when.as_str() {
                    "on_success" | "on_failure" | "always" => {
                        // Valid when value
                    }
                    _ => {
                        result.add_issue(format!(
                            "Job '{}' has artifacts with invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always",
                            job_name, when
                        ));
                    }
                }
            }
        }
    }
}
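The cycle message in `check_circular_extends` is built from the `visited` stack; here is that formatting logic extracted as a standalone sketch (job names are illustrative):

```rust
fn main() {
    // `visited` holds the extends chain walked so far; `extend` is the job
    // that closes the loop. The cycle is printed starting from that job.
    let visited = vec!["deploy".to_string(), "a".to_string(), "b".to_string()];
    let extend = "a".to_string();

    let cycle = visited
        .iter()
        .skip(visited.iter().position(|x| x == &extend).unwrap())
        .chain(std::iter::once(&extend))
        .cloned()
        .collect::<Vec<_>>()
        .join(" -> ");

    assert_eq!(cycle, "a -> b -> a");
    println!("Circular extends detected: {}", cycle);
}
```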
@@ -1,5 +1,5 @@
-use crate::models::ValidationResult;
-use crate::validators::{validate_matrix, validate_steps};
+use crate::{validate_matrix, validate_steps};
+use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_jobs(jobs: &Value, result: &mut ValidationResult) {
@@ -1,10 +1,14 @@
// validators crate

mod actions;
mod gitlab;
mod jobs;
mod matrix;
mod steps;
mod triggers;

pub use actions::validate_action_reference;
pub use gitlab::validate_gitlab_pipeline;
pub use jobs::validate_jobs;
pub use matrix::validate_matrix;
pub use steps::validate_steps;
@@ -1,4 +1,4 @@
-use crate::models::ValidationResult;
+use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_matrix(matrix: &Value, result: &mut ValidationResult) {
@@ -1,8 +1,11 @@
-use crate::models::ValidationResult;
-use crate::validators::validate_action_reference;
+use crate::validate_action_reference;
+use models::ValidationResult;
use serde_yaml::Value;
use std::collections::HashSet;

pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationResult) {
    let mut step_ids: HashSet<String> = HashSet::new();

    for (i, step) in steps.iter().enumerate() {
        if let Some(step_map) = step.as_mapping() {
            if !step_map.contains_key(Value::String("name".to_string()))
@@ -27,6 +30,18 @@ pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationRe
                ));
            }

+           // Check for duplicate step IDs
+           if let Some(Value::String(id)) = step_map.get(Value::String("id".to_string())) {
+               if !step_ids.insert(id.clone()) {
+                   result.add_issue(format!(
+                       "Job '{}', step {}: The identifier '{}' may not be used more than once within the same scope",
+                       job_name,
+                       i + 1,
+                       id
+                   ));
+               }
+           }
+
            // Validate action reference if 'uses' is present
            if let Some(Value::String(uses)) = step_map.get(Value::String("uses".to_string())) {
                validate_action_reference(uses, job_name, i, result);
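The duplicate-ID check added above relies on `HashSet::insert`, which returns `false` when the value is already present:

```rust
use std::collections::HashSet;

fn main() {
    let mut step_ids: HashSet<String> = HashSet::new();
    assert!(step_ids.insert("build".to_string())); // first "build": accepted
    assert!(!step_ids.insert("build".to_string())); // duplicate: flagged
}
```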
@@ -1,4 +1,4 @@
-use crate::models::ValidationResult;
+use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_triggers(on: &Value, result: &mut ValidationResult) {
65 crates/wrkflw/Cargo.toml (new file)
@@ -0,0 +1,65 @@
[package]
name = "wrkflw"
version.workspace = true
edition.workspace = true
description.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true
license.workspace = true

[dependencies]
# Workspace crates
models = { path = "../models" }
executor = { path = "../executor" }
github = { path = "../github" }
gitlab = { path = "../gitlab" }
logging = { path = "../logging" }
matrix = { path = "../matrix" }
parser = { path = "../parser" }
runtime = { path = "../runtime" }
ui = { path = "../ui" }
utils = { path = "../utils" }
validators = { path = "../validators" }
evaluator = { path = "../evaluator" }

# External dependencies
clap.workspace = true
bollard.workspace = true
tokio.workspace = true
futures-util.workspace = true
futures.workspace = true
chrono.workspace = true
uuid.workspace = true
tempfile.workspace = true
dirs.workspace = true
thiserror.workspace = true
log.workspace = true
regex.workspace = true
lazy_static.workspace = true
reqwest.workspace = true
libc.workspace = true
nix.workspace = true
urlencoding.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
colored.workspace = true
indexmap.workspace = true
rayon.workspace = true
num_cpus.workspace = true
itertools.workspace = true
once_cell.workspace = true
crossterm.workspace = true
ratatui.workspace = true
walkdir = "2.4"

[lib]
name = "wrkflw_lib"
path = "src/lib.rs"

[[bin]]
name = "wrkflw"
path = "src/main.rs"
12 crates/wrkflw/src/lib.rs (new file)
@@ -0,0 +1,12 @@
pub use evaluator;
pub use executor;
pub use github;
pub use gitlab;
pub use logging;
pub use matrix;
pub use models;
pub use parser;
pub use runtime;
pub use ui;
pub use utils;
pub use validators;
654 crates/wrkflw/src/main.rs (new file)
@@ -0,0 +1,654 @@
use bollard::Docker;
use clap::{Parser, Subcommand};
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(
    name = "wrkflw",
    about = "GitHub & GitLab CI/CD validator and executor",
    version,
    long_about = "A CI/CD validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw run .gitlab-ci.yml # Run a GitLab CI pipeline\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker\n wrkflw run --preserve-containers-on-failure .github/workflows/build.yml # Keep failed containers for debugging"
)]
struct Wrkflw {
    #[command(subcommand)]
    command: Option<Commands>,

    /// Run in verbose mode with detailed output
    #[arg(short, long, global = true)]
    verbose: bool,

    /// Run in debug mode with extensive execution details
    #[arg(short, long, global = true)]
    debug: bool,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Validate workflow or pipeline files
    Validate {
        /// Path to workflow/pipeline file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Explicitly validate as GitLab CI/CD pipeline
        #[arg(long)]
        gitlab: bool,

        /// Set exit code to 1 on validation failure
        #[arg(long = "exit-code", default_value_t = true)]
        exit_code: bool,

        /// Don't set exit code to 1 on validation failure (overrides --exit-code)
        #[arg(long = "no-exit-code", conflicts_with = "exit_code")]
        no_exit_code: bool,
    },

    /// Execute workflow or pipeline files locally
    Run {
        /// Path to workflow/pipeline file to execute
        path: PathBuf,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,

        /// Preserve Docker containers on failure for debugging (Docker mode only)
        #[arg(long)]
        preserve_containers_on_failure: bool,

        /// Explicitly run as GitLab CI/CD pipeline
        #[arg(long)]
        gitlab: bool,
    },

    /// Open TUI interface to manage workflows
    Tui {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,

        /// Preserve Docker containers on failure for debugging (Docker mode only)
        #[arg(long)]
        preserve_containers_on_failure: bool,
    },

    /// Trigger a GitHub workflow remotely
    Trigger {
        /// Name of the workflow file (without .yml extension)
        workflow: String,

        /// Branch to run the workflow on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value inputs for the workflow in format key=value
        #[arg(short, long, value_parser = parse_key_val)]
        input: Option<Vec<(String, String)>>,
    },

    /// Trigger a GitLab pipeline remotely
    TriggerGitlab {
        /// Branch to run the pipeline on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value variables for the pipeline in format key=value
        #[arg(short = 'V', long, value_parser = parse_key_val)]
        variable: Option<Vec<(String, String)>>,
    },

    /// List available workflows and pipelines
    List,
}

// Parser function for key-value pairs
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}
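`parse_key_val` splits on the first `=` only, so values may themselves contain `=`:

```rust
// parse_key_val as defined above.
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;
    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}

fn main() {
    assert_eq!(
        parse_key_val("tag=v1.2=rc"),
        Ok(("tag".to_string(), "v1.2=rc".to_string()))
    );
    assert!(parse_key_val("no-equals-here").is_err());
}
```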
// Make this function public for testing? Or move to a utils/cleanup mod?
// Or call executor::cleanup and runtime::cleanup directly?
// Let's try calling them directly for now.
async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                // Assuming cleanup_resources exists in executor crate
                executor::cleanup_resources(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        // Assuming cleanup_resources exists in runtime::emulation module
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}
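`cleanup_on_exit` bounds each cleanup phase with `tokio::time::timeout` so a hung Docker call cannot stall shutdown. The pattern in isolation (a sketch; the sleep stands in for a stuck operation):

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    // A future that never finishes within the budget.
    let slow_cleanup = tokio::time::sleep(Duration::from_secs(60));

    match tokio::time::timeout(Duration::from_secs(3), slow_cleanup).await {
        Ok(_) => println!("cleanup finished"),
        Err(_) => println!("cleanup timed out, continuing with shutdown"),
    }
}
```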
async fn handle_signals() {
    // Set up a hard exit timer in case cleanup takes too long
    // This ensures the app always exits even if Docker operations are stuck
    let hard_exit_time = std::time::Duration::from_secs(10);

    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Set up a watchdog thread that will force exit if cleanup takes too long
    // This is important because Docker operations can sometimes hang indefinitely
    let _ = std::thread::spawn(move || {
        std::thread::sleep(hard_exit_time);
        eprintln!(
            "Cleanup taking too long (over {} seconds), forcing exit...",
            hard_exit_time.as_secs()
        );
        logging::error("Forced exit due to cleanup timeout");
        std::process::exit(1);
    });

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status - the force exit thread will be terminated automatically
    std::process::exit(0);
}
/// Determines if a file is a GitLab CI/CD pipeline based on its name and content
fn is_gitlab_pipeline(path: &Path) -> bool {
    // First check the file name
    if let Some(file_name) = path.file_name() {
        if let Some(file_name_str) = file_name.to_str() {
            if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
                return true;
            }
        }
    }

    // Check if file is in .gitlab/ci directory
    if let Some(parent) = path.parent() {
        if let Some(parent_str) = parent.to_str() {
            if parent_str.ends_with(".gitlab/ci")
                && path
                    .extension()
                    .is_some_and(|ext| ext == "yml" || ext == "yaml")
            {
                return true;
            }
        }
    }

    // If file exists, check the content
    if path.exists() {
        if let Ok(content) = std::fs::read_to_string(path) {
            // GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
            if content.contains("stages:")
                || content.contains("before_script:")
                || content.contains("after_script:")
            {
                // Check for GitHub Actions specific keys that would indicate it's not GitLab
                if !content.contains("on:")
                    && !content.contains("runs-on:")
                    && !content.contains("uses:")
                {
                    return true;
                }
            }
        }
    }

    false
}
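A few illustrative calls against `is_gitlab_pipeline` (a hypothetical test module; the last case assumes `workflow.yml` does not exist on disk, so the content check is skipped):

```rust
#[cfg(test)]
mod tests {
    use super::is_gitlab_pipeline;
    use std::path::Path;

    #[test]
    fn detects_gitlab_pipelines_by_name() {
        assert!(is_gitlab_pipeline(Path::new(".gitlab-ci.yml")));
        assert!(is_gitlab_pipeline(Path::new("ci/legacy.gitlab-ci.yml")));
        // Nonexistent plain YAML path: fails the name, directory,
        // and content checks alike.
        assert!(!is_gitlab_pipeline(Path::new("workflow.yml")));
    }
}
```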
#[tokio::main]
async fn main() {
    let cli = Wrkflw::parse();
    let verbose = cli.verbose;
    let debug = cli.debug;

    // Set log level based on command line flags
    if debug {
        logging::set_log_level(logging::LogLevel::Debug);
        logging::debug("Debug mode enabled - showing detailed logs");
    } else if verbose {
        logging::set_log_level(logging::LogLevel::Info);
        logging::info("Verbose mode enabled");
    } else {
        logging::set_log_level(logging::LogLevel::Warning);
    }

    // Setup a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate {
            path,
            gitlab,
            exit_code,
            no_exit_code,
        }) => {
            // Determine the path to validate
            let validate_path = path
                .clone()
                .unwrap_or_else(|| PathBuf::from(".github/workflows"));

            // Check if the path exists
            if !validate_path.exists() {
                eprintln!("Error: Path does not exist: {}", validate_path.display());
                std::process::exit(1);
            }

            // Determine if we're validating a GitLab pipeline based on the --gitlab flag or file detection
            let force_gitlab = *gitlab;
            let mut validation_failed = false;

            if validate_path.is_dir() {
                // Validate all workflow files in the directory
                let entries = std::fs::read_dir(&validate_path)
                    .expect("Failed to read directory")
                    .filter_map(|entry| entry.ok())
                    .filter(|entry| {
                        entry.path().is_file()
                            && entry
                                .path()
                                .extension()
                                .is_some_and(|ext| ext == "yml" || ext == "yaml")
                    })
                    .collect::<Vec<_>>();

                println!("Validating {} workflow file(s)...", entries.len());

                for entry in entries {
                    let path = entry.path();
                    let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);

                    let file_failed = if is_gitlab {
                        validate_gitlab_pipeline(&path, verbose)
                    } else {
                        validate_github_workflow(&path, verbose)
                    };

                    if file_failed {
                        validation_failed = true;
                    }
                }
            } else {
                // Validate a single workflow file
                let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);

                validation_failed = if is_gitlab {
                    validate_gitlab_pipeline(&validate_path, verbose)
                } else {
                    validate_github_workflow(&validate_path, verbose)
                };
            }

            // Set exit code if validation failed and exit_code flag is true (and no_exit_code is false)
            if validation_failed && *exit_code && !*no_exit_code {
                std::process::exit(1);
            }
        }
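The directory scan above keeps only `.yml`/`.yaml` entries via `Option::is_some_and`; the filter on its own:

```rust
use std::path::Path;

fn main() {
    // Same predicate as in the Validate arm's read_dir filter.
    let is_yaml =
        |p: &Path| p.extension().is_some_and(|ext| ext == "yml" || ext == "yaml");

    assert!(is_yaml(Path::new("build.yml")));
    assert!(is_yaml(Path::new("deploy.yaml")));
    assert!(!is_yaml(Path::new("README.md")));
}
```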
        Some(Commands::Run {
            path,
            emulate,
            show_action_messages: _,
            preserve_containers_on_failure,
            gitlab,
        }) => {
            // Create execution configuration
            let config = executor::ExecutionConfig {
                runtime_type: if *emulate {
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                },
                verbose,
                preserve_containers_on_failure: *preserve_containers_on_failure,
            };

            // Check if we're explicitly or implicitly running a GitLab pipeline
            let is_gitlab = *gitlab || is_gitlab_pipeline(path);
            let workflow_type = if is_gitlab {
                "GitLab CI pipeline"
            } else {
                "GitHub workflow"
            };

            logging::info(&format!("Running {} at: {}", workflow_type, path.display()));

            // Execute the workflow
            let result = executor::execute_workflow(path, config)
                .await
                .unwrap_or_else(|e| {
                    eprintln!("Error executing workflow: {}", e);
                    std::process::exit(1);
                });

            // Print execution summary
            if result.failure_details.is_some() {
                eprintln!("❌ Workflow execution failed:");
                if let Some(details) = result.failure_details {
                    if verbose {
                        // Show full error details in verbose mode
                        eprintln!("{}", details);
                    } else {
                        // Show simplified error info in non-verbose mode
                        let simplified_error = details
                            .lines()
                            .filter(|line| line.contains("❌") || line.trim().starts_with("Error:"))
                            .take(5) // Limit to the first 5 error lines
                            .collect::<Vec<&str>>()
                            .join("\n");

                        eprintln!("{}", simplified_error);

                        if details.lines().count() > 5 {
                            eprintln!("\nUse --verbose flag to see full error details");
                        }
                    }
                }
                std::process::exit(1);
            } else {
                println!("✅ Workflow execution completed successfully!");

                // Always show job summary
                println!("\nJob summary:");
                for job in result.jobs {
                    println!(
                        "  {} {} ({})",
                        match job.status {
                            executor::JobStatus::Success => "✅",
                            executor::JobStatus::Failure => "❌",
                            executor::JobStatus::Skipped => "⏭️",
                        },
                        job.name,
                        match job.status {
                            executor::JobStatus::Success => "success",
                            executor::JobStatus::Failure => "failure",
                            executor::JobStatus::Skipped => "skipped",
                        }
                    );

                    // Always show steps, not just in debug mode
                    println!("    Steps:");
                    for step in job.steps {
                        let step_status = match step.status {
                            executor::StepStatus::Success => "✅",
                            executor::StepStatus::Failure => "❌",
                            executor::StepStatus::Skipped => "⏭️",
                        };

                        println!("      {} {}", step_status, step.name);

                        // If step failed and we're not in verbose mode, show condensed error info
                        if step.status == executor::StepStatus::Failure && !verbose {
                            // Extract error information from step output
                            let error_lines = step
                                .output
                                .lines()
                                .filter(|line| {
                                    line.contains("error:")
                                        || line.contains("Error:")
                                        || line.trim().starts_with("Exit code:")
                                        || line.contains("failed")
                                })
                                .take(3) // Limit to 3 most relevant error lines
                                .collect::<Vec<&str>>();

                            if !error_lines.is_empty() {
                                println!("        Error details:");
                                for line in error_lines {
                                    println!("          {}", line.trim());
                                }

                                if step.output.lines().count() > 3 {
                                    println!("          (Use --verbose for full output)");
                                }
                            }
                        }
                    }
                }
            }

            // Cleanup is handled automatically via the signal handler
        }
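The condensed failure output is plain line filtering over the captured details; the same idea in isolation (the sample text is made up):

```rust
fn main() {
    let details = "step started\nError: docker pull failed\n❌ job build failed\nnote: see logs";

    // Keep only the lines a user needs first, capped at five.
    let simplified = details
        .lines()
        .filter(|line| line.contains("❌") || line.trim().starts_with("Error:"))
        .take(5)
        .collect::<Vec<&str>>()
        .join("\n");

    assert_eq!(simplified, "Error: docker pull failed\n❌ job build failed");
}
```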
        Some(Commands::TriggerGitlab { branch, variable }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let variables = variable
                .as_ref()
                .map(|v| v.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the pipeline
            if let Err(e) = gitlab::trigger_pipeline(branch.as_deref(), variables).await {
                eprintln!("Error triggering GitLab pipeline: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Tui {
            path,
            emulate,
            show_action_messages: _,
            preserve_containers_on_failure,
        }) => {
            // Set runtime type based on the emulate flag
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // Call the TUI implementation from the ui crate
            if let Err(e) = ui::run_wrkflw_tui(
                path.as_ref(),
                runtime_type,
                verbose,
                *preserve_containers_on_failure,
            )
            .await
            {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let inputs = input
                .as_ref()
                .map(|i| i.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the workflow
            if let Err(e) = github::trigger_workflow(workflow, branch.as_deref(), inputs).await {
                eprintln!("Error triggering GitHub workflow: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::List) => {
            list_workflows_and_pipelines(verbose);
        }
        None => {
            // Launch TUI by default when no command is provided
            let runtime_type = executor::RuntimeType::Docker;

            // Call the TUI implementation from the ui crate with default path
            if let Err(e) = ui::run_wrkflw_tui(None, runtime_type, verbose, false).await {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
    }
}
/// Validate a GitHub workflow file
/// Returns true if validation failed, false if it passed
fn validate_github_workflow(path: &Path, verbose: bool) -> bool {
    print!("Validating GitHub workflow file: {}... ", path.display());

    // Use the ui crate's validate_workflow function
    match ui::validate_workflow(path, verbose) {
        Ok(_) => {
            // The detailed validation output is already printed by the function.
            // We still need to know whether there were validation issues, and
            // since ui::validate_workflow doesn't return the validation result
            // directly, we call the evaluator directly to get it.
            match evaluator::evaluate_workflow_file(path, verbose) {
                Ok(result) => !result.is_valid,
                Err(_) => true, // Parse errors count as validation failure
            }
        }
        Err(e) => {
            eprintln!("Error validating workflow: {}", e);
            true // Any error counts as validation failure
        }
    }
}
/// Validate a GitLab CI/CD pipeline file
/// Returns true if validation failed, false if it passed
fn validate_gitlab_pipeline(path: &Path, verbose: bool) -> bool {
    print!("Validating GitLab CI pipeline file: {}... ", path.display());

    // Parse and validate the pipeline file
    match parser::gitlab::parse_pipeline(path) {
        Ok(pipeline) => {
            println!("✅ Valid syntax");

            // Additional structural validation
            let validation_result = validators::validate_gitlab_pipeline(&pipeline);

            if !validation_result.is_valid {
                println!("⚠️ Validation issues:");
                for issue in validation_result.issues {
                    println!("  - {}", issue);
                }
                true // Validation failed
            } else {
                if verbose {
                    println!("✅ All validation checks passed");
                }
                false // Validation passed
            }
        }
        Err(e) => {
            println!("❌ Invalid");
            eprintln!("Validation failed: {}", e);
            true // Parse error counts as validation failure
        }
    }
}
/// List available workflows and pipelines in the repository
fn list_workflows_and_pipelines(verbose: bool) {
    // Check for GitHub workflows
    let github_path = PathBuf::from(".github/workflows");
    if github_path.exists() && github_path.is_dir() {
        println!("GitHub Workflows:");

        let entries = std::fs::read_dir(&github_path)
            .expect("Failed to read directory")
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .path()
                        .extension()
                        .is_some_and(|ext| ext == "yml" || ext == "yaml")
            })
            .collect::<Vec<_>>();

        if entries.is_empty() {
            println!("  No workflow files found in .github/workflows");
        } else {
            for entry in entries {
                println!("  - {}", entry.path().display());
            }
        }
    } else {
        println!("GitHub Workflows: No .github/workflows directory found");
    }

    // Check for GitLab CI pipeline
    let gitlab_path = PathBuf::from(".gitlab-ci.yml");
    if gitlab_path.exists() && gitlab_path.is_file() {
        println!("GitLab CI Pipeline:");
        println!("  - {}", gitlab_path.display());
    } else {
        println!("GitLab CI Pipeline: No .gitlab-ci.yml file found");
    }

    // Check for other GitLab CI pipeline files
    if verbose {
        println!("Searching for other GitLab CI pipeline files...");

        let entries = walkdir::WalkDir::new(".")
            .follow_links(true)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .file_name()
                        .to_string_lossy()
                        .ends_with("gitlab-ci.yml")
                    && entry.path() != gitlab_path
            })
            .collect::<Vec<_>>();

        if !entries.is_empty() {
            println!("Additional GitLab CI Pipeline files:");
            for entry in entries {
                println!("  - {}", entry.path().display());
            }
        }
    }
}
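The verbose search uses `walkdir` with the same suffix filter; stripped to its core:

```rust
use walkdir::WalkDir;

fn main() {
    // Walk the working tree and print every file named like a GitLab CI config.
    for entry in WalkDir::new(".")
        .follow_links(true)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| {
            e.path().is_file() && e.file_name().to_string_lossy().ends_with("gitlab-ci.yml")
        })
    {
        println!("{}", entry.path().display());
    }
}
```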
774 red.cast (new file)
@@ -0,0 +1,774 @@
{"version": 2, "width": 245, "height": 61, "timestamp": 1746300930, "env": {"SHELL": "/bin/zsh", "TERM": "xterm-256color"}}
|
||||
[0.393681, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[0.394167, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[0.394183, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[0.395693, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[0.396236, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[K"]
|
||||
[0.396293, "o", "\u001b[?1h\u001b="]
|
||||
[0.396318, "o", "\u001b[?2004h"]
|
||||
[0.437911, "o", "\r\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[1.784697, "o", "c"]
|
||||
[1.885654, "o", "\bca"]
|
||||
[2.079234, "o", "t"]
|
||||
[2.252374, "o", " "]
|
||||
[2.858954, "o", "t"]
|
||||
[2.980971, "o", "e"]
|
||||
[3.172113, "o", "s"]
|
||||
[3.276278, "o", "t"]
|
||||
[3.976366, "o", "_"]
|
||||
[5.418211, "o", "g"]
|
||||
[5.804484, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[7.30486, "o", "\u0007"]
|
||||
[7.304922, "o", "\b\u001b[0m/\r\r\n"]
|
||||
[7.305065, "o", "\u001b[J\u001b[0madvanced.gitlab-ci.yml \u001b[Jbasic.gitlab-ci.yml \u001b[Jdocker.gitlab-ci.yml \u001b[Jincludes.gitlab-ci.yml \u001b[Jinvalid.gitlab-ci.yml \u001b[Jminimal.gitlab-ci.yml \u001b[Jservices.gitlab-ci.yml \u001b[Jworkflow.gitlab-ci.yml\u001b[J\u001b[A\u001b[0m\u001b[27m\u001b[24m\r\u001b[19Ccat test_gitlab_ci/\u001b[K"]
|
||||
[8.264729, "o", "m"]
|
||||
[8.4479, "o", "i"]
|
||||
[8.643085, "o", "nimal.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[9.529005, "o", "\b\u001b[0m \b"]
|
||||
[9.529291, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n\u001b[J"]
|
||||
[9.531431, "o", "\u001b]2;cat test_gitlab_ci/minimal.gitlab-ci.yml\u0007\u001b]1;cat\u0007"]
|
||||
[9.563469, "o", "# Minimal GitLab CI configuration\r\n\r\nimage: rust:latest\r\n\r\nbuild:\r\n script:\r\n - cargo build\r\n\r\ntest:\r\n script:\r\n - cargo test "]
|
||||
[9.563717, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[9.564397, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[9.564419, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[9.566692, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[9.568961, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[9.569081, "o", "\u001b[?1h\u001b="]
|
||||
[9.569181, "o", "\u001b[?2004h"]
|
||||
[11.969049, "o", "c"]
|
||||
[12.088692, "o", "\bca"]
|
||||
[12.375203, "o", "r"]
|
||||
[12.467428, "o", "g"]
|
||||
[12.549475, "o", "o"]
|
||||
[12.816019, "o", "r"]
|
||||
[13.230493, "o", "\b \b"]
|
||||
[13.400359, "o", " "]
|
||||
[13.600041, "o", "r"]
|
||||
[13.715537, "o", " "]
|
||||
[14.313772, "o", "v"]
|
||||
[14.503158, "o", "a"]
|
||||
[14.615728, "o", "l"]
|
||||
[14.836236, "o", "i"]
|
||||
[14.961289, "o", "d"]
|
||||
[15.051538, "o", "a"]
|
||||
[15.243561, "o", "t"]
|
||||
[15.350827, "o", "e"]
|
||||
[15.447092, "o", " "]
|
||||
[19.359227, "o", "\u001b[7mtest_gitlab_ci/minimal.gitlab-ci.yml\u001b[27m"]
|
||||
[20.437202, "o", "\u001b[36D\u001b[27mt\u001b[27me\u001b[27ms\u001b[27mt\u001b[27m_\u001b[27mg\u001b[27mi\u001b[27mt\u001b[27ml\u001b[27ma\u001b[27mb\u001b[27m_\u001b[27mc\u001b[27mi\u001b[27m/\u001b[27mm\u001b[27mi\u001b[27mn\u001b[27mi\u001b[27mm\u001b[27ma\u001b[27ml\u001b[27m.\u001b[27mg\u001b[27mi\u001b[27mt\u001b[27ml\u001b[27ma\u001b[27mb\u001b[27m-\u001b[27mc\u001b[27mi\u001b[27m.\u001b[27my\u001b[27mm\u001b[27ml"]
|
||||
[20.437676, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n"]
|
||||
[20.439716, "o", "\u001b]2;cargo r validate test_gitlab_ci/minimal.gitlab-ci.yml\u0007\u001b]1;cargo\u0007"]
|
||||
[20.763171, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m proc-macro2 v1.0.94\r\n\u001b[1m\u001b[32m Compiling\u001b[0m unicode-ident v1.0.18\r\n"]
|
||||
[20.763198, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m libc v0.2.171\r\n\u001b[1m\u001b[32m Compiling\u001b[0m autocfg v1.4.0\r\n\u001b[1m\u001b[32m Compiling\u001b[0m cfg-if v1.0.0\r\n"]
|
||||
[20.763207, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m memchr v2.7.4\r\n"]
|
||||
[20.763396, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m serde v1.0.219\r\n\u001b[1m\u001b[32m Compiling\u001b[0m smallvec v1.14.0\r\n\u001b[1m\u001b[32m Compiling\u001b[0m itoa v1.0.15\r\n\u001b[1m\u001b[32m Compiling\u001b[0m bitflags v2.9.0\r\n\u001b[1m\u001b[32m Compiling\u001b[0m parking_lot_core v0.9.10\r\n\u001b[1m\u001b[32m Compiling\u001b[0m scopeguard v1.2.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 0/270: scopeguard, autocfg, libc(build.rs), serde(build.rs), unicode-ident, memchr, cfg-if, smallvec, proc-macro2(build.rs), itoa, bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.863836, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-project-lite v0.2.16\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 1/270: scopeguard, autocfg, libc(build.rs), serde(build.rs), unicode-ident, memchr, pin-project-lite, smallvec, proc-macro2(build.rs), itoa, bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.888886, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hashbrown v0.15.2\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 2/270: autocfg, libc(build.rs), serde(build.rs), hashbrown, unicode-ident, memchr, pin-project-lite, smallvec, proc-macro2(build.rs), itoa, bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.889407, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m equivalent v1.0.2\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 3/270: equivalent, autocfg, libc(build.rs), serde(build.rs), hashbrown, memchr, pin-project-lite, smallvec, proc-macro2(build.rs), itoa, bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.901807, "o", "\u001b[K"]
|
||||
[20.901967, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m stable_deref_trait v1.2.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 4/270: equivalent, autocfg, libc(build.rs), serde(build.rs), hashbrown, stable_deref_trait, memchr, pin-project-lite, smallvec, proc-macro2(build.rs), bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.916803, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m once_cell v1.21.2\r\n"]
|
||||
[20.916855, "o", "\u001b[1m\u001b[36m Building\u001b[0m [ ] 5/270: equivalent, autocfg, libc(build.rs), serde(build.rs), hashbrown, stable_deref_trait, memchr, once_cell, smallvec, proc-macro2(build.rs), bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.954688, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m core-foundation-sys v0.8.7\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 6/270: equivalent, autocfg, core-foundation-sys, libc(build.rs), serde(build.rs), hashbrown, stable_deref_trait, memchr, once_cell, proc-macro2(build.rs), bitflags, parking_lot_core(build.rs) \r"]
|
||||
[20.960325, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bytes v1.10.1\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 7/270: autocfg, core-foundation-sys, hashbrown, once_cell, parking_lot_core(build.rs), equivalent, bytes, libc(build.rs), serde(build.rs), stable_deref_trait, memchr, proc-macro2(build.rs) \r"]
|
||||
[20.961287, "o", "\u001b[1m\u001b[36m Building\u001b[0m [ ] 8/270: autocfg, core-foundation-sys, hashbrown, once_cell, equivalent, bytes, libc(build.rs), serde(build.rs), stable_deref_trait, parking_lot_core(build), memchr, proc-macro2(build.rs) \r"]
|
||||
[20.964842, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-sink v0.3.31\r\n\u001b[1m\u001b[32m Compiling\u001b[0m futures-core v0.3.31\r\n\u001b[1m\u001b[36m Building\u001b[0m [ ] 10/270: autocfg, core-foundation-sys, hashbrown, once_cell, bytes, futures-core, libc(build.rs), serde(build.rs), parking_lot_core(build), memchr, proc-macro2(build.rs), futures-sink \r"]
|
||||
[20.979236, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 11/270: autocfg, core-foundation-sys, hashbrown, once_cell, bytes, futures-core, proc-macro2(build), libc(build.rs), serde(build.rs), parking_lot_core(build), memchr, futures-sink \r"]
|
||||
[20.981339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 12/270: autocfg, core-foundation-sys, hashbrown, once_cell, bytes, futures-core, proc-macro2(build), libc(build.rs), serde(build), parking_lot_core(build), memchr, futures-sink \r"]
|
||||
[21.033814, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ryu v1.0.20\r\n"]
|
||||
[21.033863, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 13/270: autocfg, core-foundation-sys, ryu, hashbrown, bytes, futures-core, proc-macro2(build), libc(build.rs), serde(build), parking_lot_core(build), memchr, futures-sink \r"]
|
||||
[21.034272, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 14/270: autocfg, core-foundation-sys, ryu, hashbrown, bytes, futures-core, proc-macro2(build), libc(build), serde(build), parking_lot_core(build), memchr, futures-sink \r"]
|
||||
[21.04051, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_json v1.0.140\r\n"]
|
||||
[21.040547, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 15/270: autocfg, core-foundation-sys, ryu, hashbrown, bytes, futures-core, serde_json(build.rs), proc-macro2(build), libc(build), serde(build), parking_lot_core(build), memchr \r"]
|
||||
[21.05181, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-channel v0.3.31\r\n"]
|
||||
[21.052003, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 16/270: futures-channel, autocfg, core-foundation-sys, ryu, hashbrown, bytes, serde_json(build.rs), proc-macro2(build), libc(build), serde(build), parking_lot_core(build), memchr "]
|
||||
[21.052164, "o", "\r"]
|
||||
[21.063105, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m lock_api v0.4.12\r\n"]
|
||||
[21.063156, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 17/270: futures-channel, core-foundation-sys, ryu, hashbrown, bytes, lock_api(build.rs), serde_json(build.rs), proc-macro2(build), libc(build), serde(build), parking_lot_core(build), memchr \r"]
|
||||
[21.07565, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m slab v0.4.9\r\n"]
|
||||
[21.075716, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 18/270: futures-channel, ryu, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build.rs), proc-macro2(build), libc(build), serde(build), parking_lot_core(build), memchr \r"]
|
||||
[21.108286, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-traits v0.2.19\r\n"]
|
||||
[21.10843, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 19/270: futures-channel, ryu, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build.rs), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), parking_lot_core(build) \r"]
|
||||
[21.135823, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m litemap v0.7.5\r\n"]
|
||||
[21.136189, "o", "\u001b[1m\u001b[36m Building\u001b[0m [> ] 20/270: futures-channel, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build.rs), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), parking_lot_core(build), lit...\r"]
|
||||
[21.138845, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 21/270: futures-channel, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), parking_lot_core(build), litemap \r\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_locid_transform_data v1.5.1\r\n\u001b[1m\u001b[36m Building\u001b[0m [=> ] 22/270: futures-channel, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build....\r"]
|
||||
[21.161233, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-utils v0.1.0\r\n"]
|
||||
[21.161271, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 23/270: pin-utils, hashbrown, slab(build.rs), bytes, lock_api(build.rs), serde_json(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build.rs), l...\r"]
|
||||
[21.161905, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 24/270: pin-utils, hashbrown, slab(build.rs), bytes, serde_json(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build.rs), litemap, lock_api(build)\r"]
|
||||
[21.171062, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 25/270: pin-utils, hashbrown, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build.rs), litemap, lock_api(build) \r"]
|
||||
[21.18027, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustix v1.0.3\r\n"]
|
||||
[21.180299, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 26/270: pin-utils, rustix(build.rs), bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build.rs), litemap, lock_api...\r"]
|
||||
[21.196422, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m writeable v0.5.5\r\n"]
|
||||
[21.19645, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 27/270: rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), num-traits(build.rs), icu_locid_transform_data(build.rs), litemap, lock_api...\r"]
|
||||
[21.209074, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-task v0.3.31\r\n\u001b[1m\u001b[36m Building\u001b[0m [=> ] 28/270: rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), icu_locid_transform_data(build.rs), litemap, lock_api(build), futures-task \r"]
|
||||
[21.230428, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-io v0.3.31\r\n"]
|
||||
[21.23048, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 29/270: rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), futures-io, litemap, lock_api(build), futures-task \r"]
|
||||
[21.24605, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 30/270: rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), futures-io, lock_api(build), icu_locid_transform_data(build), futures-task \r"]
|
||||
[21.27647, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=> ] 31/270: num-traits(build), rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), futures-io, lock_api(build), icu_locid_transform_data(bu...\r"]
|
||||
[21.294729, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m thiserror v1.0.69\r\n"]
|
||||
[21.294924, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 32/270: num-traits(build), rustix(build.rs), writeable, bytes, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), lock_api(build), icu_locid_transform_data(build), thiser...\r"]
|
||||
[21.312333, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_properties_data v1.5.1\r\n"]
|
||||
[21.312427, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 33/270: num-traits(build), rustix(build.rs), writeable, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), icu_properties_data(build.rs), lock_api(build), icu_locid_trans...\r"]
|
||||
[21.314227, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 34/270: num-traits(build), rustix(build), writeable, serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), icu_properties_data(build.rs), lock_api(build), icu_locid_transfor...\r"]
|
||||
[21.33527, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m getrandom v0.3.2\r\n"]
|
||||
[21.335367, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 35/270: num-traits(build), rustix(build), getrandom(build.rs), serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), icu_properties_data(build.rs), lock_api(build), icu_loci...\r"]
|
||||
[21.382291, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 36/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build.rs), serde_json(build), slab(build), proc-macro2(build), libc(build), serde(build), lock_api(build), icu_locid_t...\r"]
|
||||
[21.409263, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 37/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build.rs), proc-macro2, serde_json(build), slab(build), libc(build), serde(build), lock_api(build), icu_locid_transfor...\r"]
|
||||
[21.409608, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 38/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build.rs), proc-macro2, serde_json(build), slab(build), thiserror(build), libc(build), serde(build), lock_api(build), ...\r"]
|
||||
[21.452063, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 39/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), proc-macro2, serde_json(build), slab(build), thiserror(build), libc(build), serde(build), lock_api(build), icu...\r"]
|
||||
[21.640658, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m quote v1.0.40\r\n"]
|
||||
[21.640694, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 40/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), quote, proc-macro2, serde_json(build), slab(build), thiserror(build), libc(build), lock_api(build), icu_locid_...\r"]
|
||||
[21.686485, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_normalizer_data v1.5.1\r\n"]
|
||||
[21.686579, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==> ] 41/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), icu_normalizer_data(build.rs), quote, serde_json(build), slab(build), thiserror(build), libc(build), lock_api(...\r"]
|
||||
[21.759368, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m syn v2.0.100\r\n"]
|
||||
[21.759454, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 42/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, quote, serde_json(build), slab(build), thiserror(build), libc(build), lock_api(build), icu_locid_transfor...\r"]
|
||||
[21.764469, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fnv v1.0.7\r\n"]
|
||||
[21.764538, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 43/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, serde_json(build), fnv, slab(build), thiserror(build), libc(build), lock_api(build), icu_locid_transform_...\r"]
|
||||
[21.796702, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m http v0.2.12\r\n"]
|
||||
[21.796791, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 44/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, serde_json(build), slab(build), thiserror(build), libc(build), http, lock_api(build), icu_locid_transform...\r"]
|
||||
[21.889367, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 45/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, serde_json(build), slab(build), thiserror(build), http, lock_api(build), libc, icu_locid_transform_data(b...\r"]
|
||||
[22.197788, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 46/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, slab(build), thiserror(build), http, lock_api(build), libc, icu_locid_transform_data(build), icu_normaliz...\r"]
|
||||
[22.324053, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m signal-hook-registry v1.4.2\r\n"]
|
||||
[22.324162, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 47/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, slab(build), thiserror(build), signal-hook-registry, lock_api(build), libc, icu_locid_transform_data(buil...\r"]
|
||||
[22.335479, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 48/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, parking_lot_core, slab(build), thiserror(build), signal-hook-registry, lock_api(build), icu_locid_transfo...\r"]
|
||||
[22.463262, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m errno v0.3.10\r\n"]
|
||||
[22.463317, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 49/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, slab(build), thiserror(build), signal-hook-registry, lock_api(build), icu_locid_transform_data(build), ic...\r"]
|
||||
[22.46546, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 50/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, lock_api, slab(build), thiserror(build), signal-hook-registry, icu_locid_transform_data(build), icu_norma...\r"]
|
||||
[22.513539, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m mio v1.0.3\r\n"]
|
||||
[22.51357, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===> ] 51/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, lock_api, slab(build), thiserror(build), icu_locid_transform_data(build), mio, icu_normalizer_data(build)...\r"]
|
||||
[22.519736, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m socket2 v0.5.8\r\n"]
|
||||
[22.519842, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 52/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, lock_api, slab(build), thiserror(build), icu_locid_transform_data(build), mio, icu_normalizer_data(build)...\r"]
[22.556956, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m parking_lot v0.12.3\r\n"]
[22.557057, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 53/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, slab(build), thiserror(build), parking_lot, icu_locid_transform_data(build), mio, icu_normalizer_data(bui...\r"]
[22.712544, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 54/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, slab, thiserror(build), parking_lot, icu_locid_transform_data(build), mio, icu_normalizer_data(build), so...\r"]
[22.747265, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tracing-core v0.1.33\r\n"]
[22.747298, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 55/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, slab, thiserror(build), icu_locid_transform_data(build), mio, icu_normalizer_data(build), s...\r"]
[22.747509, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m log v0.4.27\r\n"]
[22.747596, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 56/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, log, slab, thiserror(build), icu_locid_transform_data(build), mio, icu_normalizer_data(build) \r"]
[22.777478, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m unsafe-libyaml v0.2.11\r\n"]
[22.777508, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 57/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, log, thiserror(build), icu_locid_transform_data(build), mio, icu_normalizer_data(build), un...\r"]
[22.803035, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 58/270: icu_locid_transform_data, num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, log, thiserror(build), mio, icu_normalizer_data(build), unsafe-li...\r"]
[22.83338, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m httparse v1.10.1\r\n"]
[22.833504, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 59/270: icu_locid_transform_data, num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, httparse(build.rs), thiserror(build), mio, icu_normalizer_data(bu...\r"]
[22.83581, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m core-foundation v0.9.4\r\n"]
[22.835833, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 60/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, httparse(build.rs), thiserror(build), core-foundation, mio, icu_normalizer_data(build), uns...\r"]
[22.836268, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fastrand v2.3.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [====> ] 61/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, httparse(build.rs), thiserror(build), fastrand, core-foundation, icu_normalizer_data(build)...\r"]
[22.941203, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tracing v0.1.41\r\n"]
[22.941376, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====> ] 62/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, httparse(build.rs), thiserror(build), core-foundation, tracing, icu_normalizer_data(build),...\r"]
[22.981021, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 63/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, thiserror(build), httparse(build), core-foundation, tracing, icu_normalizer_data(build), un...\r"]
[23.021436, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m utf8_iter v1.0.4\r\n"]
[23.021547, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 64/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, utf8_iter, thiserror(build), httparse(build), tracing, icu_normalizer_data(build), unsafe-l...\r"]
[23.038774, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m write16 v1.0.0\r\n"]
[23.038804, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 65/270: num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, tracing-core, write16, utf8_iter, thiserror(build), httparse(build), icu_normalizer_data(build), unsafe-l...\r"]
[23.055807, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m utf16_iter v1.0.5\r\n"]
[23.055835, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 66/270: utf16_iter, num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, write16, utf8_iter, thiserror(build), httparse(build), icu_normalizer_data(build), unsafe-lib...\r"]
[23.074201, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m try-lock v0.2.5\r\n"]
[23.07423, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 67/270: utf16_iter, num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, try-lock, utf8_iter, thiserror(build), httparse(build), icu_normalizer_data(build), unsafe-li...\r"]
[23.082638, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m security-framework-sys v2.14.0\r\n"]
[23.082663, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 68/270: utf16_iter, num-traits(build), rustix(build), icu_properties_data(build), getrandom(build), syn, security-framework-sys, try-lock, thiserror(build), httparse(build), icu_normalizer_data(buil...\r"]
[23.086985, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 69/270: utf16_iter, rustix(build), icu_properties_data(build), getrandom(build), syn, security-framework-sys, num-traits, try-lock, thiserror(build), httparse(build), icu_normalizer_data(build), uns...\r"]
[23.107838, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m want v0.3.1\r\n\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 70/270: rustix(build), icu_properties_data(build), getrandom(build), syn, security-framework-sys, num-traits, try-lock, thiserror(build), httparse(build), want, icu_normalizer_data(build), unsafe-li...\r"]
[23.114348, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m http-body v0.4.6\r\n"]
[23.11446, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 71/270: rustix(build), icu_properties_data(build), getrandom(build), syn, security-framework-sys, num-traits, thiserror(build), httparse(build), http-body, want, icu_normalizer_data(build), unsafe-l...\r"]
[23.167457, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m percent-encoding v2.3.1\r\n"]
[23.167557, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====> ] 72/270: rustix(build), icu_properties_data(build), getrandom(build), syn, percent-encoding, num-traits, thiserror(build), httparse(build), http-body, want, icu_normalizer_data(build), unsafe-libyaml \r"]
[23.173471, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m httpdate v1.0.3\r\n"]
[23.173495, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 73/270: rustix(build), icu_properties_data(build), getrandom(build), syn, percent-encoding, httpdate, num-traits, thiserror(build), httparse(build), http-body, icu_normalizer_data(build), unsafe-lib...\r"]
[23.212014, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tower-service v0.3.3\r\n"]
[23.212044, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 74/270: rustix(build), icu_properties_data(build), getrandom(build), syn, percent-encoding, httpdate, num-traits, tower-service, thiserror(build), httparse(build), icu_normalizer_data(build), unsafe...\r"]
[23.244991, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m native-tls v0.2.14\r\n"]
[23.245021, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 75/270: rustix(build), icu_properties_data(build), getrandom(build), syn, httpdate, num-traits, tower-service, native-tls(build.rs), thiserror(build), httparse(build), icu_normalizer_data(build), un...\r\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m form_urlencoded v1.2.1\r\n"]
[23.24516, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 76/270: rustix(build), icu_properties_data(build), getrandom(build), syn, httpdate, num-traits, native-tls(build.rs), thiserror(build), httparse(build), form_urlencoded, icu_normalizer_data(build), ...\r"]
[23.280024, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m security-framework v2.11.1\r\n"]
[23.280156, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 77/270: rustix(build), icu_properties_data(build), getrandom(build), syn, num-traits, native-tls(build.rs), security-framework, thiserror(build), httparse(build), form_urlencoded, icu_normalizer_dat...\r"]
[23.291052, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m system-configuration-sys v0.5.0\r\n"]
[23.291221, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 78/270: rustix(build), icu_properties_data(build), getrandom(build), syn, num-traits, native-tls(build.rs), system-configuration-sys(build.rs), security-framework, thiserror(build), httparse(build),...\r"]
[23.325386, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m synstructure v0.13.1\r\n"]
[23.325527, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 79/270: rustix(build), icu_properties_data(build), getrandom(build), syn, num-traits, native-tls(build.rs), system-configuration-sys(build.rs), security-framework, thiserror(build), httparse(build),...\r"]
[23.330716, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 80/270: rustix(build), icu_properties_data(build), getrandom(build), syn, num-traits, system-configuration-sys(build.rs), security-framework, thiserror(build), httparse(build), native-tls(build), sy...\r"]
[23.340335, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 81/270: icu_properties_data(build), getrandom(build), syn, num-traits, system-configuration-sys(build.rs), security-framework, thiserror(build), httparse(build), native-tls(build), synstructure, icu...\r"]
[23.380593, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 82/270: system-configuration-sys(build), icu_properties_data(build), getrandom(build), syn, num-traits, security-framework, thiserror(build), httparse(build), native-tls(build), synstructure, icu_no...\r"]
[23.429561, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-integer v0.1.46\r\n"]
[23.42969, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======> ] 83/270: system-configuration-sys(build), icu_properties_data(build), getrandom(build), syn, num-integer, security-framework, thiserror(build), httparse(build), native-tls(build), synstructure, icu_n...\r"]
[23.434159, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 84/270: system-configuration-sys(build), getrandom(build), syn, num-integer, security-framework, thiserror(build), icu_properties_data, httparse(build), native-tls(build), synstructure, icu_normaliz...\r"]
[23.516165, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m aho-corasick v1.1.3\r\n"]
[23.516195, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 85/270: system-configuration-sys(build), getrandom(build), syn, aho-corasick, num-integer, security-framework, thiserror(build), httparse(build), native-tls(build), synstructure, icu_normalizer_data...\r"]
[23.601166, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m lazy_static v1.5.0\r\n"]
[23.601196, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 86/270: system-configuration-sys(build), getrandom(build), syn, aho-corasick, lazy_static, security-framework, thiserror(build), httparse(build), native-tls(build), synstructure, icu_normalizer_data...\r"]
[23.613656, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bitflags v1.3.2\r\n"]
[23.613682, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 87/270: system-configuration-sys(build), getrandom(build), syn, aho-corasick, lazy_static, security-framework, thiserror(build), httparse(build), bitflags, native-tls(build), icu_normalizer_data(bui...\r"]
[23.638898, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m regex-syntax v0.8.5\r\n"]
[23.639053, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 88/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, security-framework, thiserror(build), httparse(build), bitflags, native-tls(build), icu_normalizer_data(bu...\r"]
[23.642064, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m base64 v0.21.7\r\n"]
[23.642168, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 89/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, base64, security-framework, thiserror(build), httparse(build), native-tls(build), icu_normalizer_data(buil...\r"]
[23.7033, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m utf8parse v0.2.2\r\n"]
[23.703481, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 90/270: system-configuration-sys(build), utf8parse, regex-syntax, getrandom(build), syn, aho-corasick, base64, security-framework, httparse(build), native-tls(build), icu_normalizer_data(build), rustix\r"]
[23.747399, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anstyle-parse v0.2.6\r\n"]
[23.747622, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 91/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, base64, security-framework, httparse(build), anstyle-parse, native-tls(build), icu_normalizer_data(build),...\r"]
[23.810049, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustls-pemfile v1.0.4\r\n"]
[23.810086, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 92/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, security-framework, rustls-pemfile, httparse(build), anstyle-parse, native-tls(build), icu_normalizer_data...\r"]
[23.826972, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-bigint v0.4.6\r\n"]
[23.826999, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======> ] 93/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, num-bigint, security-framework, rustls-pemfile, httparse(build), native-tls(build), icu_normalizer_data(bu...\r"]
[23.908947, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m encoding_rs v0.8.35\r\n"]
[23.908978, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 94/270: system-configuration-sys(build), regex-syntax, getrandom(build), syn, aho-corasick, num-bigint, encoding_rs, security-framework, httparse(build), native-tls(build), icu_normalizer_data(build...\r"]
[23.92933, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 95/270: system-configuration-sys(build), getrandom, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, security-framework, httparse(build), native-tls(build), icu_normalizer_data(build), rustix \r"]
[24.004068, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tempfile v3.19.1\r\n"]
[24.004095, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 96/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, security-framework, httparse(build), native-tls(build), icu_normalizer_data(build), rustix \r"]
[24.007109, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m version_check v0.9.5\r\n"]
[24.00727, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 97/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, security-framework, httparse(build), version_check, native-tls(build), icu_normalizer_dat...\r"]
[24.043623, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 98/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, icu_normalizer_data, security-framework, httparse(build), version_check, native-tls(build) \r"]
[24.068188, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m either v1.15.0\r\n"]
[24.068525, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 99/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, icu_normalizer_data, httparse(build), version_check, native-tls(build), either \r"]
[24.08646, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anstyle v1.0.10\r\n"]
[24.086583, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 100/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, httparse(build), anstyle, version_check, native-tls(build), either \r"]
[24.167709, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m signal-hook v0.3.17\r\n"]
[24.16779, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 101/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, signal-hook(build.rs), httparse(build), anstyle, version_check, native-tls(build) \r"]
[24.198748, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anstyle-query v1.1.2\r\n"]
[24.198821, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 102/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, signal-hook(build.rs), httparse(build), anstyle, anstyle-query, native-tls(build) \r"]
[24.213334, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m colorchoice v1.0.3\r\n\u001b[1m\u001b[36m Building\u001b[0m [========> ] 103/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, signal-hook(build.rs), colorchoice, httparse(build), anstyle-query, native-tls(build) \r"]
[24.248936, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m mime v0.3.17\r\n\u001b[1m\u001b[36m Building\u001b[0m [========> ] 104/270: system-configuration-sys(build), tempfile, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, mime, signal-hook(build.rs), colorchoice, httparse(build), native-tls(build) \r"]
[24.260788, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m is_terminal_polyfill v1.70.1\r\n"]
[24.261031, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 105/270: system-configuration-sys(build), regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, mime, signal-hook(build.rs), colorchoice, httparse(build), is_terminal_polyfill, native-tls(build) \r"]
[24.264716, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m sync_wrapper v0.1.2\r\n"]
[24.264777, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 106/270: system-configuration-sys(build), regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, mime, signal-hook(build.rs), sync_wrapper, httparse(build), is_terminal_polyfill, native-tls(build) \r"]
[24.26859, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ipnet v2.11.0\r\n"]
[24.26867, "o", "\u001b[1m\u001b[36m Building\u001b[0m [========> ] 107/270: system-configuration-sys(build), ipnet, regex-syntax, syn, aho-corasick, num-bigint, encoding_rs, mime, sync_wrapper, httparse(build), is_terminal_polyfill, native-tls(build) \r"]
[24.269272, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_derive v1.0.219\r\n"]
[24.269318, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 108/270: system-configuration-sys(build), serde_derive, ipnet, regex-syntax, aho-corasick, num-bigint, encoding_rs, mime, sync_wrapper, httparse(build), is_terminal_polyfill, native-tls(build) \r"]
[24.300797, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerofrom-derive v0.1.6\r\n"]
[24.30086, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 109/270: system-configuration-sys(build), serde_derive, ipnet, regex-syntax, aho-corasick, num-bigint, encoding_rs, zerofrom-derive, mime, sync_wrapper, httparse(build), native-tls(build) \r"]
[24.314188, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m yoke-derive v0.7.5\r\n"]
[24.314355, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 110/270: system-configuration-sys(build), serde_derive, ipnet, regex-syntax, yoke-derive, aho-corasick, num-bigint, encoding_rs, zerofrom-derive, mime, httparse(build), native-tls(build) \r"]
[24.358675, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerovec-derive v0.10.3\r\n"]
[24.358726, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 111/270: system-configuration-sys(build), serde_derive, ipnet, regex-syntax, yoke-derive, aho-corasick, num-bigint, zerovec-derive, encoding_rs, zerofrom-derive, mime, native-tls(build) \r"]
[24.409688, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m displaydoc v0.2.5\r\n\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 112/270: system-configuration-sys(build), serde_derive, ipnet, regex-syntax, yoke-derive, aho-corasick, num-bigint, zerovec-derive, encoding_rs, zerofrom-derive, displaydoc, native-tls(build) \r"]
[24.599591, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tokio-macros v2.5.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 113/270: system-configuration-sys(build), serde_derive, ipnet, tokio-macros, regex-syntax, yoke-derive, aho-corasick, num-bigint, zerovec-derive, encoding_rs, zerofrom-derive, displaydoc \r"]
[24.643663, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-macro v0.3.31\r\n"]
[24.643744, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 114/270: system-configuration-sys(build), serde_derive, ipnet, tokio-macros, regex-syntax, yoke-derive, num-bigint, zerovec-derive, encoding_rs, zerofrom-derive, displaydoc, futures-macro \r"]
[24.659942, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_provider_macros v1.5.0\r\n"]
[24.660005, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 115/270: system-configuration-sys(build), serde_derive, icu_provider_macros, tokio-macros, regex-syntax, yoke-derive, num-bigint, zerovec-derive, encoding_rs, zerofrom-derive, displaydoc, futures-macro \r"]
[24.706738, "o", "\u001b[K"]
[24.706797, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m thiserror-impl v1.0.69\r\n\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 116/270: system-configuration-sys(build), serde_derive, icu_provider_macros, tokio-macros, regex-syntax, yoke-derive, zerovec-derive, encoding_rs, zerofrom-derive, displaydoc, futures-macro, thiserro...\r"]
[24.829133, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 117/270: system-configuration-sys(build), serde_derive, icu_provider_macros, tokio-macros, regex-syntax, yoke-derive, zerovec-derive, encoding_rs, zerofrom-derive, httparse, futures-macro, thiserror-...\r"]
[24.844696, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=========> ] 118/270: serde_derive, icu_provider_macros, native-tls, tokio-macros, regex-syntax, yoke-derive, zerovec-derive, encoding_rs, zerofrom-derive, httparse, futures-macro, thiserror-impl \r"]
[24.935985, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 119/270: serde_derive, native-tls, tokio-macros, regex-syntax, yoke-derive, zerovec-derive, encoding_rs, zerofrom-derive, httparse, futures-macro, system-configuration-sys, thiserror-impl \r"]
[24.974286, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m regex-automata v0.4.9\r\n"]
[24.974336, "o", "\u001b[1m\u001b[36m Building\u001b[0m "]
[24.97454, "o", "[==========> ] 120/270: serde_derive, native-tls, tokio-macros, regex-syntax, zerovec-derive, encoding_rs, regex-automata, zerofrom-derive, httparse, futures-macro, system-configuration-sys, thiserror-impl \r"]
[24.996321, "o", "\u001b[K"]
[24.996368, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m tokio v1.44.1\r\n"]
[24.996527, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 121/270: serde_derive, native-tls, regex-syntax, zerovec-derive, encoding_rs, regex-automata, zerofrom-derive, tokio, httparse, futures-macro, system-configuration-sys, thiserror-impl \r"]
[25.002432, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m system-configuration v0.5.1\r\n"]
[25.002466, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 122/270: serde_derive, native-tls, regex-syntax, zerovec-derive, encoding_rs, regex-automata, zerofrom-derive, tokio, system-configuration, httparse, futures-macro, thiserror-impl \r"]
[25.007122, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-rational v0.4.2\r\n\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 123/270: serde_derive, native-tls, regex-syntax, encoding_rs, regex-automata, zerofrom-derive, tokio, system-configuration, httparse, num-rational, futures-macro, thiserror-impl \r"]
[25.032568, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anstream v0.6.18\r\n"]
[25.033282, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 124/270: serde_derive, native-tls, regex-syntax, encoding_rs, regex-automata, zerofrom-derive, tokio, system-configuration, anstream, num-rational, futures-macro, thiserror-impl \r"]
[25.059859, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerofrom v0.1.6\r\n\u001b[1m\u001b[36m Building\u001b[0m "]
[25.059907, "o", "[==========> ] 125/270: serde_derive, native-tls, regex-syntax, encoding_rs, regex-automata, tokio, system-configuration, anstream, num-rational, zerofrom, futures-macro, thiserror-impl \r"]
[25.075759, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 126/270: serde_derive, regex-syntax, encoding_rs, regex-automata, tokio, system-configuration, anstream, num-rational, zerofrom, signal-hook(build), futures-macro, thiserror-impl \r"]
[25.164447, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ahash v0.8.11\r\n"]
[25.164496, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 127/270: serde_derive, regex-syntax, encoding_rs, regex-automata, tokio, ahash(build.rs), anstream, num-rational, zerofrom, signal-hook(build), futures-macro, thiserror-impl \r"]
[25.165147, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-util v0.3.31\r\n"]
[25.168247, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 128/270: serde_derive, regex-syntax, futures-util, encoding_rs, regex-automata, tokio, ahash(build.rs), anstream, num-rational, zerofrom, signal-hook(build), thiserror-impl \r"]
[25.213807, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-iter v0.1.45\r\n"]
[25.214192, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==========> ] 129/270: serde_derive, regex-syntax, futures-util, encoding_rs, regex-automata, tokio, ahash(build.rs), anstream, num-rational, zerofrom, num-iter, thiserror-impl \r"]
[25.236125, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-complex v0.4.6\r\n\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 130/270: serde_derive, regex-syntax, futures-util, num-complex, encoding_rs, regex-automata, tokio, ahash(build.rs), num-rational, zerofrom, num-iter, thiserror-impl \r"]
[25.278473, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-utils v0.8.21\r\n"]
[25.278525, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 131/270: serde_derive, regex-syntax, futures-util, num-complex, encoding_rs, regex-automata, tokio, ahash(build.rs), num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
[25.307289, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anyhow v1.0.98\r\n"]
[25.307434, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 132/270: serde_derive, regex-syntax, futures-util, num-complex, encoding_rs, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
[25.338384, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-conv v0.1.0\r\n"]
[25.338438, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 133/270: serde_derive, regex-syntax, futures-util, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, num-conv, thiserror-impl, crossbeam-utils(build.rs) \r"]
[25.409289, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustix v0.38.44\r\n"]
[25.409339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 134/270: serde_derive, regex-syntax, futures-util, rustix(build.rs), num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
[25.447503, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strsim v0.11.1\r\n"]
[25.447618, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 135/270: serde_derive, regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl \r"]
[25.467725, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 136/270: thiserror, serde_derive, regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom \r"]
[25.473512, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustversion v1.0.20\r\n"]
[25.473567, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 137/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, tokio, num-rational, zerofrom \r"]
[25.477253, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time-core v0.1.4\r\n"]
[25.477339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 138/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, time-core, regex-automata, tokio, zerofrom \r"]
[25.493979, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bit-vec v0.6.3\r\n"]
[25.494049, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 139/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, time-core, regex-automata, bit-vec, tokio, zerofrom \r"]
[25.498947, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m powerfmt v0.2.0\r\n"]
[25.498992, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 140/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, time-core, regex-automata, bit-vec, tokio, zerofrom \r"]
[25.516976, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m yoke v0.7.5\r\n"]
[25.517027, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 141/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, time-core, regex-automata, bit-vec, tokio, yoke \r"]
[25.551361, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_lex v0.7.4\r\n"]
[25.551606, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 142/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, regex-automata, bit-vec, tokio, clap_lex, yoke \r"]
[25.583082, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m heck v0.5.0\r\n"]
[25.583155, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 143/270: thiserror, serde_derive, rustversion(build.rs), futures-util, strsim, powerfmt, regex-automata, bit-vec, tokio, clap_lex, yoke, heck \r"]
[25.620283, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bit-set v0.5.3\r\n"]
[25.620448, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 144/270: thiserror, serde_derive, rustversion(build.rs), futures-util, strsim, regex-automata, bit-vec, tokio, clap_lex, yoke, bit-set, heck \r"]
[25.620888, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 145/270: thiserror, serde_derive, rustix(build), rustversion(build.rs), futures-util, strsim, regex-automata, tokio, clap_lex, yoke, bit-set, heck \r"]
[25.668499, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 146/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, clap_lex, yoke, bit-set, rustversion(build), heck \r"]
[25.673766, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_builder v4.5.34\r\n"]
[25.673848, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 147/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, bit-set, rustversion(build), heck, clap_builder \r"]
[25.687039, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m deranged v0.4.1\r\n"]
[25.687142, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 148/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, rustversion(build), heck, clap_builder, deranged \r"]
[25.694934, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_derive v4.5.32\r\n"]
[25.69508, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 149/270: thiserror, serde_derive, clap_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, rustversion(build), clap_builder, deranged \r"]
[25.702353, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time-macros v0.2.22\r\n"]
[25.702382, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 150/270: thiserror, serde_derive, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), clap_builder, deranged \r"]
[25.946083, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 151/270: thiserror, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), clap_builder, serde, deranged \r"]
[26.006998, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 152/270: thiserror, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde \r"]
[26.344192, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m regex v1.11.1\r\n"]
[26.344222, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 153/270: thiserror, clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
[26.419882, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num v0.4.3\r\n"]
[26.419909, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 154/270: num, clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
[26.459008, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 155/270: crossbeam-utils(build), clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
[26.563875, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 156/270: crossbeam-utils(build), rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), ahash(build), clap_builder, serde, regex \r"]
[26.581267, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 157/270: crossbeam-utils(build), rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), ahash(build), clap_builder, signal-hook, serde \r"]
[26.732902, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerovec v0.10.4\r\n"]
[26.733036, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 158/270: crossbeam-utils(build), rustix(build), futures-util, zerovec, regex-automata, tokio, rustversion(build), anyhow(build), ahash(build), clap_builder, signal-hook, serde \r"]
[26.805769, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-project-internal v1.1.10\r\n"]
[26.805815, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 159/270: crossbeam-utils(build), rustix(build), futures-util, zerovec, pin-project-internal, regex-automata, tokio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
[26.939474, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m mio v0.8.11\r\n"]
[26.939612, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 160/270: crossbeam-utils(build), futures-util, zerovec, pin-project-internal, regex-automata, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
[27.135956, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m getrandom v0.2.15\r\n"]
[27.13618, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 161/270: crossbeam-utils(build), getrandom, zerovec, pin-project-internal, regex-automata, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
[27.160029, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m iana-time-zone v0.1.62\r\n"]
[27.160304, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 162/270: crossbeam-utils(build), getrandom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
[27.168847, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m nom v8.0.0\r\n"]
[27.168897, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 163/270: crossbeam-utils(build), getrandom, nom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, anyhow(build), ahash(build), clap_builder, serde \r"]
[27.201855, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerocopy v0.7.35\r\n"]
[27.202076, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 164/270: crossbeam-utils(build), nom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, anyhow(build), ahash(build), zerocopy, clap_builder, serde \r"]
[27.227265, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m chrono v0.4.40\r\n"]
[27.227339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 165/270: crossbeam-utils(build), nom, zerovec, pin-project-internal, chrono, tokio, mio, anyhow(build), ahash(build), zerocopy, clap_builder, serde \r"]
[27.296492, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 166/270: crossbeam-utils(build), nom, zerovec, pin-project-internal, chrono, tokio, anyhow(build), ahash(build), zerocopy, clap_builder, serde, rustversion \r"]
[27.314139, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-project v1.1.10\r\n\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 167/270: crossbeam-utils(build), nom, pin-project, zerovec, chrono, tokio, anyhow(build), ahash(build), zerocopy, clap_builder, serde, rustversion \r"]
[27.373114, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m signal-hook-mio v0.2.4\r\n"]
[27.37324, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 168/270: crossbeam-utils(build), nom, pin-project, zerovec, chrono, tokio, signal-hook-mio, anyhow(build), ahash(build), clap_builder, serde, rustversion \r"]
[27.415303, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tokio-util v0.7.14\r\n"]
[27.415331, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 169/270: crossbeam-utils(build), nom, pin-project, zerovec, tokio-util, chrono, tokio, anyhow(build), ahash(build), clap_builder, serde, rustversion \r"]
[27.536012, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tokio-native-tls v0.3.1\r\n"]
[27.536162, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 170/270: crossbeam-utils(build), nom, tokio-native-tls, pin-project, zerovec, tokio-util, chrono, tokio, anyhow(build), ahash(build), clap_builder, serde \r"]
[27.597054, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-executor v0.3.31\r\n"]
[27.597106, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 171/270: crossbeam-utils(build), nom, pin-project, zerovec, tokio-util, chrono, tokio, futures-executor, anyhow(build), ahash(build), clap_builder, serde \r"]
[27.709368, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 172/270: crossbeam-utils(build), nom, pin-project, zerovec, anyhow, tokio-util, chrono, tokio, futures-executor, ahash(build), clap_builder, serde \r"]
[27.716961, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 173/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, futures-executor, ahash(build), clap_builder, serde \r"]
[27.738433, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap v4.5.34\r\n"]
[27.738489, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 174/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), clap_builder, serde \r"]
[27.748759, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fancy-regex v0.11.0\r\n"]
[27.748798, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 175/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), fancy-regex, serde \r"]
[27.928307, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 176/270: rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), fancy-regex, crossbeam-utils, serde \r"]
[27.930598, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fraction v0.13.1\r\n"]
[27.930634, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 177/270: rustix, nom, pin-project, zerovec, chrono, tokio, clap, ahash(build), fraction, fancy-regex, crossbeam-utils, serde \r"]
[28.09247, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time v0.3.41\r\n"]
[28.094192, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 178/270: rustix, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fraction, fancy-regex, crossbeam-utils, serde \r"]
[28.171592, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m uuid v1.16.0\r\n"]
[28.171762, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 179/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fancy-regex, crossbeam-utils, serde \r"]
[28.231062, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m nix v0.27.1\r\n"]
[28.23111, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 180/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fancy-regex, nix, serde \r"]
[28.244612, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m paste v1.0.15\r\n\u001b[1m\u001b[36m Building\u001b[0m "]
[28.244867, "o", "[===============> ] 181/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, paste(build.rs), fancy-regex, nix, serde \r"]
[28.286837, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bytecount v0.6.8\r\n"]
[28.286995, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 182/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, paste(build.rs), bytecount, nix, serde \r"]
[28.342152, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m heck v0.4.1\r\n"]
[28.342211, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 183/270: uuid, nom, pin-project, zerovec, time, tokio, clap, heck, paste(build.rs), bytecount, nix, serde \r"]
[28.36806, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-cmp v0.1.0\r\n"]
[28.368385, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 184/270: nom, pin-project, zerovec, num-cmp, time, tokio, clap, heck, paste(build.rs), bytecount, nix, serde \r"]
[28.381471, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m option-ext v0.2.0\r\n"]
[28.381606, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 185/270: nom, pin-project, zerovec, num-cmp, time, option-ext, tokio, clap, heck, bytecount, nix, serde \r"]
[28.383891, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hex v0.4.3\r\n"]
[28.384002, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 186/270: nom, hex, pin-project, zerovec, num-cmp, time, option-ext, tokio, clap, heck, nix, serde \r"]
[28.42974, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m home v0.5.11\r\n"]
[28.430048, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 187/270: home, nom, hex, pin-project, zerovec, num-cmp, time, tokio, clap, heck, nix, serde \r"]
[28.452346, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strum_macros v0.25.3\r\n"]
[28.452511, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 188/270: home, nom, strum_macros, hex, pin-project, zerovec, num-cmp, time, tokio, clap, nix, serde \r"]
[28.489991, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m which v4.4.2\r\n"]
[28.490071, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 189/270: nom, strum_macros, hex, pin-project, zerovec, num-cmp, time, tokio, clap, which, nix, serde \r"]
[28.511349, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m iso8601 v0.6.2\r\n\u001b[1m\u001b[36m Building\u001b[0m [================> ] 190/270: nom, strum_macros, iso8601, pin-project, zerovec, num-cmp, time, tokio, clap, which, nix, serde \r"]
[28.515445, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m dirs-sys v0.4.1\r\n"]
[28.515849, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 191/270: nom, strum_macros, iso8601, pin-project, zerovec, time, tokio, clap, dirs-sys, which, nix, serde \r"]
[28.57553, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m indexmap v2.8.0\r\n"]
[28.575846, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 192/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, dirs-sys, which, nix, serde \r"]
[28.586559, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 193/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, serde_json, which, nix, serde \r"]
[28.600211, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_urlencoded v0.7.1\r\n"]
[28.600611, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 194/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, serde_json, which, serde_urlencoded, nix \r"]
[28.639939, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 195/270: strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, tokio, clap, serde_json, serde_urlencoded, nix \r"]
[28.71242, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_with v2.3.3\r\n"]
[28.712715, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 196/270: strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, serde_with, tokio, clap, serde_json, serde_urlencoded \r"]
[28.718502, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 197/270: paste(build), strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, serde_with, tokio, clap, serde_json \r"]
[28.766112, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-epoch v0.9.18\r\n"]
[28.766143, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 198/270: paste(build), crossbeam-epoch, strum_macros, iso8601, pin-project, zerovec, indexmap, time, serde_with, tokio, clap, serde_json \r"]
[28.804231, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures v0.3.31\r\n"]
[28.804285, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 199/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, indexmap, time, serde_with, tokio, clap, futures, serde_json \r"]
[28.848245, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m async-trait v0.1.88\r\n"]
[28.848379, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 200/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, indexmap, time, serde_with, tokio, clap, serde_json, async-trait \r"]
[28.883251, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tinystr v0.7.6\r\n"]
[28.88339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 201/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait \r"]
[28.895154, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_collections v1.5.0\r\n"]
[28.895327, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 202/270: paste(build), crossbeam-epoch, strum_macros, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait, icu_collections \r"]
[28.951617, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_yaml v0.9.34+deprecated\r\n"]
[28.951647, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 203/270: paste(build), serde_yaml, strum_macros, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait, icu_collections \r"]
[28.963731, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m h2 v0.3.26\r\n"]
[28.963758, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 204/270: paste(build), serde_yaml, strum_macros, zerovec, time, serde_with, clap, serde_json, h2, tinystr, async-trait, icu_collections \r"]
[28.978714, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_repr v0.1.20\r\n"]
[28.978743, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 205/270: paste(build), serde_yaml, strum_macros, serde_repr, time, serde_with, clap, serde_json, h2, tinystr, async-trait, icu_collections \r"]
[29.061025, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m xattr v1.5.0\r\n"]
[29.061094, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 206/270: xattr, paste(build), serde_yaml, strum_macros, serde_repr, time, serde_with, serde_json, h2, tinystr, async-trait, icu_collections \r"]
[29.168367, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m filetime v0.2.25\r\n"]
[29.16851, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 207/270: xattr, paste(build), serde_yaml, strum_macros, time, serde_with, serde_json, h2, filetime, tinystr, async-trait, icu_collections \r"]
[29.200162, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rayon-core v1.12.1\r\n"]
[29.200297, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 208/270: paste(build), serde_yaml, strum_macros, time, serde_with, rayon-core(build.rs), serde_json, h2, filetime, tinystr, async-trait, icu_collections \r"]
[29.260113, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tar v0.4.44\r\n"]
[29.260221, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 209/270: paste(build), serde_yaml, strum_macros, time, serde_with, rayon-core(build.rs), tar, serde_json, h2, tinystr, async-trait, icu_collections \r"]
[29.277112, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 210/270: rayon-core(build), paste(build), serde_yaml, strum_macros, time, serde_with, tar, serde_json, h2, tinystr, async-trait, icu_collections \r"]
[29.279362, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-deque v0.8.6\r\n"]
[29.279518, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 211/270: rayon-core(build), paste(build), serde_yaml, strum_macros, crossbeam-deque, time, serde_with, tar, h2, tinystr, async-trait, icu_collections \r"]
[29.328543, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m dirs v5.0.1\r\n"]
[29.328571, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 212/270: rayon-core(build), paste(build), serde_yaml, strum_macros, crossbeam-deque, time, serde_with, dirs, tar, h2, tinystr, icu_collections "]
[29.328594, "o", "\r"]
[29.359226, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossterm v0.27.0\r\n"]
[29.359346, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 213/270: rayon-core(build), paste(build), serde_yaml, strum_macros, time, serde_with, dirs, crossterm, tar, h2, tinystr, icu_collections \r"]
[29.38027, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m itertools v0.11.0\r\n"]
[29.380463, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 214/270: rayon-core(build), paste(build), serde_yaml, strum_macros, itertools, time, serde_with, crossterm, tar, h2, tinystr, icu_collections \r"]
[29.480834, "o", "\u001b[K"]
[29.480864, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m models v0.4.0 (/Users/goku/projects/wrkflw/crates/models)\r\n"]
[29.48099, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 215/270: rayon-core(build), paste(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, tinystr, icu_collections, models \r"]
[29.749227, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m matrix v0.4.0 (/Users/goku/projects/wrkflw/crates/matrix)\r\n"]
[29.7494, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 216/270: rayon-core(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, matrix, tinystr, icu_collections, models \r"]
[29.784576, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m logging v0.4.0 (/Users/goku/projects/wrkflw/crates/logging)\r\n"]
[29.784625, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 217/270: rayon-core(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, matrix, logging, tinystr, icu_collections \r"]
[29.893603, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m utils v0.4.0 (/Users/goku/projects/wrkflw/crates/utils)\r\n"]
[29.893669, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 218/270: rayon-core(build), serde_yaml, strum_macros, itertools, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections \r"]
[29.974513, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bollard-stubs v1.42.0-rc.7\r\n"]
[29.974559, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 219/270: rayon-core(build), bollard-stubs, strum_macros, itertools, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections \r"]
[29.991577, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m runtime v0.4.0 (/Users/goku/projects/wrkflw/crates/runtime)\r\n"]
[29.991626, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 220/270: rayon-core(build), bollard-stubs, strum_macros, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections, runtime \r"]
[29.996852, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m validators v0.4.0 (/Users/goku/projects/wrkflw/crates/validators)\r\n\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 221/270: rayon-core(build), bollard-stubs, strum_macros, utils, time, h2, matrix, logging, tinystr, validators, icu_collections, runtime \r"]
[30.028518, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 222/270: rayon-core(build), bollard-stubs, strum_macros, utils, paste, time, h2, matrix, tinystr, validators, icu_collections, runtime \r"]
[30.082507, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_locid v1.5.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 223/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, h2, matrix, tinystr, validators, icu_locid, icu_collections, runtime \r"]
[30.108104, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hyper v0.14.32\r\n"]
[30.108153, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 224/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, hyper, h2, matrix, validators, icu_locid, icu_collections, runtime \r"]
[30.141236, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m colored v2.2.0\r\n"]
[30.141305, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 225/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, hyper, h2, colored, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.145174, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num_cpus v1.16.0\r\n"]
|
||||
[30.145218, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 226/270: bollard-stubs, strum_macros, num_cpus, paste, time, hyper, h2, colored, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.20999, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m cassowary v0.3.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 227/270: bollard-stubs, strum_macros, paste, time, hyper, h2, colored, validators, icu_locid, cassowary, icu_collections, runtime \r"]
|
||||
[30.303977, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m unicode-segmentation v1.12.0\r\n"]
|
||||
[30.30402, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 228/270: unicode-segmentation, bollard-stubs, strum_macros, paste, time, hyper, h2, colored, icu_locid, cassowary, icu_collections, runtime \r"]
|
||||
[30.392998, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m indoc v2.0.6\r\n"]
|
||||
[30.393666, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 229/270: unicode-segmentation, bollard-stubs, strum_macros, paste, time, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.396679, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strum v0.25.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 230/270: unicode-segmentation, bollard-stubs, strum, paste, time, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.399296, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m unicode-width v0.1.14\r\n"]
|
||||
[30.399332, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 231/270: unicode-segmentation, bollard-stubs, strum, time, unicode-width, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.407848, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m evaluator v0.4.0 (/Users/goku/projects/wrkflw/crates/evaluator)\r\n"]
|
||||
[30.407878, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 232/270: unicode-segmentation, bollard-stubs, strum, time, unicode-width, hyper, evaluator, h2, indoc, icu_locid, cassowary, runtime \r"]
|
||||
[30.554265, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_provider v1.5.0\r\n"]
|
||||
[30.554908, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 233/270: unicode-segmentation, bollard-stubs, strum, time, hyper, evaluator, h2, indoc, icu_locid, cassowary, icu_provider, runtime \r"]
|
||||
[30.591042, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 234/270: bollard-stubs, strum, time, hyper, rayon-core, evaluator, h2, indoc, icu_locid, cassowary, icu_provider, runtime \r"]
|
||||
[30.604905, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossterm v0.26.1\r\n"]
|
||||
[30.605074, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 235/270: bollard-stubs, strum, time, hyper, rayon-core, crossterm, evaluator, h2, indoc, icu_locid, icu_provider, runtime \r"]
|
||||
[30.653721, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m same-file v1.0.6\r\n"]
|
||||
[30.653797, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 236/270: bollard-stubs, strum, time, hyper, rayon-core, crossterm, evaluator, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.691821, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m urlencoding v2.1.3\r\n"]
|
||||
[30.691883, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 237/270: urlencoding, bollard-stubs, strum, time, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.708108, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 238/270: urlencoding, bollard-stubs, strum, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.749513, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ratatui v0.23.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 239/270: urlencoding, bollard-stubs, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.756806, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m walkdir v2.5.0\r\n\u001b[1m\u001b[36m Building\u001b[0m "]
|
||||
[30.756853, "o", "[=====================> ] 240/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, icu_provider, runtime \r"]
|
||||
[30.764425, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 241/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, icu_provider \r"]
|
||||
[30.794001, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rayon v1.10.0\r\n"]
|
||||
[30.794057, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 241/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_locid, icu_provider \r"]
|
||||
[30.83092, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 242/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_provider \r"]
|
||||
[30.838227, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 243/270: bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_provider \r"]
|
||||
[30.846511, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 244/270: bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, rayon, icu_provider \r"]
|
||||
[30.914955, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 245/270: bollard-stubs, walkdir, ratatui, hyper, crossterm, rayon, icu_provider \r"]
|
||||
[30.917348, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 246/270: bollard-stubs, ratatui, hyper, crossterm, rayon, icu_provider \r"]
|
||||
[30.978944, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 247/270: bollard-stubs, ratatui, hyper, rayon, icu_provider \r"]
|
||||
[31.11234, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_locid_transform v1.5.0\r\n"]
|
||||
[31.112427, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 247/270: bollard-stubs, ratatui, hyper, icu_locid_transform, rayon, icu_provider \r"]
|
||||
[31.162661, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, ratatui, hyper, icu_locid_transform, rayon \r"]
|
||||
[31.401195, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_properties v1.5.1\r\n"]
|
||||
[31.401226, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.420216, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hyper-tls v0.5.0\r\n"]
|
||||
[31.42035, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m hyperlocal v0.8.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, hyperlocal, hyper-tls, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.524709, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 249/270: bollard-stubs, hyperlocal, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.556444, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 250/270: bollard-stubs, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.566354, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 251/270: bollard-stubs, ratatui, hyper, icu_properties, rayon \r"]
|
||||
[31.685692, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 252/270: bollard-stubs, ratatui, icu_properties, rayon \r"]
|
||||
[31.74925, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 253/270: bollard-stubs, ratatui, icu_properties \r"]
|
||||
[31.927424, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 254/270: bollard-stubs, icu_properties \r"]
|
||||
[32.170935, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_normalizer v1.5.0\r\n"]
|
||||
[32.170967, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 254/270: bollard-stubs, icu_normalizer, icu_properties \r"]
|
||||
[32.268484, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, icu_normalizer \r"]
|
||||
[32.367189, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m idna_adapter v1.2.0\r\n"]
|
||||
[32.367291, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, icu_normalizer, idna_adapter \r"]
|
||||
[32.408588, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m idna v1.0.3\r\n"]
|
||||
[32.408647, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, idna, icu_normalizer, idna_adapter \r"]
|
||||
[32.41806, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 256/270: bollard-stubs, idna, icu_normalizer \r"]
|
||||
[32.492269, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 257/270: bollard-stubs, idna \r"]
|
||||
[32.545839, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m url v2.5.4\r\n"]
|
||||
[32.545928, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 257/270: bollard-stubs, idna, url \r"]
|
||||
[32.629833, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 258/270: bollard-stubs, url \r"]
|
||||
[32.759557, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m reqwest v0.11.27\r\n"]
|
||||
[32.759587, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 258/270: bollard-stubs, reqwest, url \r"]
|
||||
[32.835456, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, reqwest \r"]
|
||||
[33.312497, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bollard v0.14.0\r\n"]
|
||||
[33.312523, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, bollard, reqwest \r"]
|
||||
[33.331502, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m jsonschema v0.17.1\r\n"]
|
||||
[33.331527, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m github v0.4.0 (/Users/goku/projects/wrkflw/crates/github)\r\n\u001b[1m\u001b[32m Compiling\u001b[0m gitlab v0.4.0 (/Users/goku/projects/wrkflw/crates/gitlab)\r\n"]
|
||||
[33.331737, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, github, bollard, gitlab, jsonschema, reqwest \r"]
|
||||
[33.393223, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 260/270: github, bollard, gitlab, jsonschema, reqwest \r"]
|
||||
[33.595709, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 261/270: github, bollard, jsonschema, reqwest \r"]
|
||||
[33.604039, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 262/270: bollard, jsonschema, reqwest \r"]
|
||||
[34.004668, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 263/270: bollard, jsonschema \r"]
|
||||
[34.015674, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m parser v0.4.0 (/Users/goku/projects/wrkflw/crates/parser)\r\n"]
|
||||
[34.015746, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 263/270: parser, bollard, jsonschema \r"]
|
||||
[34.346413, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 264/270: parser, bollard \r"]
|
||||
[34.500995, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 265/270: bollard \r"]
|
||||
[34.745632, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m executor v0.4.0 (/Users/goku/projects/wrkflw/crates/executor)\r\n"]
|
||||
[34.74581, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 265/270: bollard, executor \r"]
|
||||
[35.477556, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 266/270: executor \r"]
|
||||
[35.613178, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ui v0.4.0 (/Users/goku/projects/wrkflw/crates/ui)\r\n"]
|
||||
[35.613272, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 266/270: ui, executor \r"]
|
||||
[35.999581, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 267/270: ui \r"]
|
||||
[36.013859, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m wrkflw v0.4.0 (/Users/goku/projects/wrkflw/crates/wrkflw)\r\n"]
|
||||
[36.013926, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 267/270: ui, wrkflw \r"]
|
||||
[36.063415, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 268/270: ui \r"]
|
||||
[36.374123, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 269/270: wrkflw(bin) \r"]
|
||||
[36.934682, "o", "\u001b[K\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 16.45s\r\n"]
|
||||
[36.946554, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw validate test_gitlab_ci/minimal.gitlab-ci.yml`\r\n"]
|
||||
[37.469642, "o", "Validating GitLab CI pipeline file: test_gitlab_ci/minimal.gitlab-ci.yml... ✅ Valid syntax\r\n"]
|
||||
[37.470535, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[37.471315, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[37.471326, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[37.473048, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[37.47485, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[37.474976, "o", "\u001b[?1h\u001b="]
|
||||
[37.475042, "o", "\u001b[?2004h"]
|
||||
[39.504083, "o", "c"]
|
||||
[39.575281, "o", "\bca"]
|
||||
[39.985807, "o", "r"]
|
||||
[40.110435, "o", "g"]
|
||||
[40.247171, "o", "o"]
|
||||
[40.366603, "o", " "]
|
||||
[40.491496, "o", "r"]
|
||||
[41.167474, "o", "\b \b"]
|
||||
[41.318578, "o", "\b"]
|
||||
[41.464227, "o", "\b \b"]
|
||||
[41.588577, "o", "\b \b"]
|
||||
[41.725879, "o", "\b \b"]
|
||||
[41.849987, "o", "\b\bc \b"]
|
||||
[42.776052, "o", "\bcl"]
|
||||
[42.880903, "o", "e"]
|
||||
[43.132681, "o", "a"]
|
||||
[43.245463, "o", "r"]
|
||||
[43.601618, "o", "\u001b[?1l\u001b>"]
|
||||
[43.601729, "o", "\u001b[?2004l\r\r\n"]
|
||||
[43.603201, "o", "\u001b]2;clear\u0007\u001b]1;clear\u0007"]
|
||||
[43.630852, "o", "\u001b[3J\u001b[H\u001b[2J"]
|
||||
[43.631162, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[43.632238, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[43.632263, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[43.635069, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[43.637553, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[43.637652, "o", "\u001b[?1h\u001b="]
|
||||
[43.637664, "o", "\u001b[?2004h"]
|
||||
[43.991397, "o", "c"]
|
||||
[44.088651, "o", "\bca"]
|
||||
[44.374368, "o", "r"]
|
||||
[44.446833, "o", "g"]
|
||||
[44.53755, "o", "o"]
|
||||
[44.628977, "o", " "]
|
||||
[44.812984, "o", "r"]
|
||||
[44.922289, "o", " "]
|
||||
[46.356703, "o", "-"]
|
||||
[46.687628, "o", "-"]
|
||||
[47.264144, "o", " "]
|
||||
[47.638826, "o", "r"]
|
||||
[47.824999, "o", "u"]
|
||||
[48.00395, "o", "n"]
|
||||
[48.099902, "o", " "]
|
||||
[50.32697, "o", "t"]
|
||||
[50.449608, "o", "e"]
|
||||
[50.661865, "o", "s"]
|
||||
[50.768766, "o", "t"]
|
||||
[51.489835, "o", "_"]
|
||||
[51.868906, "o", "g"]
|
||||
[51.965985, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[53.657521, "o", "\b\u001b[0m/m"]
|
||||
[53.813699, "o", "i"]
|
||||
[53.937831, "o", "nimal.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[55.072612, "o", "\b\u001b[0m -"]
|
||||
[55.326911, "o", "e"]
|
||||
[56.520262, "o", "\u001b[?1l\u001b>"]
|
||||
[56.520344, "o", "\u001b[?2004l\r\r\n"]
|
||||
[56.521995, "o", "\u001b]2;cargo r -- run test_gitlab_ci/minimal.gitlab-ci.yml -e\u0007\u001b]1;cargo\u0007"]
|
||||
[56.760823, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.19s\r\n"]
|
||||
[56.766792, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw run test_gitlab_ci/minimal.gitlab-ci.yml -e`\r\n"]
|
||||
[63.060648, "o", "✅ Workflow execution completed successfully!\r\n\r\nJob summary:\r\n ✅ build (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ test (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ build (success)\r\n Steps:\r\n ✅ Run script line 1\r\n ✅ test (success)\r\n Steps:\r\n ✅ Run script line 1\r\n"]
|
||||
[63.062528, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[63.063152, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[63.063163, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[63.064999, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[63.06677, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[63.066845, "o", "\u001b[?1h\u001b=\u001b[?2004h"]
|
||||
[66.121168, "o", "c"]
|
||||
[66.234389, "o", "\bca"]
|
||||
[66.398021, "o", "t"]
|
||||
[66.595798, "o", " "]
|
||||
[67.93179, "o", "t"]
|
||||
[68.057573, "o", "e"]
|
||||
[68.252993, "o", "s"]
|
||||
[68.380648, "o", "t"]
|
||||
[68.977726, "o", "_"]
|
||||
[69.395102, "o", "g"]
|
||||
[69.506881, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[72.095324, "o", "\b\u001b[0m/i"]
|
||||
[72.270688, "o", "n"]
|
||||
[72.41996, "o", "\u0007\r\r\n"]
|
||||
[72.420018, "o", "\u001b[J\u001b[0mincludes.gitlab-ci.yml \u001b[Jinvalid.gitlab-ci.yml \u001b[J\u001b[A\u001b[0m\u001b[27m\u001b[24m\r\u001b[19Ccat test_gitlab_ci/in\u001b[K"]
|
||||
[73.498026, "o", "v"]
|
||||
[73.636495, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[75.147715, "o", "\b\u001b[0m \b"]
|
||||
[75.148084, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n\u001b[J"]
|
||||
[75.149613, "o", "\u001b]2;cat test_gitlab_ci/invalid.gitlab-ci.yml\u0007\u001b]1;cat\u0007"]
|
||||
[75.175256, "o", "# Invalid GitLab CI file with common mistakes\r\n\r\n# Missing stages definition\r\n# stages:\r\n# - build\r\n# - test\r\n\r\nvariables:\r\n CARGO_HOME: ${CI_PROJECT_DIR}/.cargo # Missing quotes around value with variables\r\n\r\n# Invalid job definition (missing script)\r\nbuild:\r\n stage: build # Referring to undefined stage\r\n # Missing required script section\r\n artifacts:\r\n paths:\r\n - target/release/\r\n expire_in: 1 week\r\n\r\n# Invalid job with incorrect when value\r\ntest:\r\n stage: test\r\n script:\r\n - cargo test\r\n when: never # Invalid value for when (should be always, manual, or delayed)\r\n dependencies:\r\n - non_existent_job # Dependency on non-existent job\r\n\r\n# Improperly structured job with invalid keys\r\ndeploy:\r\n stagee: deploy # Typo in stage key\r\n scriptt: # Typo in script key\r\n - echo \"Deploying...\"\r\n only:\r\n - main\r\n environment:\r\n production # Incorrect format for environment\r\n retry: hello # Incorrect type for retry (should be integer or object)\r\n\r\n# Invalid rules section\r\nl"]
|
||||
[75.175425, "o", "int:\r\n stage: test\r\n script:\r\n - cargo clippy\r\n rules:\r\n - equals: $CI_COMMIT_BRANCH == \"main\" # Invalid rule (should be if, changes, exists, etc.)\r\n \r\n# Job with invalid cache configuration\r\ncache-test:\r\n stage: test\r\n script:\r\n - echo \"Testing cache\"\r\n cache:\r\n paths:\r\n - ${CARGO_HOME}\r\n key: [invalid, key, type] # Invalid type for key (should be string)\r\n policy: invalid-policy # Invalid policy value "]
|
||||
[75.175543, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[75.176254, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[75.17627, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[75.179062, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[75.181195, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[75.181307, "o", "\u001b[?1h\u001b="]
|
||||
[75.181372, "o", "\u001b[?2004h"]
|
||||
[78.644579, "o", "c"]
|
||||
[78.757216, "o", "\bca"]
|
||||
[79.422982, "o", "\b\bc \b"]
|
||||
[80.126467, "o", "\bcl"]
|
||||
[80.241618, "o", "e"]
|
||||
[80.499926, "o", "a"]
|
||||
[80.620047, "o", "r"]
|
||||
[80.768709, "o", "\u001b[?1l\u001b>"]
|
||||
[80.768793, "o", "\u001b[?2004l\r\r\n"]
|
||||
[80.770763, "o", "\u001b]2;clear\u0007\u001b]1;clear\u0007"]
|
||||
[80.796043, "o", "\u001b[3J\u001b[H\u001b[2J"]
|
||||
[80.796272, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[80.797072, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[80.799811, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[80.802093, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[80.802198, "o", "\u001b[?1h\u001b="]
|
||||
[80.802212, "o", "\u001b[?2004h"]
|
||||
[81.165962, "o", "c"]
|
||||
[81.237876, "o", "\bca"]
|
||||
[81.541593, "o", "r"]
|
||||
[81.632992, "o", "g"]
|
||||
[81.702718, "o", "o"]
|
||||
[81.811783, "o", " "]
|
||||
[82.041789, "o", "r"]
|
||||
[82.171861, "o", " "]
|
||||
[83.210945, "o", "-"]
|
||||
[83.370683, "o", "-"]
|
||||
[83.531883, "o", " "]
|
||||
[84.72197, "o", "v"]
|
||||
[85.400474, "o", "\b \b"]
|
||||
[85.531347, "o", "\b"]
|
||||
[85.666295, "o", "\b \b"]
|
||||
[85.92588, "o", "\b \b"]
|
||||
[86.620454, "o", "v"]
|
||||
[86.804257, "o", "a"]
|
||||
[86.911944, "o", "l"]
|
||||
[87.132942, "o", "i"]
|
||||
[87.276373, "o", "d"]
|
||||
[87.352783, "o", "a"]
|
||||
[87.544066, "o", "t"]
|
||||
[87.657321, "o", "e"]
|
||||
[87.785925, "o", " "]
|
||||
[88.963881, "o", "t"]
|
||||
[89.074873, "o", "e"]
|
||||
[89.258553, "o", "s"]
|
||||
[89.357494, "o", "t"]
|
||||
[89.816142, "o", "\u0007"]
|
||||
[89.816398, "o", "\r\r\n"]
|
||||
[89.816612, "o", "\u001b[J\u001b[1;36mtest_gitlab_ci\u001b[0m/ \u001b[J\u001b[1;36mtest-workflows\u001b[0m/ \u001b[J\u001b[1;36mtests\u001b[0m/ \u001b[J\u001b[A\u001b[0m\u001b[27m\u001b[24m\r\u001b[19Ccargo r validate test\u001b[K"]
|
||||
[90.569999, "o", "_"]
|
||||
[90.950079, "o", "g"]
|
||||
[91.040342, "o", "itlab_ci\u001b[1m/\u001b[0m"]
|
||||
[92.906492, "o", "\b\u001b[0m/m"]
|
||||
[93.078283, "o", "i"]
|
||||
[93.194416, "o", "nimal.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[94.398323, "o", "\b\u001b[0m \b"]
|
||||
[94.899238, "o", "\b \b"]
|
||||
[94.982652, "o", "\b \b"]
|
||||
[95.065722, "o", "\b \b"]
|
||||
[95.149466, "o", "\b \b"]
|
||||
[95.233618, "o", "\b \b"]
|
||||
[95.317716, "o", "\b \b"]
|
||||
[95.4019, "o", "\b \b"]
|
||||
[95.485971, "o", "\b \b"]
|
||||
[95.569449, "o", "\b \b"]
|
||||
[95.653691, "o", "\b \b"]
|
||||
[95.736766, "o", "\b \b"]
|
||||
[95.82133, "o", "\b \b"]
|
||||
[95.905257, "o", "\b \b"]
|
||||
[95.988404, "o", "\b \b"]
|
||||
[96.072177, "o", "\b \b"]
|
||||
[96.156204, "o", "\b \b"]
|
||||
[96.240362, "o", "\b \b"]
|
||||
[96.324551, "o", "\b \b"]
|
||||
[96.513245, "o", "\b \b"]
|
||||
[96.673025, "o", "\b \b"]
|
||||
[96.851629, "o", "\b \b"]
|
||||
[97.496169, "o", "i"]
|
||||
[97.698031, "o", "n"]
|
||||
[97.987174, "o", "v"]
|
||||
[98.138347, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[98.957859, "o", "\b\u001b[0m \b"]
|
||||
[98.958383, "o", "\u001b[?1l\u001b>\u001b[?2004l\r\r\n\u001b[J"]
|
||||
[98.960319, "o", "\u001b]2;cargo r validate test_gitlab_ci/invalid.gitlab-ci.yml\u0007\u001b]1;cargo\u0007"]
|
||||
[99.107154, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.09s\r\n"]
|
||||
[99.114895, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw validate test_gitlab_ci/invalid.gitlab-ci.yml`\r\n"]
|
||||
[99.636477, "o", "Validating GitLab CI pipeline file: test_gitlab_ci/invalid.gitlab-ci.yml... ❌ Invalid\r\nValidation failed: Schema validation error: GitLab CI validation failed:\r\n- {\"key\":[\"invalid\",\"key\",\"type\"],\"paths\":[\"${CARGO_HOME}\"],\"policy\":\"invalid-policy\"} is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- \"hello\" is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)\r\n- {\"equals\":\"$CI_COMMIT_BRANCH == \\\"main\\\"\"} is not valid under any of the schemas listed in the 'anyOf' keyword\r\n\r\n"]
|
||||
[99.637323, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[99.638217, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[99.638226, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[99.639979, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[99.642108, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[99.642189, "o", "\u001b[?1h\u001b="]
|
||||
[99.642244, "o", "\u001b[?2004h"]
|
||||
[101.389433, "o", "c"]
|
||||
[101.489821, "o", "\bca"]
|
||||
[101.781592, "o", "r"]
|
||||
[101.870935, "o", "g"]
|
||||
[101.913828, "o", "o"]
|
||||
[102.021608, "o", " "]
|
||||
[102.173967, "o", "r"]
|
||||
[102.282804, "o", " "]
|
||||
[103.113368, "o", "-"]
|
||||
[103.251079, "o", "-"]
|
||||
[103.3802, "o", " "]
|
||||
[103.637955, "o", "r"]
|
||||
[103.756731, "o", "u"]
|
||||
[104.035863, "o", " "]
|
||||
[104.396646, "o", "\b"]
|
||||
[104.88292, "o", "n"]
|
||||
[104.97564, "o", " "]
|
||||
[106.361505, "o", "t"]
|
||||
[106.453323, "o", "e"]
|
||||
[106.66181, "o", "s"]
|
||||
[106.761957, "o", "t"]
|
||||
[107.423959, "o", "_"]
|
||||
[107.591679, "o", "gitlab_ci\u001b[1m/\u001b[0m"]
|
||||
[109.594052, "o", "\b\u001b[0m/i"]
|
||||
[109.78732, "o", "n"]
|
||||
[110.089516, "o", "v"]
|
||||
[110.259654, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[112.918071, "o", "\b\u001b[0m -"]
|
||||
[113.487665, "o", "e"]
|
||||
[114.05841, "o", "\u001b[?1l\u001b>"]
|
||||
[114.05869, "o", "\u001b[?2004l\r\r\n"]
|
||||
[114.060284, "o", "\u001b]2;cargo r -- run test_gitlab_ci/invalid.gitlab-ci.yml -e\u0007\u001b]1;cargo\u0007"]
|
||||
[114.193654, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.09s\r\n"]
|
||||
[114.200619, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw run test_gitlab_ci/invalid.gitlab-ci.yml -e`\r\n"]
|
||||
[114.727902, "o", "Error executing workflow: Parse error: Failed to parse GitLab pipeline: Schema validation error: GitLab CI validation failed:\r\n- {\"key\":[\"invalid\",\"key\",\"type\"],\"paths\":[\"${CARGO_HOME}\"],\"policy\":\"invalid-policy\"} is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- \"hello\" is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)\r\n- {\"equals\":\"$CI_COMMIT_BRANCH == \\\"main\\\"\"} is not valid under any of the schemas listed in the 'anyOf' keyword\r\n\r\n"]
|
||||
[114.728458, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[114.72932, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[114.729328, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[114.731093, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[114.732938, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;31m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[114.73302, "o", "\u001b[?1h\u001b="]
|
||||
[114.733045, "o", "\u001b[?2004h"]
|
||||
[118.210217, "o", "\u001b[?2004l\r\r\n"]
|
||||
3012  schemas/gitlab-ci.json  Normal file
File diff suppressed because it is too large
@@ -1,44 +0,0 @@
#[async_trait]
impl ContainerRuntime for EmulationRuntime {
    async fn run_container(
        &self,
        image: &str,
        cmd: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
        volumes: &[(&Path, &Path)],
    ) -> Result<ContainerOutput, ContainerError> {
        // ... existing code ...
    }

    async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
        // ... existing code ...
    }

    async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
        // ... existing code ...
    }

    async fn prepare_language_environment(
        &self,
        language: &str,
        version: Option<&str>,
        additional_packages: Option<Vec<String>>,
    ) -> Result<String, ContainerError> {
        // For emulation runtime, we'll use a simplified approach
        // that doesn't require building custom images
        let base_image = match language {
            "python" => version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v)),
            "node" => version.map_or("node:20-slim".to_string(), |v| format!("node:{}", v)),
            "java" => version.map_or("eclipse-temurin:17-jdk".to_string(), |v| format!("eclipse-temurin:{}", v)),
            "go" => version.map_or("golang:1.21-slim".to_string(), |v| format!("golang:{}", v)),
            "dotnet" => version.map_or("mcr.microsoft.com/dotnet/sdk:7.0".to_string(), |v| format!("mcr.microsoft.com/dotnet/sdk:{}", v)),
            "rust" => version.map_or("rust:latest".to_string(), |v| format!("rust:{}", v)),
            _ => return Err(ContainerError::ContainerStart(format!("Unsupported language: {}", language))),
        };

        // For emulation, we'll just return the base image;
        // the actual package installation will be handled during container execution
        Ok(base_image)
    }
}
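A minimal usage sketch for the trait method above. The `EmulationRuntime::default()` constructor and the calling context are assumptions for illustration, not taken from this diff; only `prepare_language_environment` and `ContainerError` appear in the code shown here.

```rust
// Hypothetical caller: resolve a base image for a Python job in emulation mode.
// Assumes EmulationRuntime implements Default and ContainerRuntime is in scope.
async fn pick_image() -> Result<String, ContainerError> {
    let runtime = EmulationRuntime::default();
    // "python" with an explicit version maps to "python:3.12";
    // passing None would fall back to "python:3.11-slim".
    let image = runtime
        .prepare_language_environment("python", Some("3.12"), None)
        .await?;
    assert_eq!(image, "python:3.12");
    Ok(image)
}
```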
52  src/lib.rs
@@ -1,52 +0,0 @@
pub mod evaluator;
pub mod executor;
pub mod github;
pub mod gitlab;
pub mod logging;
pub mod matrix;
pub mod models;
pub mod parser;
pub mod runtime;
pub mod ui;
pub mod utils;
pub mod validators;

use bollard::Docker;

/// Clean up all resources when exiting the application
/// This is used by both main.rs and in tests
pub async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                let _ = executor::docker::cleanup_containers(&docker).await;
                let _ = executor::docker::cleanup_networks(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}
472  src/main.rs
@@ -1,472 +0,0 @@
// Import public modules from lib.rs
use wrkflw::*;

use bollard::Docker;
use clap::{Parser, Subcommand};
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(
    name = "wrkflw",
    about = "GitHub Workflow validator and executor",
    version,
    long_about = "A GitHub Workflow validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker"
)]
struct Wrkflw {
    #[command(subcommand)]
    command: Option<Commands>,

    /// Run in verbose mode with detailed output
    #[arg(short, long, global = true)]
    verbose: bool,

    /// Run in debug mode with extensive execution details
    #[arg(short, long, global = true)]
    debug: bool,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Validate GitHub workflow files
    Validate {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,
    },

    /// Execute GitHub workflow files locally
    Run {
        /// Path to workflow file to execute
        path: PathBuf,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,
    },

    /// Open TUI interface to manage workflows
    Tui {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,
    },

    /// Trigger a GitHub workflow remotely
    Trigger {
        /// Name of the workflow file (without .yml extension)
        workflow: String,

        /// Branch to run the workflow on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value inputs for the workflow in format key=value
        #[arg(short, long, value_parser = parse_key_val)]
        input: Option<Vec<(String, String)>>,
    },

    /// Trigger a GitLab pipeline remotely
    TriggerGitlab {
        /// Branch to run the pipeline on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value variables for the pipeline in format key=value
        #[arg(short = 'V', long, value_parser = parse_key_val)]
        variable: Option<Vec<(String, String)>>,
    },

    /// List available workflows
    List,
}

// Parser function for key-value pairs
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}

// Make this function public for testing
pub async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                executor::cleanup_resources(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}

async fn handle_signals() {
    // Set up a hard exit timer in case cleanup takes too long
    // This ensures the app always exits even if Docker operations are stuck
    let hard_exit_time = std::time::Duration::from_secs(10);

    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Set up a watchdog thread that will force exit if cleanup takes too long
    // This is important because Docker operations can sometimes hang indefinitely
    let _ = std::thread::spawn(move || {
        std::thread::sleep(hard_exit_time);
        eprintln!(
            "Cleanup taking too long (over {} seconds), forcing exit...",
            hard_exit_time.as_secs()
        );
        logging::error("Forced exit due to cleanup timeout");
        std::process::exit(1);
    });

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status - the force exit thread will be terminated automatically
    std::process::exit(0);
}

#[tokio::main]
async fn main() {
    let cli = Wrkflw::parse();
    let verbose = cli.verbose;
    let debug = cli.debug;

    // Set log level based on command line flags
    if debug {
        logging::set_log_level(logging::LogLevel::Debug);
        logging::debug("Debug mode enabled - showing detailed logs");
    } else if verbose {
        logging::set_log_level(logging::LogLevel::Info);
        logging::info("Verbose mode enabled");
    } else {
        logging::set_log_level(logging::LogLevel::Warning);
    }

    // Setup a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate { path }) => {
            // Determine the path to validate
            let validate_path = path
                .clone()
                .unwrap_or_else(|| PathBuf::from(".github/workflows"));

            // Run the validation
            ui::validate_workflow(&validate_path, verbose).unwrap_or_else(|e| {
                eprintln!("Error: {}", e);
                std::process::exit(1);
            });
        }

        Some(Commands::Run {
            path,
            emulate,
            show_action_messages: _,
        }) => {
            // Set runner mode based on flags
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // First validate the workflow file
            match parser::workflow::parse_workflow(path) {
                Ok(_) => logging::info("Validating workflow..."),
                Err(e) => {
                    logging::error(&format!("Workflow validation failed: {}", e));
                    std::process::exit(1);
                }
            }

            // Execute the workflow
            match executor::execute_workflow(path, runtime_type, verbose || debug).await {
                Ok(result) => {
                    // Print job results
                    for job in &result.jobs {
                        println!(
                            "\n{} Job {}: {}",
                            if job.status == executor::JobStatus::Success {
                                "✅"
                            } else {
                                "❌"
                            },
                            job.name,
                            if job.status == executor::JobStatus::Success {
                                "succeeded"
                            } else {
                                "failed"
                            }
                        );

                        // Print step results
                        for step in &job.steps {
                            println!(
                                " {} {}",
                                if step.status == executor::StepStatus::Success {
                                    "✅"
                                } else {
                                    "❌"
                                },
                                step.name
                            );

                            if !step.output.trim().is_empty() {
                                // If the output is very long, trim it
                                let output_lines = step.output.lines().collect::<Vec<&str>>();

                                println!(" Output:");

                                // In verbose mode, show complete output
                                if verbose || debug {
                                    for line in &output_lines {
                                        println!(" {}", line);
                                    }
                                } else {
                                    // Show only the first few lines
                                    let max_lines = 5;
                                    for line in output_lines.iter().take(max_lines) {
                                        println!(" {}", line);
                                    }

                                    if output_lines.len() > max_lines {
                                        println!(" ... ({} more lines, use --verbose to see full output)",
                                            output_lines.len() - max_lines);
                                    }
                                }
                            }
                        }
                    }

                    // Print detailed failure information if available
                    if let Some(failure_details) = &result.failure_details {
                        println!("\n❌ Workflow execution failed!");
                        println!("{}", failure_details);
                        println!("\nTo fix these issues:");
                        println!("1. Check the formatting issues with: cargo fmt");
                        println!("2. Fix clippy warnings with: cargo clippy -- -D warnings");
                        println!("3. Run tests to ensure everything passes: cargo test");
                        std::process::exit(1);
                    } else {
                        println!("\n✅ Workflow completed successfully!");
                    }
                }
                Err(e) => {
                    logging::error(&format!("Workflow execution failed: {}", e));
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Tui {
            path,
            emulate,
            show_action_messages,
        }) => {
            // Open the TUI interface
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                // Check if Docker is available, fall back to emulation if not
                if !executor::docker::is_available() {
                    println!("⚠️ Docker is not available. Using emulation mode instead.");
                    logging::warning("Docker is not available. Using emulation mode instead.");
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                }
            };

            // Control hiding action messages based on the flag
            if !show_action_messages {
                std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");
            } else {
                std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "false");
            }

            match ui::run_wrkflw_tui(path.as_ref(), runtime_type, verbose).await {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            logging::info(&format!("Triggering workflow {} on GitHub", workflow));

            // Convert inputs to HashMap
            let input_map = input.as_ref().map(|i| {
                i.iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect::<HashMap<String, String>>()
            });

            match github::trigger_workflow(workflow, branch.as_deref(), input_map).await {
                Ok(_) => logging::info("Workflow triggered successfully"),
                Err(e) => {
                    eprintln!("Error triggering workflow: {}", e);
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::TriggerGitlab { branch, variable }) => {
            logging::info("Triggering pipeline on GitLab");

            // Convert variables to HashMap
            let variable_map = variable.as_ref().map(|v| {
                v.iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect::<HashMap<String, String>>()
            });

            match gitlab::trigger_pipeline(branch.as_deref(), variable_map).await {
                Ok(_) => logging::info("GitLab pipeline triggered successfully"),
                Err(e) => {
                    eprintln!("Error triggering GitLab pipeline: {}", e);
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::List) => {
            logging::info("Listing available workflows");

            // Attempt to get GitHub repo info
            if let Ok(repo_info) = github::get_repo_info() {
                match github::list_workflows(&repo_info).await {
                    Ok(workflows) => {
                        if workflows.is_empty() {
                            println!("No GitHub workflows found in repository");
                        } else {
                            println!("GitHub workflows:");
                            for workflow in workflows {
                                println!(" {}", workflow);
                            }
                        }
                    }
                    Err(e) => {
                        eprintln!("Error listing GitHub workflows: {}", e);
                    }
                }
            } else {
                println!("Not a GitHub repository or unable to get repository information");
            }

            // Attempt to get GitLab repo info
            if let Ok(repo_info) = gitlab::get_repo_info() {
                match gitlab::list_pipelines(&repo_info).await {
                    Ok(pipelines) => {
                        if pipelines.is_empty() {
                            println!("No GitLab pipelines found in repository");
                        } else {
                            println!("GitLab pipelines:");
                            for pipeline in pipelines {
                                println!(" {}", pipeline);
                            }
                        }
                    }
                    Err(e) => {
                        eprintln!("Error listing GitLab pipelines: {}", e);
                    }
                }
            } else {
                println!("Not a GitLab repository or unable to get repository information");
            }
        }

        None => {
            // Default to TUI interface if no subcommand
            // Check if Docker is available, fall back to emulation if not
            let runtime_type = if !executor::docker::is_available() {
                println!("⚠️ Docker is not available. Using emulation mode instead.");
                logging::warning("Docker is not available. Using emulation mode instead.");
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // Set environment variable to hide action messages by default
            std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");

            match ui::run_wrkflw_tui(
                Some(&PathBuf::from(".github/workflows")),
                runtime_type,
                verbose,
            )
            .await
            {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }
    }

    // Final cleanup before program exit
    cleanup_on_exit().await;
}
@@ -1,24 +0,0 @@
pub struct ValidationResult {
    pub is_valid: bool,
    pub issues: Vec<String>,
}

impl Default for ValidationResult {
    fn default() -> Self {
        Self::new()
    }
}

impl ValidationResult {
    pub fn new() -> Self {
        ValidationResult {
            is_valid: true,
            issues: Vec::new(),
        }
    }

    pub fn add_issue(&mut self, issue: String) {
        self.is_valid = false;
        self.issues.push(issue);
    }
}
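A short usage sketch of the result type above. The calling code and the example issue string are illustrative assumptions; `new` and `add_issue` are the methods shown in the deleted file.

```rust
// Collect issues while validating; is_valid flips to false on the first issue.
let mut result = ValidationResult::new();
result.add_issue("job `deploy` references undefined stage `deploy`".to_string());
if !result.is_valid {
    for issue in &result.issues {
        eprintln!("- {}", issue);
    }
}
```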
@@ -1,43 +0,0 @@
use jsonschema::JSONSchema;
use serde_json::Value;
use std::fs;
use std::path::Path;

const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../schemas/github-workflow.json");

pub struct SchemaValidator {
    schema: JSONSchema,
}

impl SchemaValidator {
    pub fn new() -> Result<Self, String> {
        let schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
            .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;

        let schema = JSONSchema::compile(&schema_json)
            .map_err(|e| format!("Failed to compile JSON schema: {}", e))?;

        Ok(Self { schema })
    }

    pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
        // Read the workflow file
        let content = fs::read_to_string(workflow_path)
            .map_err(|e| format!("Failed to read workflow file: {}", e))?;

        // Parse YAML to JSON Value
        let workflow_json: Value = serde_yaml::from_str(&content)
            .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;

        // Validate against schema
        if let Err(errors) = self.schema.validate(&workflow_json) {
            let mut error_msg = String::from("Workflow validation failed:\n");
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }
}
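A minimal sketch of how this validator could be driven. The wrapper function is an assumption for illustration; `new` and `validate_workflow` are the methods shown above.

```rust
use std::path::Path;

// Compile the bundled schema once, then validate one workflow file against it.
fn check(path_str: &str) -> Result<(), String> {
    let validator = SchemaValidator::new()?;
    validator.validate_workflow(Path::new(path_str))
}
```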
33  test_gitlab_ci/.gitlab/ci/build.yml  Normal file
@@ -0,0 +1,33 @@
.build-template:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Normal build job
build:
  extends: .build-template
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Debug build with additional flags
debug-build:
  extends: .build-template
  script:
    - cargo build --features debug
  variables:
    RUSTFLAGS: "-Z debug-info=2"
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $DEBUG_BUILD == "true"
      when: manual
63  test_gitlab_ci/.gitlab/ci/test.yml  Normal file
@@ -0,0 +1,63 @@
.test-template:
  stage: test
  dependencies:
    - build
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Unit tests
unit-tests:
  extends: .test-template
  script:
    - cargo test --lib
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Integration tests
integration-tests:
  extends: .test-template
  script:
    - cargo test --test '*'
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Lint with clippy
lint:
  extends: .test-template
  dependencies: []  # No dependencies needed for linting
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Check formatting
format:
  extends: .test-template
  dependencies: []  # No dependencies needed for formatting
  script:
    - rustup component add rustfmt
    - cargo fmt -- --check
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Deployment template
.deploy-template:
  stage: deploy
  script:
    - echo "Deploying to ${ENVIRONMENT} environment"
    - cp target/release/wrkflw /tmp/wrkflw-${ENVIRONMENT}
  artifacts:
    paths:
      - /tmp/wrkflw-${ENVIRONMENT}
  dependencies:
    - build
197
test_gitlab_ci/advanced.gitlab-ci.yml
Normal file
197
test_gitlab_ci/advanced.gitlab-ci.yml
Normal file
@@ -0,0 +1,197 @@
|
||||
stages:
|
||||
- setup
|
||||
- build
|
||||
- test
|
||||
- package
|
||||
- deploy
|
||||
|
||||
variables:
|
||||
CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"
|
||||
RUST_BACKTRACE: "1"
|
||||
|
||||
workflow:
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "main"
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
- if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/
|
||||
- if: $CI_COMMIT_BRANCH =~ /^feature\/.*/
|
||||
- if: $CI_COMMIT_BRANCH == "staging"
|
||||
|
||||
# Default image and settings for all jobs
|
||||
default:
|
||||
image: rust:1.76
|
||||
interruptible: true
|
||||
retry:
|
||||
max: 2
|
||||
when:
|
||||
- runner_system_failure
|
||||
- stuck_or_timeout_failure
|
||||
|
||||
# Cache configuration
|
||||
.cargo-cache:
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- Cargo.lock
|
||||
paths:
|
||||
- ${CARGO_HOME}
|
||||
- target/
|
||||
policy: pull-push
|
||||
|
||||
# Job to initialize the environment
|
||||
setup:
|
||||
stage: setup
|
||||
extends: .cargo-cache
|
||||
cache:
|
||||
policy: push
|
||||
script:
|
||||
- cargo --version
|
||||
- rustc --version
|
||||
- cargo fetch
|
||||
artifacts:
|
||||
paths:
|
||||
- Cargo.lock
|
||||
|
||||
# Matrix build for multiple platforms
|
||||
.build-matrix:
|
||||
stage: build
|
||||
  extends: .cargo-cache
  needs:
    - setup
  parallel:
    matrix:
      - TARGET:
          - x86_64-unknown-linux-gnu
          - x86_64-apple-darwin
          - aarch64-apple-darwin
          - x86_64-pc-windows-msvc
        RUST_VERSION:
          - "1.75"
          - "1.76"
  script:
    - rustup target add $TARGET
    - cargo build --release --target $TARGET
  artifacts:
    paths:
      - target/$TARGET/release/
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_BRANCH == "main" || $CI_COMMIT_TAG
      when: always
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual
      allow_failure: true

# Regular build job for most cases
build:
  stage: build
  extends: .cargo-cache
  needs:
    - setup
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_BRANCH != "main" && !$CI_COMMIT_TAG
      when: always

# Test with different feature combinations
.test-template:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  artifacts:
    reports:
      junit: test-results.xml
    when: always

test-default:
  extends: .test-template
  script:
    - cargo test -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

test-all-features:
  extends: .test-template
  script:
    - cargo test --all-features -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

test-no-features:
  extends: .test-template
  script:
    - cargo test --no-default-features -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

# Security scanning
security:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  script:
    - cargo install cargo-audit || true
    - cargo audit
  allow_failure: true

# Linting
lint:
  stage: test
  extends: .cargo-cache
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

# Package for different targets
package:
  stage: package
  extends: .cargo-cache
  needs:
    - job: build
      artifacts: true
    - test-default
    - test-all-features
  script:
    - mkdir -p packages
    - tar -czf packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz -C target/release wrkflw
  artifacts:
    paths:
      - packages/
  only:
    - main
    - tags

# Deploy to staging
deploy-staging:
  stage: deploy
  image: alpine
  needs:
    - package
  environment:
    name: staging
  script:
    - apk add --no-cache curl
    - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${STAGING_DEPLOY_URL}
  only:
    - staging

# Deploy to production
deploy-production:
  stage: deploy
  image: alpine
  needs:
    - package
  environment:
    name: production
  script:
    - apk add --no-cache curl
    - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${PROD_DEPLOY_URL}
  only:
    - tags
  when: manual
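A note on the `parallel:matrix` block above: GitLab expands every key combination into its own job, so four targets times two Rust versions produce eight independent builds, each with `TARGET` and `RUST_VERSION` set in its environment. A minimal sketch of the same mechanism (the job name `matrix-demo` is illustrative, not from this repository):

```yaml
matrix-demo:
  parallel:
    matrix:
      - TARGET: [x86_64-unknown-linux-gnu, x86_64-apple-darwin]
        RUST_VERSION: ["1.75", "1.76"]
  script:
    - echo "Building for $TARGET on Rust $RUST_VERSION"  # expands to 2 x 2 = 4 jobs
```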
45
test_gitlab_ci/basic.gitlab-ci.yml
Normal file
@@ -0,0 +1,45 @@
stages:
  - build
  - test
  - deploy

variables:
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default image for all jobs
image: rust:1.76

build:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week

test:
  stage: test
  script:
    - cargo test
  dependencies:
    - build

lint:
  stage: test
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
    - cargo fmt -- --check

deploy:
  stage: deploy
  script:
    - echo "Deploying application..."
    - cp target/release/wrkflw /usr/local/bin/
  only:
    - main
  environment:
    name: production
  dependencies:
    - build
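In `basic.gitlab-ci.yml` above, `dependencies: [build]` only controls which artifacts the `test` and `deploy` jobs download; execution order still comes from the stage sequence. A hedged sketch of the `needs`-based alternative, which builds a DAG and lets `test` start as soon as `build` finishes instead of waiting for the whole `build` stage:

```yaml
test:
  stage: test
  needs:
    - build   # DAG edge: also implies downloading build's artifacts
  script:
    - cargo test
```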
97
test_gitlab_ci/docker.gitlab-ci.yml
Normal file
@@ -0,0 +1,97 @@
stages:
  - build
  - test
  - deploy

variables:
  DOCKER_DRIVER: overlay2
  DOCKER_TLS_CERTDIR: "/certs"
  CONTAINER_IMAGE: ${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG}
  CONTAINER_IMAGE_LATEST: ${CI_REGISTRY_IMAGE}:latest

# Use Docker-in-Docker for building and testing
.docker:
  image: docker:20.10
  services:
    - docker:20.10-dind
  variables:
    DOCKER_HOST: tcp://docker:2376
    DOCKER_TLS_VERIFY: 1
    DOCKER_CERT_PATH: $DOCKER_TLS_CERTDIR/client
  before_script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY

# Build the Docker image
build-docker:
  extends: .docker
  stage: build
  script:
    - docker build --pull -t $CONTAINER_IMAGE -t $CONTAINER_IMAGE_LATEST .
    - docker push $CONTAINER_IMAGE
    - docker push $CONTAINER_IMAGE_LATEST
  only:
    - main
    - tags

# Run tests inside Docker
test-docker:
  extends: .docker
  stage: test
  script:
    - docker pull $CONTAINER_IMAGE
    - docker run --rm $CONTAINER_IMAGE cargo test
  dependencies:
    - build-docker

# Security scan the Docker image
security-scan:
  extends: .docker
  stage: test
  image: aquasec/trivy:latest
  script:
    - trivy image --no-progress --exit-code 1 --severity HIGH,CRITICAL $CONTAINER_IMAGE
  allow_failure: true

# Run a Docker container with our app in the staging environment
deploy-staging:
  extends: .docker
  stage: deploy
  environment:
    name: staging
    url: https://staging.example.com
  script:
    - docker pull $CONTAINER_IMAGE
    - docker tag $CONTAINER_IMAGE wrkflw-staging
    - |
      cat > deploy.sh << 'EOF'
      docker stop wrkflw-staging || true
      docker rm wrkflw-staging || true
      docker run -d --name wrkflw-staging -p 8080:8080 wrkflw-staging
      EOF
    - chmod +x deploy.sh
    - ssh $STAGING_USER@$STAGING_HOST 'bash -s' < deploy.sh
  only:
    - main
  when: manual

# Run a Docker container with our app in the production environment
deploy-production:
  extends: .docker
  stage: deploy
  environment:
    name: production
    url: https://wrkflw.example.com
  script:
    - docker pull $CONTAINER_IMAGE
    - docker tag $CONTAINER_IMAGE wrkflw-production
    - |
      cat > deploy.sh << 'EOF'
      docker stop wrkflw-production || true
      docker rm wrkflw-production || true
      docker run -d --name wrkflw-production -p 80:8080 wrkflw-production
      EOF
    - chmod +x deploy.sh
    - ssh $PRODUCTION_USER@$PRODUCTION_HOST 'bash -s' < deploy.sh
  only:
    - tags
  when: manual
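One subtlety in `docker.gitlab-ci.yml`: `security-scan` both extends `.docker` and declares its own `image`. With `extends`, job-level keys win over template keys, so the effective job runs in the Trivy image while still inheriting the dind service, variables, and `before_script` from `.docker`. Roughly, the merged job looks like this sketch (note the inherited `docker login` would run inside the Trivy image, which is worth keeping in mind when reusing the template):

```yaml
security-scan:
  stage: test
  image: aquasec/trivy:latest        # job-level image overrides docker:20.10
  services:
    - docker:20.10-dind              # inherited from .docker
  variables:
    DOCKER_HOST: tcp://docker:2376   # inherited from .docker
    DOCKER_TLS_VERIFY: 1
    DOCKER_CERT_PATH: $DOCKER_TLS_CERTDIR/client
  before_script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
  script:
    - trivy image --no-progress --exit-code 1 --severity HIGH,CRITICAL $CONTAINER_IMAGE
  allow_failure: true
```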
40
test_gitlab_ci/includes.gitlab-ci.yml
Normal file
@@ -0,0 +1,40 @@
stages:
  - build
  - test
  - deploy

# Including external files
include:
  - local: '.gitlab/ci/build.yml'  # Will be created in a moment
  - local: '.gitlab/ci/test.yml'  # Will be created in a moment
  - template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml'  # Built-in template

variables:
  RUST_VERSION: "1.76"
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default settings for all jobs
default:
  image: rust:${RUST_VERSION}
  before_script:
    - rustc --version
    - cargo --version

# Main pipeline jobs that use the included templates
production_deploy:
  stage: deploy
  extends: .deploy-template  # This template is defined in one of the included files
  variables:
    ENVIRONMENT: production
  only:
    - main
  when: manual

staging_deploy:
  stage: deploy
  extends: .deploy-template
  variables:
    ENVIRONMENT: staging
  only:
    - staging
  when: manual
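`includes.gitlab-ci.yml` relies on a `.deploy-template` defined in one of the local includes, and neither `.gitlab/ci/build.yml` nor `.gitlab/ci/test.yml` appears in this diff. A purely hypothetical sketch of what such an included template might define, to show how `extends` plus the per-job `ENVIRONMENT` variable would fit together (none of this is from the repository):

```yaml
# Hypothetical content of an included file such as .gitlab/ci/build.yml
.deploy-template:
  image: alpine
  script:
    - echo "Deploying to ${ENVIRONMENT}..."  # ENVIRONMENT comes from the extending job
  environment:
    name: ${ENVIRONMENT}
```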
57
test_gitlab_ci/invalid.gitlab-ci.yml
Normal file
@@ -0,0 +1,57 @@
# Invalid GitLab CI file with common mistakes

# Missing stages definition
# stages:
#   - build
#   - test

variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/.cargo  # Missing quotes around value with variables

# Invalid job definition (missing script)
build:
  stage: build  # Referring to undefined stage
  # Missing required script section
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week

# Invalid job with incorrect when value
test:
  stage: test
  script:
    - cargo test
  when: never  # Invalid value for when (should be always, manual, or delayed)
  dependencies:
    - non_existent_job  # Dependency on non-existent job

# Improperly structured job with invalid keys
deploy:
  stagee: deploy  # Typo in stage key
  scriptt:  # Typo in script key
    - echo "Deploying..."
  only:
    - main
  environment:
    production  # Incorrect format for environment
  retry: hello  # Incorrect type for retry (should be integer or object)

# Invalid rules section
lint:
  stage: test
  script:
    - cargo clippy
  rules:
    - equals: $CI_COMMIT_BRANCH == "main"  # Invalid rule (should be if, changes, exists, etc.)

# Job with invalid cache configuration
cache-test:
  stage: test
  script:
    - echo "Testing cache"
  cache:
    paths:
      - ${CARGO_HOME}
    key: [invalid, key, type]  # Invalid type for key (should be string)
    policy: invalid-policy  # Invalid policy value
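For contrast with the intentional mistakes above: `rules` entries take clauses like `if`, `changes`, or `exists`, cache `key` must be a string (or a `key:files` map), and `policy` must be `pull`, `push`, or `pull-push`. A corrected sketch of the last two jobs:

```yaml
lint:
  stage: test
  script:
    - cargo clippy
  rules:
    - if: $CI_COMMIT_BRANCH == "main"  # valid clause instead of "equals:"

cache-test:
  stage: test
  script:
    - echo "Testing cache"
  cache:
    paths:
      - ${CARGO_HOME}
    key: cargo-${CI_COMMIT_REF_SLUG}   # string key instead of a list
    policy: pull-push                  # valid policy value
```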
11
test_gitlab_ci/minimal.gitlab-ci.yml
Normal file
@@ -0,0 +1,11 @@
# Minimal GitLab CI configuration

image: rust:latest

build:
  script:
    - cargo build

test:
  script:
    - cargo test
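Worth noting about `minimal.gitlab-ci.yml`: jobs that omit `stage:` default to the `test` stage, so `build` and `test` above actually run concurrently. The default pipeline already defines `build`, `test`, and `deploy` stages, so restoring the usual ordering only takes explicit `stage:` keys, as in this sketch:

```yaml
build:
  stage: build  # without this the job lands in the default test stage
  script:
    - cargo build

test:
  stage: test
  script:
    - cargo test
```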
167
test_gitlab_ci/services.gitlab-ci.yml
Normal file
@@ -0,0 +1,167 @@
stages:
  - build
  - test
  - deploy

variables:
  POSTGRES_DB: test_db
  POSTGRES_USER: postgres
  POSTGRES_PASSWORD: postgres
  POSTGRES_HOST: postgres
  REDIS_HOST: redis
  MONGO_HOST: mongo
  RUST_BACKTRACE: 1

# Default settings
default:
  image: rust:1.76

# Build the application
build:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/

# Run unit tests (no services needed)
unit-tests:
  stage: test
  needs:
    - build
  script:
    - cargo test --lib
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with a PostgreSQL service
postgres-tests:
  stage: test
  needs:
    - build
  services:
    - name: postgres:14-alpine
      alias: postgres
  variables:
    # Service-specific variables
    POSTGRES_DB: test_db
    POSTGRES_USER: postgres
    POSTGRES_PASSWORD: postgres
    DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db
  script:
    - apt-get update && apt-get install -y postgresql-client
    - cd target/release && ./wrkflw test-postgres
    - psql -h postgres -U postgres -d test_db -c "SELECT 1;"
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with Redis service
redis-tests:
  stage: test
  needs:
    - build
  services:
    - name: redis:alpine
      alias: redis
  variables:
    REDIS_URL: redis://redis:6379
  script:
    - apt-get update && apt-get install -y redis-tools
    - cd target/release && ./wrkflw test-redis
    - redis-cli -h redis PING
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with MongoDB service
mongo-tests:
  stage: test
  needs:
    - build
  services:
    - name: mongo:5
      alias: mongo
  variables:
    MONGO_URL: mongodb://mongo:27017
  script:
    - apt-get update && apt-get install -y mongodb-clients
    - cd target/release && ./wrkflw test-mongo
    - mongosh --host mongo --eval "db.version()"
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run multi-service integration tests
all-services-test:
  stage: test
  needs:
    - build
  services:
    - name: postgres:14-alpine
      alias: postgres
    - name: redis:alpine
      alias: redis
    - name: mongo:5
      alias: mongo
    - name: rabbitmq:3-management
      alias: rabbitmq
  variables:
    DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db
    REDIS_URL: redis://redis:6379
    MONGO_URL: mongodb://mongo:27017
    RABBITMQ_URL: amqp://guest:guest@rabbitmq:5672
  script:
    - apt-get update && apt-get install -y postgresql-client redis-tools mongodb-clients
    - cd target/release && ./wrkflw test-all-services
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Deploy to production
deploy:
  stage: deploy
  needs:
    - unit-tests
    - postgres-tests
    - redis-tests
    - mongo-tests
  script:
    - echo "Deploying application..."
    - cp target/release/wrkflw /tmp/
  only:
    - main
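The `services:` entries in `services.gitlab-ci.yml` are reachable from the job container under their `alias` hostnames, which is why the connection strings use `postgres`, `redis`, and `mongo` as hosts. A minimal sketch of the pattern (the job name `db-check` is illustrative):

```yaml
db-check:
  stage: test
  services:
    - name: postgres:14-alpine
      alias: postgres               # reachable as hostname "postgres"
  variables:
    POSTGRES_PASSWORD: postgres
  script:
    - apt-get update && apt-get install -y postgresql-client
    - psql -h postgres -U postgres -c "SELECT 1;"
```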
186
test_gitlab_ci/workflow.gitlab-ci.yml
Normal file
@@ -0,0 +1,186 @@
stages:
  - prepare
  - build
  - test
  - deploy

# Global workflow rules to control when pipelines run
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
    - if: $CI_COMMIT_BRANCH == "main"
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: $CI_COMMIT_BRANCH == "develop"
      when: always
    - if: $CI_COMMIT_BRANCH =~ /^release\/.*/
      when: always
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/
      when: always
    - when: never  # Skip all other branches

variables:
  RUST_VERSION: "1.76"
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default settings
default:
  image: "rust:${RUST_VERSION}"
  interruptible: true

# Cache definition to be used by other jobs
.cargo-cache:
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Prepare the dependencies (runs on all branches)
prepare:
  stage: prepare
  extends: .cargo-cache
  script:
    - cargo fetch --locked
  artifacts:
    paths:
      - Cargo.lock

# Build only on main branch and MRs
build:
  stage: build
  extends: .cargo-cache
  needs:
    - prepare
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_TAG

# Build with debug symbols on develop branch
debug-build:
  stage: build
  extends: .cargo-cache
  needs:
    - prepare
  script:
    - cargo build
  artifacts:
    paths:
      - target/debug/
  rules:
    - if: $CI_COMMIT_BRANCH == "develop"

# Test job - run on all branches except release and hotfix
test:
  stage: test
  extends: .cargo-cache
  needs:
    - job: build
      optional: true
    - job: debug-build
      optional: true
  script:
    - |
      if [ -d "target/release" ]; then
        cargo test --release
      else
        cargo test
      fi
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_BRANCH == "develop"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /^feature\/.*/

# Only lint on MRs and develop
lint:
  stage: test
  extends: .cargo-cache
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "develop"

# Run benchmarks only on main branch
benchmark:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  script:
    - cargo bench
  rules:
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_TAG

# Deploy to staging on develop branch pushes
deploy-staging:
  stage: deploy
  needs:
    - test
  environment:
    name: staging
    url: https://staging.example.com
  script:
    - echo "Deploying to staging..."
    - cp target/release/wrkflw /tmp/wrkflw-staging
  rules:
    - if: $CI_COMMIT_BRANCH == "develop"
      when: on_success
    - if: $CI_COMMIT_BRANCH =~ /^release\/.*/
      when: manual

# Deploy to production on main branch and tags
deploy-prod:
  stage: deploy
  needs:
    - test
    - benchmark
  environment:
    name: production
    url: https://example.com
  script:
    - echo "Deploying to production..."
    - cp target/release/wrkflw /tmp/wrkflw-prod
  rules:
    - if: $CI_COMMIT_BRANCH == "main"
      when: manual
    - if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/
      when: manual
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/
      when: manual

# Notify Slack whether the deploy succeeded or failed
notify:
  stage: .post
  image: curlimages/curl:latest
  needs:
    - job: deploy-staging
      optional: true
    - job: deploy-prod
      optional: true
  script:
    - |
      if [ "$CI_JOB_STATUS" == "success" ]; then
        curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment succeeded! :tada:"}' $SLACK_WEBHOOK_URL
      else
        curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment failed! :boom:"}' $SLACK_WEBHOOK_URL
      fi
  rules:
    - if: $CI_COMMIT_BRANCH == "main" && $CI_PIPELINE_SOURCE != "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "develop" && $CI_PIPELINE_SOURCE != "merge_request_event"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/
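The `workflow:rules` in `workflow.gitlab-ci.yml` are evaluated top to bottom, and the first matching rule decides whether the pipeline is created at all; the trailing `- when: never` blocks every branch not matched earlier. Job-level `rules` use the same first-match logic, so ordering matters:

```yaml
workflow:
  rules:
    - if: $CI_COMMIT_BRANCH == "main"  # first match wins for pushes to main
      when: always
    - when: never                      # everything else: no pipeline
```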
BIN
wrkflw.gif
Normal file
Binary file not shown. (After: 6.0 MiB)