Mirror of https://github.com/bahdotsh/wrkflw.git, synced 2026-01-04 03:16:50 +01:00

Compare commits: v0.3.0...bahdotsh/v (69 commits)
| SHA1 |
|---|
| b4a73a3cde |
| 4802e686de |
| 64621375cb |
| cff8e3f4bd |
| 4251e6469d |
| 2ba3dbe65b |
| 7edc6b3645 |
| 93f18d0327 |
| faee4717e1 |
| 22389736c3 |
| 699c9250f2 |
| 48e944a4cc |
| d5d1904d0a |
| 00fa569add |
| a97398f949 |
| e73b0df520 |
| 9f51e26eb3 |
| 3a88b33c83 |
| 3a9f4f1101 |
| 470132c5bf |
| 6ee550d39e |
| 16fc7ca83e |
| 61cb474c01 |
| d8cf675f37 |
| 6f09411c6f |
| 62475282ee |
| 89f255b226 |
| fffa920e4a |
| 27f5229325 |
| 26e1ccf7c3 |
| f658cf409d |
| b17cfd10fb |
| f97c3304cb |
| 34e1fc513e |
| e978d09a7d |
| 7bd7cc3b2b |
| 8975519c03 |
| dff56fd855 |
| 49a5eec484 |
| fb1c636971 |
| 0c5460e6ea |
| f1421dc154 |
| 189fc0f97b |
| 46cd1d6e33 |
| 6e3d61efe3 |
| 674af353f1 |
| 0acc65ff79 |
| e524122f62 |
| 3b8d9d09a9 |
| c8bcb3820a |
| 818cfe5522 |
| 6455dffa94 |
| ad7046ed89 |
| cb3f753f22 |
| 056572a246 |
| bd525ca23a |
| 22664eb324 |
| f04439011e |
| 6e1eb8e62d |
| e6c068cc1d |
| 99a0bae3e9 |
| 3f9ec9f89b |
| ad6ad05311 |
| bb77848b78 |
| 85a335c4fa |
| 4b4d5e3d26 |
| 5ba2759b4d |
| 034feec268 |
| b542ae00d6 |
.github/workflows/build.yml (vendored, 64 lines changed)

```diff
@@ -1,51 +1,57 @@
-name: Build & Test
+name: Build

 on:
+  workflow_dispatch:
   push:
     branches: [ main ]
   pull_request:
     branches: [ main ]
-  workflow_dispatch:

 env:
   CARGO_TERM_COLOR: always

 jobs:
   build:
-    name: Build & Test
+    name: Build
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
-        rust: [stable]
-    runs-on: ${{ matrix.os }}
+        include:
+          - os: ubuntu-latest
+            target: x86_64-unknown-linux-gnu
+          - os: macos-latest
+            target: x86_64-apple-darwin

     steps:
-      - uses: actions/checkout@v3
-
-      - name: Install Rust
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Setup Rust
         uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: ${{ matrix.rust }}
+          toolchain: stable
+          target: ${{ matrix.target }}
           override: true
-          components: rustfmt, clippy
+          components: clippy, rustfmt

-      - name: Cache Dependencies
-        uses: actions/cache@v3
+      - name: Check formatting
+        uses: actions-rs/cargo@v1
         with:
-          path: |
-            ~/.cargo/registry
-            ~/.cargo/git
-            target
-          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+          command: fmt
+          args: -- --check

-      - name: Check Formatting
-        run: cargo fmt --all -- --check
-
-      - name: Lint with Clippy
-        run: cargo clippy -- -D warnings
+      - name: Run clippy
+        uses: actions-rs/cargo@v1
+        with:
+          command: clippy
+          args: -- -D warnings

       - name: Build
-        run: cargo build --verbose
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          args: --target ${{ matrix.target }}

-      - name: Run Tests
-        run: cargo test --verbose
+      - name: Run tests
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --target ${{ matrix.target }}
```
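The reworked workflow can be smoke-tested before pushing. A minimal sketch, assuming `wrkflw` is installed and you run from the repository root (the cross-target steps only work if the matching rustup target is installed):

```bash
# Validate the workflow file itself with wrkflw
wrkflw validate .github/workflows/build.yml

# Approximate one matrix leg locally
rustup target add x86_64-unknown-linux-gnu
cargo build --target x86_64-unknown-linux-gnu
cargo test --target x86_64-unknown-linux-gnu
```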
.github/workflows/release.yml (vendored, 42 lines changed)

```diff
@@ -4,11 +4,24 @@ on:
   push:
     tags:
       - 'v*'
   workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version to use (e.g. v1.0.0)'
+        required: true
+        default: 'test-release'
+
+# Add permissions at workflow level
+permissions:
+  contents: write

 jobs:
   create-release:
     name: Create Release
     runs-on: ubuntu-latest
+    # You can also set permissions at the job level if needed
+    # permissions:
+    #   contents: write
     outputs:
       upload_url: ${{ steps.create_release.outputs.upload_url }}
     steps:
@@ -17,22 +30,29 @@ jobs:
       with:
         fetch-depth: 0

+      - name: Setup Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+
       - name: Install git-cliff
         run: |
-          curl -LSfs https://raw.githubusercontent.com/orhun/git-cliff/main/install.sh | sh -s -- --version latest
           echo "$HOME/.cargo/bin" >> $GITHUB_PATH
+          cargo install git-cliff --force

       - name: Generate Changelog
-        run: git cliff --latest --output CHANGELOG.md
+        run: git-cliff --latest --output CHANGELOG.md

       - name: Create Release
         id: create_release
         uses: softprops/action-gh-release@v1
         with:
-          name: "wrkflw ${{ github.ref_name }}"
+          name: "wrkflw ${{ github.event.inputs.version || github.ref_name }}"
           body_path: CHANGELOG.md
           draft: false
           prerelease: false
+          tag_name: ${{ github.event.inputs.version || github.ref_name }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -40,25 +60,24 @@ jobs:
     name: Build Release
     needs: [create-release]
     runs-on: ${{ matrix.os }}
+    # You can also set permissions at the job level if needed
+    # permissions:
+    #   contents: write
     strategy:
       matrix:
         include:
           - os: ubuntu-latest
             target: x86_64-unknown-linux-gnu
             artifact_name: wrkflw
-            asset_name: wrkflw-${{ github.ref_name }}-linux-x86_64
+            asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-linux-x86_64
           - os: macos-latest
             target: x86_64-apple-darwin
             artifact_name: wrkflw
-            asset_name: wrkflw-${{ github.ref_name }}-macos-x86_64
+            asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-macos-x86_64
           - os: macos-latest
             target: aarch64-apple-darwin
             artifact_name: wrkflw
-            asset_name: wrkflw-${{ github.ref_name }}-macos-arm64
-          - os: windows-latest
-            target: x86_64-pc-windows-msvc
-            artifact_name: wrkflw.exe
-            asset_name: wrkflw-${{ github.ref_name }}-windows-x86_64
+            asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-macos-arm64

     steps:
       - name: Checkout code
@@ -103,5 +122,6 @@ jobs:
         uses: softprops/action-gh-release@v1
         with:
           files: ${{ env.ASSET_PATH }}
+          tag_name: ${{ github.event.inputs.version || github.ref_name }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
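Throughout the updated workflow, `${{ github.event.inputs.version || github.ref_name }}` resolves to the manual `version` input when the run comes from `workflow_dispatch`, and falls back to the pushed tag name otherwise. A sketch of both trigger paths, assuming the GitHub CLI `gh` is authenticated against the repository:

```bash
# Tag-driven release: github.ref_name supplies the version
git tag v0.4.0
git push origin v0.4.0

# Manual test release: the workflow_dispatch 'version' input takes precedence
gh workflow run release.yml -f version=test-release
```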
.gitlab-ci.yml (new file, 100 lines)

```yaml
# GitLab CI/CD Pipeline for wrkflw
# This pipeline will build and test the Rust project

stages:
  - build
  - test
  - deploy

variables:
  RUST_VERSION: "1.70.0"
  CARGO_TERM_COLOR: always

# Cache settings
cache:
  key: "$CI_COMMIT_REF_SLUG"
  paths:
    - target/
  script:
    - echo "This is a placeholder - the cache directive doesn't need a script"

# Lint job - runs rustfmt and clippy
lint:
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  allow_failure: true

# Build job - builds the application
build:
  stage: build
  image: rust:${RUST_VERSION}
  script:
    - cargo build --verbose
  artifacts:
    paths:
      - target/debug
    expire_in: 1 week

# Test job - runs unit and integration tests
test:
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - cargo test --verbose
  dependencies:
    - build

# Release job - creates a release build
release:
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo build --release --verbose
  artifacts:
    paths:
      - target/release/wrkflw
    expire_in: 1 month
  rules:
    - if: $CI_PIPELINE_SOURCE == "web" && $BUILD_RELEASE == "true"
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - when: never

# Custom job for documentation
docs:
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - public
  only:
    - main

format:
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add rustfmt
    - cargo fmt --check
  allow_failure: true

pages:
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - public
  only:
    - main
```
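The `release` job's first rule only fires for pipelines whose `$CI_PIPELINE_SOURCE` is `web` and that have `BUILD_RELEASE` set to `true`; tag pipelines always run it. A hypothetical sketch of creating a pipeline with that variable through the GitLab API (the project ID is a placeholder, and note that API-created pipelines report a source of `api` rather than `web`, so under the rules above only the tag rule would match them):

```bash
# Create a pipeline on main with BUILD_RELEASE=true (placeholder project ID)
curl --request POST \
  --header "PRIVATE-TOKEN: $GITLAB_TOKEN" \
  --form "ref=main" \
  --form "variables[][key]=BUILD_RELEASE" \
  --form "variables[][value]=true" \
  "https://gitlab.com/api/v4/projects/<project-id>/pipeline"
```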
Cargo.lock (generated, 434 lines changed)

```diff
@@ -17,6 +17,20 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
 
+[[package]]
+name = "ahash"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
+dependencies = [
+ "cfg-if",
+ "getrandom 0.2.15",
+ "once_cell",
+ "serde",
+ "version_check",
+ "zerocopy",
+]
+
 [[package]]
 name = "aho-corasick"
 version = "1.1.3"
@@ -91,6 +105,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.98"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+
 [[package]]
 name = "async-trait"
 version = "0.1.88"
@@ -135,6 +155,21 @@ version = "0.21.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
 
+[[package]]
+name = "bit-set"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -192,6 +227,12 @@ version = "3.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
 
+[[package]]
+name = "bytecount"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce"
+
 [[package]]
 name = "bytes"
 version = "1.10.1"
@@ -445,6 +486,56 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "evaluator"
+version = "0.4.0"
+dependencies = [
+ "colored",
+ "models",
+ "serde_yaml",
+ "validators",
+]
+
+[[package]]
+name = "executor"
+version = "0.4.0"
+dependencies = [
+ "async-trait",
+ "bollard",
+ "chrono",
+ "dirs",
+ "futures",
+ "futures-util",
+ "lazy_static",
+ "logging",
+ "matrix",
+ "models",
+ "num_cpus",
+ "once_cell",
+ "parser",
+ "regex",
+ "runtime",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "tar",
+ "tempfile",
+ "thiserror",
+ "tokio",
+ "utils",
+ "uuid",
+]
+
+[[package]]
+name = "fancy-regex"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2"
+dependencies = [
+ "bit-set",
+ "regex",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.3.0"
@@ -493,6 +584,16 @@ dependencies = [
  "percent-encoding",
 ]
 
+[[package]]
+name = "fraction"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3027ae1df8d41b4bed2241c8fdad4acc1e7af60c8e17743534b545e77182d678"
+dependencies = [
+ "lazy_static",
+ "num",
+]
+
 [[package]]
 name = "futures"
 version = "0.3.31"
@@ -589,8 +690,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
 dependencies = [
  "cfg-if",
+ "js-sys",
  "libc",
  "wasi 0.11.0+wasi-snapshot-preview1",
+ "wasm-bindgen",
 ]
 
 [[package]]
@@ -611,6 +714,35 @@ version = "0.31.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
 
+[[package]]
+name = "github"
+version = "0.4.0"
+dependencies = [
+ "lazy_static",
+ "models",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "thiserror",
+]
+
+[[package]]
+name = "gitlab"
+version = "0.4.0"
+dependencies = [
+ "lazy_static",
+ "models",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "thiserror",
+ "urlencoding",
+]
+
 [[package]]
 name = "h2"
 version = "0.3.26"
@@ -962,6 +1094,15 @@ version = "1.70.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
 
+[[package]]
+name = "iso8601"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c5c177cff824ab21a6f41079a4c401241c4e8be14f316c4c6b07d5fca351c98d"
+dependencies = [
+ "nom",
+]
+
 [[package]]
 name = "itertools"
 version = "0.11.0"
@@ -987,6 +1128,36 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "jsonschema"
+version = "0.17.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a071f4f7efc9a9118dfb627a0a94ef247986e1ab8606a4c806ae2b3aa3b6978"
+dependencies = [
+ "ahash",
+ "anyhow",
+ "base64 0.21.7",
+ "bytecount",
+ "clap",
+ "fancy-regex",
+ "fraction",
+ "getrandom 0.2.15",
+ "iso8601",
+ "itoa",
+ "memchr",
+ "num-cmp",
+ "once_cell",
+ "parking_lot",
+ "percent-encoding",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "time",
+ "url",
+ "uuid",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.5.0"
@@ -1044,6 +1215,28 @@ version = "0.4.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
 
+[[package]]
+name = "logging"
+version = "0.4.0"
+dependencies = [
+ "chrono",
+ "models",
+ "once_cell",
+ "serde",
+ "serde_yaml",
+]
+
+[[package]]
+name = "matrix"
+version = "0.4.0"
+dependencies = [
+ "indexmap 2.8.0",
+ "models",
+ "serde",
+ "serde_yaml",
+ "thiserror",
+]
+
 [[package]]
 name = "memchr"
 version = "2.7.4"
@@ -1088,6 +1281,16 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "models"
+version = "0.4.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "thiserror",
+]
+
 [[package]]
 name = "native-tls"
 version = "0.2.14"
@@ -1116,12 +1319,91 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "nom"
+version = "8.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "num"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
+dependencies = [
+ "num-bigint",
+ "num-complex",
+ "num-integer",
+ "num-iter",
+ "num-rational",
+ "num-traits",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-cmp"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"
+
+[[package]]
+name = "num-complex"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "num-conv"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
 
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-rational"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
+dependencies = [
+ "num-bigint",
+ "num-integer",
+ "num-traits",
+]
+
 [[package]]
 name = "num-traits"
 version = "0.2.19"
@@ -1229,6 +1511,20 @@ dependencies = [
  "windows-targets 0.52.6",
 ]
 
+[[package]]
+name = "parser"
+version = "0.4.0"
+dependencies = [
+ "jsonschema",
+ "matrix",
+ "models",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "tempfile",
+ "thiserror",
+]
+
 [[package]]
 name = "paste"
 version = "1.0.15"
@@ -1435,6 +1731,23 @@ dependencies = [
  "winreg",
 ]
 
+[[package]]
+name = "runtime"
+version = "0.4.0"
+dependencies = [
+ "async-trait",
+ "futures",
+ "logging",
+ "models",
+ "once_cell",
+ "serde",
+ "serde_yaml",
+ "tempfile",
+ "tokio",
+ "utils",
+ "which",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.24"
@@ -1488,6 +1801,15 @@ version = "1.0.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
 [[package]]
 name = "schannel"
 version = "0.1.27"
@@ -1921,6 +2243,28 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
 
+[[package]]
+name = "ui"
+version = "0.4.0"
+dependencies = [
+ "chrono",
+ "crossterm 0.26.1",
+ "evaluator",
+ "executor",
+ "futures",
+ "github",
+ "logging",
+ "models",
+ "ratatui",
+ "regex",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "serde_yaml",
+ "tokio",
+ "utils",
+]
+
 [[package]]
 name = "unicode-ident"
 version = "1.0.18"
@@ -1956,6 +2300,12 @@ dependencies = [
  "percent-encoding",
 ]
 
+[[package]]
+name = "urlencoding"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+
 [[package]]
 name = "utf16_iter"
 version = "1.0.5"
@@ -1974,6 +2324,16 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
 
+[[package]]
+name = "utils"
+version = "0.4.0"
+dependencies = [
+ "models",
+ "nix",
+ "serde",
+ "serde_yaml",
+]
+
 [[package]]
 name = "uuid"
 version = "1.16.0"
@@ -1983,12 +2343,38 @@ dependencies = [
 "getrandom 0.3.2",
 ]
 
+[[package]]
+name = "validators"
+version = "0.4.0"
+dependencies = [
+ "matrix",
+ "models",
+ "serde",
+ "serde_yaml",
+]
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
 
+[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
 [[package]]
 name = "want"
 version = "0.3.1"
@@ -2122,6 +2508,15 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 
+[[package]]
+name = "winapi-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"
@@ -2324,38 +2719,49 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
 
 [[package]]
 name = "wrkflw"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "async-trait",
  "bollard",
  "chrono",
  "clap",
  "colored",
  "crossterm 0.26.1",
  "dirs",
+ "evaluator",
+ "executor",
  "futures",
  "futures-util",
+ "github",
+ "gitlab",
  "indexmap 2.8.0",
  "itertools",
  "lazy_static",
  "libc",
  "log",
+ "logging",
+ "matrix",
+ "models",
  "nix",
  "num_cpus",
  "once_cell",
+ "parser",
  "ratatui",
  "rayon",
  "regex",
  "reqwest",
+ "runtime",
  "serde",
  "serde_json",
  "serde_yaml",
  "tar",
  "tempfile",
  "thiserror",
  "tokio",
+ "ui",
  "urlencoding",
+ "utils",
  "uuid",
  "which",
+ "validators",
+ "walkdir",
 ]
 
 [[package]]
@@ -2392,6 +2798,26 @@ dependencies = [
  "synstructure",
 ]
 
+[[package]]
+name = "zerocopy"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "zerofrom"
 version = "0.1.6"
```
Cargo.toml (17 lines changed)

```diff
@@ -1,6 +1,11 @@
-[package]
-name = "wrkflw"
-version = "0.3.0"
+[workspace]
+members = [
+    "crates/*"
+]
+resolver = "2"
+
+[workspace.package]
+version = "0.4.0"
 edition = "2021"
 description = "A GitHub Actions workflow validator and executor"
 documentation = "https://github.com/bahdotsh/wrkflw"
@@ -10,12 +15,13 @@ keywords = ["workflows", "github", "local"]
 categories = ["command-line-utilities"]
 license = "MIT"
 
-[dependencies]
+[workspace.dependencies]
 clap = { version = "4.3", features = ["derive"] }
 colored = "2.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_yaml = "0.9"
 serde_json = "1.0"
+jsonschema = "0.17"
 tokio = { version = "1.28", features = ["full"] }
 async-trait = "0.1"
 bollard = "0.14"
@@ -36,11 +42,12 @@ itertools = "0.11.0"
 indexmap = { version = "2.0.0", features = ["serde"] }
 rayon = "1.7.0"
 num_cpus = "1.16.0"
-regex = "1.9"
+regex = "1.10"
 lazy_static = "1.4"
 reqwest = { version = "0.11", features = ["json"] }
 libc = "0.2"
 nix = { version = "0.27.1", features = ["fs"] }
+urlencoding = "2.1.3"
 
 [profile.release]
 codegen-units = 1
```
GITLAB_USAGE.md (new file, 83 lines)

# Using wrkflw with GitLab Pipelines

This guide explains how to use the `wrkflw` tool to trigger GitLab CI/CD pipelines.

## Prerequisites

1. A GitLab repository with a `.gitlab-ci.yml` file
2. A GitLab personal access token with API access
3. `wrkflw` installed on your system

## Setting Up

1. Create a GitLab personal access token:
   - Go to GitLab > User Settings > Access Tokens
   - Create a token with `api` scope
   - Copy the token value

2. Set the token as an environment variable:
   ```bash
   export GITLAB_TOKEN=your_token_here
   ```

## Triggering a Pipeline

You can trigger a GitLab pipeline using the `trigger-gitlab` command:

```bash
# Trigger using the default branch
wrkflw trigger-gitlab

# Trigger on a specific branch
wrkflw trigger-gitlab --branch feature-branch

# Trigger with custom variables
wrkflw trigger-gitlab --variable BUILD_RELEASE=true
```

### Example: Triggering a Release Build

To trigger the release build job in our sample pipeline:

```bash
wrkflw trigger-gitlab --variable BUILD_RELEASE=true
```

This will set the `BUILD_RELEASE` variable to `true`, which activates the release job in our sample pipeline.

### Example: Building Documentation

To trigger the documentation build job:

```bash
wrkflw trigger-gitlab --variable BUILD_DOCS=true
```

## Controlling Job Execution with Variables

Our sample GitLab pipeline is configured to make certain jobs conditional based on variables. You can use the `--variable` flag to control which jobs run:

| Variable | Purpose |
|----------|---------|
| `BUILD_RELEASE` | Set to `true` to run the release job |
| `BUILD_DOCS` | Set to `true` to build documentation |

## Checking Pipeline Status

After triggering a pipeline, you can check its status directly on GitLab:

1. Navigate to your GitLab repository
2. Go to CI/CD > Pipelines
3. Find your recently triggered pipeline

The `wrkflw` command will also provide a direct URL to the pipeline after triggering.

## Troubleshooting

If you encounter issues:

1. Verify your GitLab token is set correctly
2. Check that you're in a repository with a valid GitLab remote URL
3. Ensure your `.gitlab-ci.yml` file is valid
4. Check that your GitLab token has API access permissions
5. Review GitLab's CI/CD pipeline logs for detailed error information
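For the first troubleshooting step, the token can be checked directly against the GitLab API before involving wrkflw at all. A minimal sketch, assuming gitlab.com hosting:

```bash
# A 200 response with your user record confirms the token and its API access
curl --silent --fail \
  --header "PRIVATE-TOKEN: $GITLAB_TOKEN" \
  "https://gitlab.com/api/v4/user" && echo "token OK"
```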
README.md (206 lines changed)

````diff
@@ -13,7 +13,7 @@ WRKFLW is a powerful command-line tool for validating and executing GitHub Actio
 ## Features
 
 - **TUI Interface**: A full-featured terminal user interface for managing and monitoring workflow executions
-- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files
+- **Validate Workflow Files**: Check for syntax errors and common mistakes in GitHub Actions workflow files with proper exit codes for CI/CD integration
 - **Execute Workflows Locally**: Run workflows directly on your machine using Docker containers
 - **Emulation Mode**: Optional execution without Docker by emulating the container environment locally
 - **Job Dependency Resolution**: Automatically determines the correct execution order based on job dependencies
@@ -28,7 +28,7 @@ WRKFLW is a powerful command-line tool for validating and executing GitHub Actio
 - **Special Action Handling**: Native handling for commonly used actions like `actions/checkout`
 - **Output Capturing**: View logs, step outputs, and execution details
 - **Parallel Job Execution**: Runs independent jobs in parallel for faster workflow execution
-- **Trigger Workflows Remotely**: Manually trigger workflow runs on GitHub
+- **Trigger Workflows Remotely**: Manually trigger workflow runs on GitHub or GitLab
 
 ## Installation
 
@@ -77,8 +77,38 @@ wrkflw validate path/to/workflows
 
 # Validate with verbose output
 wrkflw validate --verbose path/to/workflow.yml
+
+# Validate GitLab CI pipelines
+wrkflw validate .gitlab-ci.yml --gitlab
+
+# Disable exit codes for custom error handling (default: enabled)
+wrkflw validate --no-exit-code path/to/workflow.yml
 ```
 
+#### Exit Codes for CI/CD Integration
+
+By default, `wrkflw validate` sets the exit code to `1` when validation fails, making it perfect for CI/CD pipelines and scripts:
+
+```bash
+# In CI/CD scripts - validation failure will cause the script to exit
+if ! wrkflw validate; then
+    echo "❌ Workflow validation failed!"
+    exit 1
+fi
+echo "✅ All workflows are valid!"
+
+# For custom error handling, disable exit codes
+wrkflw validate --no-exit-code
+if [ $? -eq 0 ]; then
+    echo "Validation completed (check output for details)"
+fi
+```
+
+**Exit Code Behavior:**
+- `0`: All validations passed successfully
+- `1`: One or more validation failures detected
+- `2`: Command usage error (invalid arguments, file not found, etc.)
+
 ### Running Workflows in CLI Mode
 
 ```bash
@@ -90,6 +120,9 @@ wrkflw run --emulate .github/workflows/ci.yml
 
 # Run with verbose output
 wrkflw run --verbose .github/workflows/ci.yml
+
+# Preserve failed containers for debugging
+wrkflw run --preserve-containers-on-failure .github/workflows/ci.yml
 ```
 
 ### Using the TUI Interface
@@ -113,6 +146,9 @@ wrkflw tui --emulate
 ```bash
 # Trigger a workflow remotely on GitHub
 wrkflw trigger workflow-name --branch main --input key1=value1 --input key2=value2
+
+# Trigger a pipeline remotely on GitLab
+wrkflw trigger-gitlab --branch main --variable key1=value1 --variable key2=value2
 ```
 
 ## TUI Controls
@@ -137,17 +173,25 @@ The terminal user interface provides an interactive way to manage workflows:
 
 ```bash
 $ wrkflw validate .github/workflows/rust.yml
-Validating workflows in: .github/workflows/rust.yml
-============================================================
-✅ Valid: rust.yml
-------------------------------------------------------------
-
-Summary
-============================================================
-✅ 1 valid workflow file(s)
-
-All workflows are valid! 🎉
+Validating GitHub workflow file: .github/workflows/rust.yml... Validating 1 workflow file(s)...
+✅ Valid: .github/workflows/rust.yml
+
+Summary: 1 valid, 0 invalid
+
+$ echo $?
+0
+
+# Example with validation failure
+$ wrkflw validate .github/workflows/invalid.yml
+Validating GitHub workflow file: .github/workflows/invalid.yml... Validating 1 workflow file(s)...
+❌ Invalid: .github/workflows/invalid.yml
+  1. Job 'test' is missing 'runs-on' field
+  2. Job 'test' is missing 'steps' section
+
+Summary: 0 valid, 1 invalid
+
+$ echo $?
+1
 ```
 
 ### Running a Workflow
@@ -220,12 +264,146 @@ WRKFLW supports composite actions, which are actions made up of multiple steps.
 
 WRKFLW automatically cleans up any Docker containers created during workflow execution, even if the process is interrupted with Ctrl+C.
 
+For debugging failed workflows, you can preserve containers that fail by using the `--preserve-containers-on-failure` flag:
+
+```bash
+# Preserve failed containers for debugging
+wrkflw run --preserve-containers-on-failure .github/workflows/build.yml
+
+# Also available in TUI mode
+wrkflw tui --preserve-containers-on-failure
+```
+
+When a container fails with this flag enabled, WRKFLW will:
+- Keep the failed container running instead of removing it
+- Log the container ID and provide inspection instructions
+- Show a message like: `Preserving container abc123 for debugging (exit code: 1). Use 'docker exec -it abc123 bash' to inspect.`
+
+This allows you to inspect the exact state of the container when the failure occurred, examine files, check environment variables, and debug issues more effectively.
+
 ## Limitations
 
 - Some GitHub-specific functionality might not work exactly as it does on GitHub
 - Complex matrix builds with very large matrices may have performance limitations
 - Actions that require specific GitHub environment features may need customization
 - Network-isolated actions might need internet connectivity configured differently
+
+### Supported Features
+- ✅ Basic workflow syntax and validation (all YAML syntax checks, required fields, and structure) with proper exit codes for CI/CD integration
+- ✅ Job dependency resolution and parallel execution (all jobs with correct 'needs' relationships are executed in the right order, and independent jobs run in parallel)
+- ✅ Matrix builds (supported for reasonable matrix sizes; very large matrices may be slow or resource-intensive)
+- ✅ Environment variables and GitHub context (all standard GitHub Actions environment variables and context objects are emulated)
+- ✅ Docker container actions (all actions that use Docker containers are supported in Docker mode)
+- ✅ JavaScript actions (all actions that use JavaScript are supported)
+- ✅ Composite actions (all composite actions, including nested and local composite actions, are supported)
+- ✅ Local actions (actions referenced with local paths are supported)
+- ✅ Special handling for common actions (e.g., `actions/checkout` is natively supported)
+- ✅ Workflow triggering via `workflow_dispatch` (manual triggering of workflows is supported)
+- ✅ GitLab pipeline triggering (manual triggering of GitLab pipelines is supported)
+- ✅ Environment files (`GITHUB_OUTPUT`, `GITHUB_ENV`, `GITHUB_PATH`, `GITHUB_STEP_SUMMARY` are fully supported)
+- ✅ TUI interface for workflow management and monitoring
+- ✅ CLI interface for validation, execution, and remote triggering
+- ✅ Output capturing (logs, step outputs, and execution details are available in both TUI and CLI)
+- ✅ Container cleanup (all containers created by wrkflw are automatically cleaned up, even on interruption)
+
+### Limited or Unsupported Features (Explicit List)
+- ❌ GitHub secrets and permissions: Only basic environment variables are supported. GitHub's encrypted secrets and fine-grained permissions are NOT available.
+- ❌ GitHub Actions cache: Caching functionality (e.g., `actions/cache`) is NOT supported in emulation mode and only partially supported in Docker mode (no persistent cache between runs).
+- ❌ GitHub API integrations: Only basic workflow triggering is supported. Features like workflow status reporting, artifact upload/download, and API-based job control are NOT available.
+- ❌ GitHub-specific environment variables: Some advanced or dynamic environment variables (e.g., those set by GitHub runners or by the GitHub API) are emulated with static or best-effort values, but not all are fully functional.
+- ❌ Large/complex matrix builds: Very large matrices (hundreds or thousands of job combinations) may not be practical due to performance and resource limits.
+- ❌ Network-isolated actions: Actions that require strict network isolation or custom network configuration may not work out-of-the-box and may require manual Docker configuration.
+- ❌ Some event triggers: Only `workflow_dispatch` (manual trigger) is fully supported. Other triggers (e.g., `push`, `pull_request`, `schedule`, `release`, etc.) are NOT supported.
+- ❌ GitHub runner-specific features: Features that depend on the exact GitHub-hosted runner environment (e.g., pre-installed tools, runner labels, or hardware) are NOT guaranteed to match. Only a best-effort emulation is provided.
+- ❌ Windows and macOS runners: Only Linux-based runners are fully supported. Windows and macOS jobs are NOT supported.
+- ❌ Service containers: Service containers (e.g., databases defined in `services:`) are only supported in Docker mode. In emulation mode, they are NOT supported.
+- ❌ Artifacts: Uploading and downloading artifacts between jobs/steps is NOT supported.
+- ❌ Job/step timeouts: Custom timeouts for jobs and steps are NOT enforced.
+- ❌ Job/step concurrency and cancellation: Features like `concurrency` and job cancellation are NOT supported.
+- ❌ Expressions and advanced YAML features: Most common expressions are supported, but some advanced or edge-case expressions may not be fully implemented.
+
+### Runtime Mode Differences
+- **Docker Mode**: Provides the closest match to GitHub's environment, including support for Docker container actions, service containers, and Linux-based jobs. Some advanced container configurations may still require manual setup.
+- **Emulation Mode**: Runs workflows using the local system tools. Limitations:
+  - Only supports local and JavaScript actions (no Docker container actions)
+  - No support for service containers
+  - No caching support
+  - Some actions may require adaptation to work locally
+  - Special action handling is more limited
+
+### Best Practices
+- Test workflows in both Docker and emulation modes to ensure compatibility
+- Keep matrix builds reasonably sized for better performance
+- Use environment variables instead of GitHub secrets when possible
+- Consider using local actions for complex custom functionality
+- Test network-dependent actions carefully in both modes
+
+## Roadmap
+
+The following roadmap outlines our planned approach to implementing currently unsupported or partially supported features in WRKFLW. Progress and priorities may change based on user feedback and community contributions.
+
+### 1. Secrets and Permissions
+- **Goal:** Support encrypted secrets and fine-grained permissions similar to GitHub Actions.
+- **Plan:**
+  - Implement secure secret storage and injection for workflow steps.
+  - Add support for reading secrets from environment variables, files, or secret managers.
+  - Investigate permission scoping for jobs and steps.
+
+### 2. GitHub Actions Cache
+- **Goal:** Enable persistent caching between workflow runs, especially for dependencies.
+- **Plan:**
+  - Implement a local cache directory for Docker mode.
+  - Add support for `actions/cache` in both Docker and emulation modes.
+  - Investigate cross-run cache persistence.
+
+### 3. GitHub API Integrations
+- **Goal:** Support artifact upload/download, workflow/job status reporting, and other API-based features.
+- **Plan:**
+  - Add artifact upload/download endpoints.
+  - Implement status reporting to GitHub via the API.
+  - Add support for job/step annotations and logs upload.
+
+### 4. Advanced Environment Variables
+- **Goal:** Emulate all dynamic GitHub-provided environment variables.
+- **Plan:**
+  - Audit missing variables and add dynamic computation where possible.
+  - Provide a compatibility table in the documentation.
+
+### 5. Large/Complex Matrix Builds
+- **Goal:** Improve performance and resource management for large matrices.
+- **Plan:**
+  - Optimize matrix expansion and job scheduling.
+  - Add resource limits and warnings for very large matrices.
+
+### 6. Network-Isolated Actions
+- **Goal:** Support custom network configurations and strict isolation for actions.
+- **Plan:**
+  - Add advanced Docker network configuration options.
+  - Document best practices for network isolation.
+
+### 7. Event Triggers
+- **Goal:** Support additional triggers (`push`, `pull_request`, `schedule`, etc.).
+- **Plan:**
+  - Implement event simulation for common triggers.
+  - Allow users to specify event payloads for local runs.
+
+### 8. Windows and macOS Runners
+- **Goal:** Add support for non-Linux runners.
+- **Plan:**
+  - Investigate cross-platform containerization and emulation.
+  - Add documentation for platform-specific limitations.
+
+### 9. Service Containers in Emulation Mode
+- **Goal:** Support service containers (e.g., databases) in emulation mode.
+- **Plan:**
+  - Implement local service startup and teardown scripts.
+  - Provide configuration for common services.
+
+### 10. Artifacts, Timeouts, Concurrency, and Expressions
+- **Goal:** Support artifact handling, job/step timeouts, concurrency, and advanced YAML expressions.
+- **Plan:**
+  - Add artifact storage and retrieval.
+  - Enforce timeouts and concurrency limits.
+  - Expand expression parser for advanced use cases.
+
+---
+
+**Want to help?** Contributions are welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for how to get started.
 
 ## License
````
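Following the preserved-container message quoted in the README diff above, a typical debugging loop looks like this (the container ID `abc123` is illustrative; use the one wrkflw logs):

```bash
docker exec -it abc123 bash   # inspect files and environment inside the failed container
docker logs abc123            # review the captured step output
docker rm -f abc123           # remove the container manually once done
```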
cliff.toml (88 lines changed)

```diff
@@ -8,18 +8,54 @@ All notable changes to wrkflw will be documented in this file.
 
 # Template for the changelog body
 body = """
-{% if version %}
-    ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
-{% else %}
-    ## [unreleased]
-{% endif %}
+{%- macro remote_url() -%}
+  https://github.com/bahdotsh/wrkflw
+{%- endmacro -%}
+
+{% macro print_commit(commit) -%}
+    - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
+      {% if commit.breaking %}[**breaking**] {% endif %}\
+      {{ commit.message | upper_first }} - \
+      ([{{ commit.id | truncate(length=7, end="") }}]({{ self::remote_url() }}/commit/{{ commit.id }}))\
+{% endmacro -%}
+
+{% if version %}\
+    {% if previous.version %}\
+        ## [{{ version | trim_start_matches(pat="v") }}]\
+          ({{ self::remote_url() }}/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
+    {% else %}\
+        ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
+    {% endif %}\
+{% else %}\
+    ## [unreleased]
+{% endif %}\
 
 {% for group, commits in commits | group_by(attribute="group") %}
-    ### {{ group | upper_first }}
-    {% for commit in commits %}
-        - {% if commit.breaking %}**BREAKING:** {% endif %}{{ commit.message | upper_first }} ([{{ commit.id | truncate(length=7, end="") }}]({{ commit.id | github_link }})){% if commit.links %} ({% for link in commit.links %}[{{ link.text }}]({{ link.href }}){% if not loop.last %}, {% endif %}{% endfor %}){% endif %}
-    {% endfor %}
-{% endfor %}
+    ### {{ group | striptags | trim | upper_first }}
+    {% for commit in commits
+    | filter(attribute="scope")
+    | sort(attribute="scope") %}
+        {{ self::print_commit(commit=commit) }}
+    {%- endfor %}
+    {% for commit in commits %}
+        {%- if not commit.scope -%}
+            {{ self::print_commit(commit=commit) }}
+        {% endif -%}
+    {% endfor -%}
+{% endfor -%}
+{%- if github -%}
+{% if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %}
+  ## New Contributors ❤️
+{% endif %}\
+{% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %}
+  * @{{ contributor.username }} made their first contribution
+    {%- if contributor.pr_number %} in \
+      [#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \
+    {%- endif %}
+{%- endfor -%}
+{%- endif %}
+
 
 """
 
 # Remove the leading and trailing whitespace from the template
@@ -35,19 +71,29 @@ footer = """
 conventional_commits = true
 filter_unconventional = true
 commit_parsers = [
-    { message = "^feat", group = "Features" },
-    { message = "^fix", group = "Bug Fixes" },
-    { message = "^docs", group = "Documentation" },
-    { message = "^style", group = "Styling" },
-    { message = "^refactor", group = "Refactor" },
-    { message = "^perf", group = "Performance" },
-    { message = "^test", group = "Testing" },
-    { message = "^chore\\(deps\\)", skip = true },
-    { message = "^chore\\(release\\)", skip = true },
-    { message = "^chore", group = "Miscellaneous Tasks" },
-    { body = ".*security", group = "Security" },
+    { message = "^feat", group = "<!-- 0 -->⛰️ Features" },
+    { message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
+    { message = "^doc", group = "<!-- 3 -->📚 Documentation" },
+    { message = "^perf", group = "<!-- 4 -->⚡ Performance" },
+    { message = "^refactor\\(clippy\\)", skip = true },
+    { message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
+    { message = "^style", group = "<!-- 5 -->🎨 Styling" },
+    { message = "^test", group = "<!-- 6 -->🧪 Testing" },
+    { message = "^chore\\(release\\): prepare for", skip = true },
+    { message = "^chore\\(deps.*\\)", skip = true },
+    { message = "^chore\\(pr\\)", skip = true },
+    { message = "^chore\\(pull\\)", skip = true },
+    { message = "^chore\\(npm\\).*yarn\\.lock", skip = true },
+    { message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
+    { body = ".*security", group = "<!-- 8 -->🛡️ Security" },
+    { message = "^revert", group = "<!-- 9 -->◀️ Revert" },
 ]
 
 # Define the GitHub repository URL for commit links
 [git.link]
 # Format: https://github.com/USER/REPO/commit/{}
 commit_link = "https://github.com/bahdotsh/wrkflw/commit/{}"
 
 # Format of the git commit link
 link_parsers = [
     { pattern = "#(\\d+)", href = "https://github.com/bahdotsh/wrkflw/issues/$1" },
```
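To illustrate how the new `commit_parsers` route messages, here are a few hypothetical commit subjects and the changelog group each would land in under the configuration above:

```bash
git commit -m "feat(gitlab): add pipeline trigger support"   # ⛰️ Features
git commit -m "fix: handle missing runs-on field"            # 🐛 Bug Fixes
git commit -m "chore(deps): bump serde_yaml"                 # skipped
git commit -m "refactor(clippy): appease pedantic lints"     # skipped
git commit -m "revert: undo matrix expansion change"         # ◀️ Revert
```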
crates/README.md (new file, 97 lines)

# Wrkflw Crates

This directory contains the Rust crates that make up the Wrkflw project. The project has been restructured to use a workspace-based approach with individual crates for better modularity and maintainability.

## Crate Structure

- **wrkflw**: Main binary crate and entry point for the application
- **models**: Data models and structures used throughout the application
- **evaluator**: Workflow evaluation functionality
- **executor**: Workflow execution engine
- **github**: GitHub API integration
- **gitlab**: GitLab API integration
- **logging**: Logging functionality
- **matrix**: Matrix-based parallelization support
- **parser**: Workflow parsing functionality
- **runtime**: Runtime execution environment
- **ui**: User interface components
- **utils**: Utility functions
- **validators**: Validation functionality

## Dependencies

Each crate has its own `Cargo.toml` file that defines its dependencies. The root `Cargo.toml` file defines the workspace and shared dependencies.

## Build Instructions

To build the entire project:

```bash
cargo build
```

To build a specific crate:

```bash
cargo build -p <crate-name>
```

## Testing

To run tests for the entire project:

```bash
cargo test
```

To run tests for a specific crate:

```bash
cargo test -p <crate-name>
```

## Rust Best Practices

When contributing to wrkflw, please follow these Rust best practices:

### Code Organization

- Place modules in their respective crates to maintain separation of concerns
- Use `pub` selectively to expose only the necessary APIs
- Follow the Rust module system conventions (use `mod` and `pub mod` appropriately)

### Errors and Error Handling

- Prefer using the `thiserror` crate for defining custom error types
- Use the `?` operator for error propagation instead of match statements when appropriate
- Implement custom error types that provide context for the error
- Avoid using `.unwrap()` and `.expect()` in production code

### Performance

- Profile code before optimizing using tools like `cargo flamegraph`
- Use `Arc` and `Mutex` judiciously for shared mutable state
- Leverage Rust's zero-cost abstractions (iterators, closures)
- Consider adding benchmark tests using the `criterion` crate for performance-critical code

### Security

- Validate all input, especially from external sources
- Avoid using `unsafe` code unless absolutely necessary
- Handle secrets securely using environment variables
- Check for integer overflows with `checked_` operations

### Testing

- Write unit tests for all public functions
- Use integration tests to verify crate-to-crate interactions
- Consider property-based testing for complex logic
- Structure tests with clear preparation, execution, and verification phases

### Tooling

- Run `cargo clippy` before committing changes to catch common mistakes
- Use `cargo fmt` to maintain consistent code formatting
- Enable compiler warnings with `#![warn(clippy::all)]`

For more detailed guidance, refer to the project's best practices documentation.
crates/evaluator/Cargo.toml (new file, 15 lines)

```toml
[package]
name = "evaluator"
version.workspace = true
edition.workspace = true
description = "Workflow evaluation for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
validators = { path = "../validators" }

# External dependencies
colored.workspace = true
serde_yaml.workspace = true
```
```diff
@@ -3,8 +3,8 @@ use serde_yaml::{self, Value};
 use std::fs;
 use std::path::Path;
 
-use crate::models::ValidationResult;
-use crate::validators::{validate_jobs, validate_triggers};
+use models::ValidationResult;
+use validators::{validate_jobs, validate_triggers};
 
 pub fn evaluate_workflow_file(path: &Path, verbose: bool) -> Result<ValidationResult, String> {
     let content = fs::read_to_string(path).map_err(|e| format!("Failed to read file: {}", e))?;
@@ -23,7 +23,23 @@ pub fn evaluate_workflow_file(path: &Path, verbose: bool) -> Result<ValidationResult, String> {
 
     // Check if name exists
     if workflow.get("name").is_none() {
-        result.add_issue("Workflow is missing a name".to_string());
+        // Check if this might be a reusable workflow caller before reporting missing name
+        let has_reusable_workflow_job = if let Some(Value::Mapping(jobs)) = workflow.get("jobs") {
+            jobs.values().any(|job| {
+                if let Some(job_config) = job.as_mapping() {
+                    job_config.contains_key(Value::String("uses".to_string()))
+                } else {
+                    false
+                }
+            })
+        } else {
+            false
+        };
+
+        // Only report missing name if it's not a workflow with reusable workflow jobs
+        if !has_reusable_workflow_job {
+            result.add_issue("Workflow is missing a name".to_string());
+        }
     }
 
     // Check if jobs section exists
```
crates/executor/Cargo.toml (new file, 35 lines)

```toml
[package]
name = "executor"
version.workspace = true
edition.workspace = true
description = "Workflow executor for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
parser = { path = "../parser" }
runtime = { path = "../runtime" }
logging = { path = "../logging" }
matrix = { path = "../matrix" }
utils = { path = "../utils" }

# External dependencies
async-trait.workspace = true
bollard.workspace = true
chrono.workspace = true
dirs.workspace = true
futures.workspace = true
futures-util.workspace = true
lazy_static.workspace = true
num_cpus.workspace = true
once_cell.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
tar.workspace = true
tempfile.workspace = true
thiserror.workspace = true
tokio.workspace = true
uuid.workspace = true
```
@@ -1,4 +1,4 @@
-use crate::parser::workflow::WorkflowDefinition;
+use parser::workflow::WorkflowDefinition;
 use std::collections::{HashMap, HashSet};

 pub fn resolve_dependencies(workflow: &WorkflowDefinition) -> Result<Vec<Vec<String>>, String> {
crates/executor/src/docker.rs (Normal file, 1158 lines): diff suppressed because it is too large.
@@ -11,9 +11,15 @@ use crate::{
 mod docker_cleanup_tests {
     use super::*;

+    // Helper function to check if Docker tests should be skipped
+    fn should_skip_docker_tests() -> bool {
+        std::env::var("WRKFLW_TEST_SKIP_DOCKER").is_ok() ||
+            !docker::is_available()
+    }
+
     /// Helper function to create a Docker container that should be tracked
     async fn create_test_container(docker_client: &Docker) -> Option<String> {
-        if !docker::is_available() {
+        if should_skip_docker_tests() {
             return None;
         }

@@ -53,7 +59,7 @@ mod docker_cleanup_tests {

     /// Helper function to create a Docker network that should be tracked
     async fn create_test_network(docker_client: &Docker) -> Option<String> {
-        if !docker::is_available() {
+        if should_skip_docker_tests() {
             return None;
         }

@@ -66,8 +72,8 @@ mod docker_cleanup_tests {

     #[tokio::test]
     async fn test_docker_container_cleanup() {
-        if !docker::is_available() {
-            println!("Docker not available, skipping test");
+        if should_skip_docker_tests() {
+            println!("Docker tests disabled or Docker not available, skipping test");
             return;
         }

@@ -107,8 +113,8 @@ mod docker_cleanup_tests {

     #[tokio::test]
     async fn test_docker_network_cleanup() {
-        if !docker::is_available() {
-            println!("Docker not available, skipping test");
+        if should_skip_docker_tests() {
+            println!("Docker tests disabled or Docker not available, skipping test");
             return;
         }

@@ -148,8 +154,8 @@ mod docker_cleanup_tests {

     #[tokio::test]
     async fn test_full_resource_cleanup() {
-        if !docker::is_available() {
-            println!("Docker not available, skipping test");
+        if should_skip_docker_tests() {
+            println!("Docker tests disabled or Docker not available, skipping test");
             return;
         }

crates/executor/src/engine.rs (Normal file, 2046 lines): diff suppressed because it is too large.
@@ -1,6 +1,6 @@
-use crate::matrix::MatrixCombination;
-use crate::parser::workflow::WorkflowDefinition;
 use chrono::Utc;
+use matrix::MatrixCombination;
+use parser::workflow::WorkflowDefinition;
 use serde_yaml::Value;
 use std::{collections::HashMap, fs, io, path::Path};

@@ -1,3 +1,5 @@
 // executor crate

+#![allow(unused_variables, unused_assignments)]
+
 pub mod dependency;
@@ -8,4 +10,6 @@ pub mod substitution;

 // Re-export public items
 pub use docker::cleanup_resources;
-pub use engine::{execute_workflow, JobResult, JobStatus, RuntimeType, StepResult, StepStatus};
+pub use engine::{
+    execute_workflow, ExecutionConfig, JobResult, JobStatus, RuntimeType, StepResult, StepStatus,
+};
@@ -10,6 +10,7 @@ lazy_static! {

 /// Preprocesses a command string to replace GitHub-style matrix variable references
 /// with their values from the environment
+#[allow(dead_code)]
 pub fn preprocess_command(command: &str, matrix_values: &HashMap<String, Value>) -> String {
     // Replace matrix references like ${{ matrix.os }} with their values
     let result = MATRIX_PATTERN.replace_all(command, |caps: &regex::Captures| {
@@ -34,6 +35,7 @@ pub fn preprocess_command(command: &str, matrix_values: &HashMap<String, Value>)
 }

 /// Apply variable substitution to step run commands
+#[allow(dead_code)]
 pub fn process_step_run(run: &str, matrix_combination: &Option<HashMap<String, Value>>) -> String {
     if let Some(matrix) = matrix_combination {
         preprocess_command(run, matrix)
crates/github/Cargo.toml (Normal file, 19 lines)
@@ -0,0 +1,19 @@
[package]
name = "github"
version.workspace = true
edition.workspace = true
description = "github functionality for wrkflw"
license.workspace = true

[dependencies]
# Add other crate dependencies as needed
models = { path = "../models" }

# External dependencies from workspace
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
reqwest.workspace = true
thiserror.workspace = true
lazy_static.workspace = true
regex.workspace = true
@@ -1,6 +1,9 @@
 // github crate

+use lazy_static::lazy_static;
+use regex::Regex;
 use reqwest::header;
 use serde_json::{self};
+use std::collections::HashMap;
 use std::fs;
 use std::path::Path;
@@ -161,6 +164,18 @@ pub async fn trigger_workflow(
     let branch_ref = branch.unwrap_or(&repo_info.default_branch);
     println!("Using branch: {}", branch_ref);

+    // Extract just the workflow name from the path if it's a full path
+    let workflow_name = if workflow_name.contains('/') {
+        Path::new(workflow_name)
+            .file_stem()
+            .and_then(|s| s.to_str())
+            .ok_or_else(|| GithubError::GitParseError("Invalid workflow name".to_string()))?
+    } else {
+        workflow_name
+    };
+
+    println!("Using workflow name: {}", workflow_name);
+
     // Create simplified payload
     let mut payload = serde_json::json!({
         "ref": branch_ref
@@ -202,9 +217,23 @@ pub async fn trigger_workflow(
             .await
             .unwrap_or_else(|_| format!("Unknown error (HTTP {})", status));

+        // Add more detailed error information
+        let error_details = if status == 500 {
+            "Internal server error from GitHub. This could be due to:\n\
+             1. The workflow file doesn't exist in the repository\n\
+             2. The GitHub token doesn't have sufficient permissions\n\
+             3. There's an issue with the workflow file itself\n\
+             Please check:\n\
+             - The workflow file exists at .github/workflows/rust.yml\n\
+             - Your GitHub token has the 'workflow' scope\n\
+             - The workflow file is valid YAML"
+        } else {
+            &error_message
+        };
+
         return Err(GithubError::ApiError {
             status,
-            message: error_message,
+            message: error_details.to_string(),
         });
     }

@@ -253,6 +282,16 @@ async fn list_recent_workflow_runs(
     workflow_name: &str,
     token: &str,
 ) -> Result<Vec<serde_json::Value>, GithubError> {
+    // Extract just the workflow name from the path if it's a full path
+    let workflow_name = if workflow_name.contains('/') {
+        Path::new(workflow_name)
+            .file_stem()
+            .and_then(|s| s.to_str())
+            .ok_or_else(|| GithubError::GitParseError("Invalid workflow name".to_string()))?
+    } else {
+        workflow_name
+    };
+
     // Get recent workflow runs via GitHub API
     let url = format!(
         "https://api.github.com/repos/{}/{}/actions/workflows/{}.yml/runs?per_page=5",
crates/gitlab/Cargo.toml (Normal file, 20 lines)
@@ -0,0 +1,20 @@
[package]
name = "gitlab"
version.workspace = true
edition.workspace = true
description = "gitlab functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
lazy_static.workspace = true
regex.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true
urlencoding.workspace = true
crates/gitlab/src/lib.rs (Normal file, 278 lines)
@@ -0,0 +1,278 @@
// gitlab crate

use lazy_static::lazy_static;
use regex::Regex;
use reqwest::header;
use std::collections::HashMap;
use std::path::Path;
use std::process::Command;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum GitlabError {
    #[error("HTTP error: {0}")]
    RequestError(#[from] reqwest::Error),

    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("Failed to parse Git repository URL: {0}")]
    GitParseError(String),

    #[error("GitLab token not found. Please set GITLAB_TOKEN environment variable")]
    TokenNotFound,

    #[error("API error: {status} - {message}")]
    ApiError { status: u16, message: String },
}

/// Information about a GitLab repository
#[derive(Debug, Clone)]
pub struct RepoInfo {
    pub namespace: String,
    pub project: String,
    pub default_branch: String,
}

lazy_static! {
    static ref GITLAB_REPO_REGEX: Regex =
        Regex::new(r"(?:https://gitlab\.com/|git@gitlab\.com:)([^/]+)/([^/.]+)(?:\.git)?")
            .expect("Failed to compile GitLab repo regex - this is a critical error");
}

/// Extract repository information from the current git repository for GitLab
pub fn get_repo_info() -> Result<RepoInfo, GitlabError> {
    let output = Command::new("git")
        .args(["remote", "get-url", "origin"])
        .output()
        .map_err(|e| GitlabError::GitParseError(format!("Failed to execute git command: {}", e)))?;

    if !output.status.success() {
        return Err(GitlabError::GitParseError(
            "Failed to get git origin URL. Are you in a git repository?".to_string(),
        ));
    }

    let url = String::from_utf8_lossy(&output.stdout).trim().to_string();

    if let Some(captures) = GITLAB_REPO_REGEX.captures(&url) {
        let namespace = captures
            .get(1)
            .ok_or_else(|| {
                GitlabError::GitParseError(
                    "Unable to extract namespace from GitLab URL".to_string(),
                )
            })?
            .as_str()
            .to_string();

        let project = captures
            .get(2)
            .ok_or_else(|| {
                GitlabError::GitParseError(
                    "Unable to extract project name from GitLab URL".to_string(),
                )
            })?
            .as_str()
            .to_string();

        // Get the default branch
        let branch_output = Command::new("git")
            .args(["rev-parse", "--abbrev-ref", "HEAD"])
            .output()
            .map_err(|e| {
                GitlabError::GitParseError(format!("Failed to execute git command: {}", e))
            })?;

        if !branch_output.status.success() {
            return Err(GitlabError::GitParseError(
                "Failed to get current branch".to_string(),
            ));
        }

        let default_branch = String::from_utf8_lossy(&branch_output.stdout)
            .trim()
            .to_string();

        Ok(RepoInfo {
            namespace,
            project,
            default_branch,
        })
    } else {
        Err(GitlabError::GitParseError(format!(
            "URL '{}' is not a valid GitLab repository URL",
            url
        )))
    }
}

/// Get the list of available pipeline files in the repository
pub async fn list_pipelines(_repo_info: &RepoInfo) -> Result<Vec<String>, GitlabError> {
    // GitLab CI/CD pipelines are defined in .gitlab-ci.yml files
    let pipeline_file = Path::new(".gitlab-ci.yml");

    if !pipeline_file.exists() {
        return Err(GitlabError::IoError(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "GitLab CI/CD pipeline file not found (.gitlab-ci.yml)",
        )));
    }

    // In GitLab, there's typically a single pipeline file with multiple jobs
    // Return a list with just that file name
    Ok(vec!["gitlab-ci".to_string()])
}

/// Trigger a pipeline on GitLab
pub async fn trigger_pipeline(
    branch: Option<&str>,
    variables: Option<HashMap<String, String>>,
) -> Result<(), GitlabError> {
    // Get GitLab token from environment
    let token = std::env::var("GITLAB_TOKEN").map_err(|_| GitlabError::TokenNotFound)?;

    // Trim the token to remove any leading or trailing whitespace
    let trimmed_token = token.trim();

    // Get repository information
    let repo_info = get_repo_info()?;
    println!(
        "GitLab Repository: {}/{}",
        repo_info.namespace, repo_info.project
    );

    // Prepare the request payload
    let branch_ref = branch.unwrap_or(&repo_info.default_branch);
    println!("Using branch: {}", branch_ref);

    // Create simplified payload
    let mut payload = serde_json::json!({
        "ref": branch_ref
    });

    // Add variables if provided
    if let Some(vars_map) = variables {
        // GitLab expects variables in a specific format
        let formatted_vars: Vec<serde_json::Value> = vars_map
            .iter()
            .map(|(key, value)| {
                serde_json::json!({
                    "key": key,
                    "value": value
                })
            })
            .collect();

        payload["variables"] = serde_json::json!(formatted_vars);
        println!("With variables: {:?}", vars_map);
    }

    // URL encode the namespace and project for use in URL
    let encoded_namespace = urlencoding::encode(&repo_info.namespace);
    let encoded_project = urlencoding::encode(&repo_info.project);

    // Send the pipeline trigger request
    let url = format!(
        "https://gitlab.com/api/v4/projects/{encoded_namespace}%2F{encoded_project}/pipeline",
        encoded_namespace = encoded_namespace,
        encoded_project = encoded_project,
    );

    println!("Triggering pipeline at URL: {}", url);

    // Create a reqwest client
    let client = reqwest::Client::new();

    // Send the request using reqwest
    let response = client
        .post(&url)
        .header("PRIVATE-TOKEN", trimmed_token)
        .header(header::CONTENT_TYPE, "application/json")
        .json(&payload)
        .send()
        .await
        .map_err(GitlabError::RequestError)?;

    if !response.status().is_success() {
        let status = response.status().as_u16();
        let error_message = response
            .text()
            .await
            .unwrap_or_else(|_| format!("Unknown error (HTTP {})", status));

        // Add more detailed error information
        let error_details = if status == 404 {
            "Project not found or token doesn't have access to it. This could be due to:\n\
             1. The project doesn't exist\n\
             2. The GitLab token doesn't have sufficient permissions\n\
             Please check:\n\
             - The repository URL is correct\n\
             - Your GitLab token has the correct scope (api access)\n\
             - Your token has access to the project"
        } else if status == 401 {
            "Unauthorized. Your GitLab token may be invalid or expired."
        } else {
            &error_message
        };

        return Err(GitlabError::ApiError {
            status,
            message: error_details.to_string(),
        });
    }

    // Parse response to get pipeline ID
    let pipeline_info: serde_json::Value = response.json().await?;
    let pipeline_id = pipeline_info["id"].as_i64().unwrap_or(0);
    let pipeline_url = format!(
        "https://gitlab.com/{}/{}/pipelines/{}",
        repo_info.namespace, repo_info.project, pipeline_id
    );

    println!("Pipeline triggered successfully!");
    println!("View pipeline at: {}", pipeline_url);

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_gitlab_url_https() {
        let url = "https://gitlab.com/mygroup/myproject.git";
        assert!(GITLAB_REPO_REGEX.is_match(url));

        let captures = GITLAB_REPO_REGEX.captures(url).unwrap();
        assert_eq!(captures.get(1).unwrap().as_str(), "mygroup");
        assert_eq!(captures.get(2).unwrap().as_str(), "myproject");
    }

    #[test]
    fn test_parse_gitlab_url_ssh() {
        let url = "git@gitlab.com:mygroup/myproject.git";
        assert!(GITLAB_REPO_REGEX.is_match(url));

        let captures = GITLAB_REPO_REGEX.captures(url).unwrap();
        assert_eq!(captures.get(1).unwrap().as_str(), "mygroup");
        assert_eq!(captures.get(2).unwrap().as_str(), "myproject");
    }

    #[test]
    fn test_parse_gitlab_url_no_git_extension() {
        let url = "https://gitlab.com/mygroup/myproject";
        assert!(GITLAB_REPO_REGEX.is_match(url));

        let captures = GITLAB_REPO_REGEX.captures(url).unwrap();
        assert_eq!(captures.get(1).unwrap().as_str(), "mygroup");
        assert_eq!(captures.get(2).unwrap().as_str(), "myproject");
    }

    #[test]
    fn test_parse_invalid_url() {
        let url = "https://github.com/myuser/myrepo.git";
        assert!(!GITLAB_REPO_REGEX.is_match(url));
    }
}
crates/logging/Cargo.toml (Normal file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "logging"
version.workspace = true
edition.workspace = true
description = "logging functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
chrono.workspace = true
once_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
@@ -5,7 +5,11 @@ use std::sync::{Arc, Mutex};
 // Thread-safe log storage
 static LOGS: Lazy<Arc<Mutex<Vec<String>>>> = Lazy::new(|| Arc::new(Mutex::new(Vec::new())));

+// Current log level
+static LOG_LEVEL: Lazy<Arc<Mutex<LogLevel>>> = Lazy::new(|| Arc::new(Mutex::new(LogLevel::Info)));
+
 // Log levels
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 pub enum LogLevel {
     Debug,
     Info,
@@ -24,6 +28,23 @@ impl LogLevel {
     }
 }

+// Set the current log level
+pub fn set_log_level(level: LogLevel) {
+    if let Ok(mut current_level) = LOG_LEVEL.lock() {
+        *current_level = level;
+    }
+}
+
+// Get the current log level
+pub fn get_log_level() -> LogLevel {
+    if let Ok(level) = LOG_LEVEL.lock() {
+        *level
+    } else {
+        // Default to Info if we can't get the lock
+        LogLevel::Info
+    }
+}
+
 // Log a message with timestamp and level
 pub fn log(level: LogLevel, message: &str) {
     let timestamp = Local::now().format("%H:%M:%S").to_string();
@@ -32,11 +53,20 @@ pub fn log(level: LogLevel, message: &str) {
     let formatted = format!("[{}] {} {}", timestamp, level.prefix(), message);

     if let Ok(mut logs) = LOGS.lock() {
-        logs.push(formatted);
+        logs.push(formatted.clone());
     }

-    // In verbose mode or when not in TUI, we might still want to print to console
-    // This can be controlled by a setting
+    // Print to console if the message level is >= the current log level
+    // This ensures Debug messages only show up when the Debug level is set
+    if let Ok(current_level) = LOG_LEVEL.lock() {
+        if level >= *current_level {
+            // Print to stdout/stderr based on level
+            match level {
+                LogLevel::Error | LogLevel::Warning => eprintln!("{}", formatted),
+                _ => println!("{}", formatted),
+            }
+        }
+    }
 }

 // Get all logs
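As a usage sketch of this level gate, assuming the crate's public `set_log_level` and `log` functions shown above, and that `Warning` orders above `Info` in the `LogLevel` enum:

```rust
fn main() {
    // Only messages at Info or above reach the console; all are stored.
    logging::set_log_level(logging::LogLevel::Info);
    logging::log(logging::LogLevel::Debug, "stored but not printed");
    logging::log(logging::LogLevel::Warning, "printed to stderr");
}
```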
crates/matrix/Cargo.toml (Normal file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "matrix"
version.workspace = true
edition.workspace = true
description = "matrix functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
indexmap.workspace = true
serde.workspace = true
serde_yaml.workspace = true
thiserror.workspace = true
@@ -1,3 +1,5 @@
+// matrix crate
+
 use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};
 use serde_yaml::Value;
crates/models/Cargo.toml (Normal file, 12 lines)
@@ -0,0 +1,12 @@
[package]
name = "models"
version.workspace = true
edition.workspace = true
description = "Data models for wrkflw"
license.workspace = true

[dependencies]
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true
crates/models/src/lib.rs (Normal file, 338 lines)
@@ -0,0 +1,338 @@
pub struct ValidationResult {
    pub is_valid: bool,
    pub issues: Vec<String>,
}

impl Default for ValidationResult {
    fn default() -> Self {
        Self::new()
    }
}

impl ValidationResult {
    pub fn new() -> Self {
        ValidationResult {
            is_valid: true,
            issues: Vec::new(),
        }
    }

    pub fn add_issue(&mut self, issue: String) {
        self.is_valid = false;
        self.issues.push(issue);
    }
}

// GitLab pipeline models
pub mod gitlab {
    use serde::{Deserialize, Serialize};
    use std::collections::HashMap;

    /// Represents a GitLab CI/CD pipeline configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Pipeline {
        /// Default image for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Global variables available to all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Pipeline stages in execution order
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stages: Option<Vec<String>>,

        /// Default before_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Default after_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// Job definitions (name => job)
        #[serde(flatten)]
        pub jobs: HashMap<String, Job>,

        /// Workflow rules for the pipeline
        #[serde(skip_serializing_if = "Option::is_none")]
        pub workflow: Option<Workflow>,

        /// Includes for pipeline configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub include: Option<Vec<Include>>,
    }

    /// A job in a GitLab CI/CD pipeline
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Job {
        /// The stage this job belongs to
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stage: Option<String>,

        /// Docker image to use for this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Script commands to run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub script: Option<Vec<String>>,

        /// Commands to run before the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Commands to run after the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// When to run the job (on_success, on_failure, always, manual)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,

        /// Allow job failure
        #[serde(skip_serializing_if = "Option::is_none")]
        pub allow_failure: Option<bool>,

        /// Services to run alongside the job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub services: Option<Vec<Service>>,

        /// Tags to define which runners can execute this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub tags: Option<Vec<String>>,

        /// Job-specific variables
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Job dependencies
        #[serde(skip_serializing_if = "Option::is_none")]
        pub dependencies: Option<Vec<String>>,

        /// Artifacts to store after job execution
        #[serde(skip_serializing_if = "Option::is_none")]
        pub artifacts: Option<Artifacts>,

        /// Cache configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub cache: Option<Cache>,

        /// Rules for when this job should run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub rules: Option<Vec<Rule>>,

        /// Only run on specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub only: Option<Only>,

        /// Exclude specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub except: Option<Except>,

        /// Retry configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub retry: Option<Retry>,

        /// Timeout for the job in seconds
        #[serde(skip_serializing_if = "Option::is_none")]
        pub timeout: Option<String>,

        /// Mark job as parallel and specify instance count
        #[serde(skip_serializing_if = "Option::is_none")]
        pub parallel: Option<usize>,

        /// Flag to indicate this is a template job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub template: Option<bool>,

        /// List of jobs this job extends from
        #[serde(skip_serializing_if = "Option::is_none")]
        pub extends: Option<Vec<String>>,
    }

    /// Docker image configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Image {
        /// Simple image name as string
        Simple(String),
        /// Detailed image configuration
        Detailed {
            /// Image name
            name: String,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Service container to run alongside a job
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Service {
        /// Simple service name as string
        Simple(String),
        /// Detailed service configuration
        Detailed {
            /// Service name/image
            name: String,
            /// Command to run in the service container
            #[serde(skip_serializing_if = "Option::is_none")]
            command: Option<Vec<String>>,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Artifacts configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Artifacts {
        /// Paths to include as artifacts
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// Artifact expiration duration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub expire_in: Option<String>,
        /// When to upload artifacts (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
    }

    /// Cache configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Cache {
        /// Cache key
        #[serde(skip_serializing_if = "Option::is_none")]
        pub key: Option<String>,
        /// Paths to cache
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// When to save cache (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Cache policy
        #[serde(skip_serializing_if = "Option::is_none")]
        pub policy: Option<String>,
    }

    /// Rule for conditional job execution
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Rule {
        /// If condition expression
        #[serde(skip_serializing_if = "Option::is_none")]
        pub if_: Option<String>,
        /// When to run if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Variables to set if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,
    }

    /// Only/except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Only {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to include
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to include
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to include
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to include
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Except {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that don't trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Workflow configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Workflow {
        /// Rules for when to run the pipeline
        pub rules: Vec<Rule>,
    }

    /// Retry configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Retry {
        /// Simple max attempts
        MaxAttempts(u32),
        /// Detailed retry configuration
        Detailed {
            /// Maximum retry attempts
            max: u32,
            /// When to retry
            #[serde(skip_serializing_if = "Option::is_none")]
            when: Option<Vec<String>>,
        },
    }

    /// Include configuration for external pipeline files
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Include {
        /// Simple string include
        Local(String),
        /// Detailed include configuration
        Detailed {
            /// Local file path
            #[serde(skip_serializing_if = "Option::is_none")]
            local: Option<String>,
            /// Remote file URL
            #[serde(skip_serializing_if = "Option::is_none")]
            remote: Option<String>,
            /// Include from project
            #[serde(skip_serializing_if = "Option::is_none")]
            project: Option<String>,
            /// Include specific file from project
            #[serde(skip_serializing_if = "Option::is_none")]
            file: Option<String>,
            /// Include template
            #[serde(skip_serializing_if = "Option::is_none")]
            template: Option<String>,
            /// Ref to use when including from project
            #[serde(skip_serializing_if = "Option::is_none")]
            ref_: Option<String>,
        },
    }
}
crates/parser/Cargo.toml (Normal file, 21 lines)
@@ -0,0 +1,21 @@
[package]
name = "parser"
version.workspace = true
edition.workspace = true
description = "Parser functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
matrix = { path = "../matrix" }

# External dependencies
jsonschema.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true

[dev-dependencies]
tempfile = "3.7"
crates/parser/src/gitlab.rs (Normal file, 275 lines)
@@ -0,0 +1,275 @@
use crate::schema::{SchemaType, SchemaValidator};
use crate::workflow;
use models::gitlab::Pipeline;
use models::ValidationResult;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum GitlabParserError {
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("YAML parsing error: {0}")]
    YamlError(#[from] serde_yaml::Error),

    #[error("Invalid pipeline structure: {0}")]
    InvalidStructure(String),

    #[error("Schema validation error: {0}")]
    SchemaValidationError(String),
}

/// Parse a GitLab CI/CD pipeline file
pub fn parse_pipeline(pipeline_path: &Path) -> Result<Pipeline, GitlabParserError> {
    // Read the pipeline file
    let pipeline_content = fs::read_to_string(pipeline_path)?;

    // Validate against schema
    let validator = SchemaValidator::new().map_err(GitlabParserError::SchemaValidationError)?;

    validator
        .validate_with_specific_schema(&pipeline_content, SchemaType::GitLab)
        .map_err(GitlabParserError::SchemaValidationError)?;

    // Parse the pipeline YAML
    let pipeline: Pipeline = serde_yaml::from_str(&pipeline_content)?;

    // Return the parsed pipeline
    Ok(pipeline)
}

/// Validate the basic structure of a GitLab CI/CD pipeline
pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Check for at least one job
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Check for script in jobs
    for (job_name, job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }
    }

    // Check that referenced stages are defined
    if let Some(stages) = &pipeline.stages {
        for (job_name, job) in &pipeline.jobs {
            if let Some(stage) = &job.stage {
                if !stages.contains(stage) {
                    result.add_issue(format!(
                        "Job '{}' references undefined stage '{}'",
                        job_name, stage
                    ));
                }
            }
        }
    }

    // Check that job dependencies exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !pipeline.jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                }
            }
        }
    }

    // Check that job extensions exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if !pipeline.jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                }
            }
        }
    }

    result
}

/// Convert a GitLab CI/CD pipeline to a format compatible with the workflow executor
pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition {
    // Create a new workflow with required fields
    let mut workflow = workflow::WorkflowDefinition {
        name: "Converted GitLab CI Pipeline".to_string(),
        on: vec!["push".to_string()], // Default trigger
        on_raw: serde_yaml::Value::String("push".to_string()),
        jobs: HashMap::new(),
    };

    // Convert each GitLab job to a GitHub Actions job
    for (job_name, gitlab_job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = gitlab_job.template {
            continue;
        }

        // Create a new job
        let mut job = workflow::Job {
            runs_on: "ubuntu-latest".to_string(), // Default runner
            needs: None,
            steps: Vec::new(),
            env: HashMap::new(),
            matrix: None,
            services: HashMap::new(),
            if_condition: None,
            outputs: None,
            permissions: None,
        };

        // Add job-specific environment variables
        if let Some(variables) = &gitlab_job.variables {
            job.env.extend(variables.clone());
        }

        // Add global variables if they exist
        if let Some(variables) = &pipeline.variables {
            // Only add if not already defined at job level
            for (key, value) in variables {
                job.env.entry(key.clone()).or_insert_with(|| value.clone());
            }
        }

        // Convert before_script to steps if it exists
        if let Some(before_script) = &gitlab_job.before_script {
            for (i, cmd) in before_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Before script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert main script to steps
        if let Some(script) = &gitlab_job.script {
            for (i, cmd) in script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Run script line {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert after_script to steps if it exists
        if let Some(after_script) = &gitlab_job.after_script {
            for (i, cmd) in after_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("After script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: Some(true), // After script should continue even if previous steps fail
                };
                job.steps.push(step);
            }
        }

        // Add services if they exist
        if let Some(services) = &gitlab_job.services {
            for (i, service) in services.iter().enumerate() {
                let service_name = format!("service-{}", i);
                let service_image = match service {
                    models::gitlab::Service::Simple(name) => name.clone(),
                    models::gitlab::Service::Detailed { name, .. } => name.clone(),
                };

                let service = workflow::Service {
                    image: service_image,
                    ports: None,
                    env: HashMap::new(),
                    volumes: None,
                    options: None,
                };

                job.services.insert(service_name, service);
            }
        }

        // Add the job to the workflow
        workflow.jobs.insert(job_name.clone(), job);
    }

    workflow
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;
    use tempfile::NamedTempFile;

    #[test]
    fn test_parse_simple_pipeline() {
        // Create a temporary file with a simple GitLab CI/CD pipeline
        let mut file = NamedTempFile::new().unwrap();
        let content = r#"
stages:
  - build
  - test

build_job:
  stage: build
  script:
    - echo "Building..."
    - make build

test_job:
  stage: test
  script:
    - echo "Testing..."
    - make test
"#;
        fs::write(&file, content).unwrap();

        // Parse the pipeline
        let pipeline = parse_pipeline(&file.path()).unwrap();

        // Validate basic structure
        assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2);
        assert_eq!(pipeline.jobs.len(), 2);

        // Check job contents
        let build_job = pipeline.jobs.get("build_job").unwrap();
        assert_eq!(build_job.stage.as_ref().unwrap(), "build");
        assert_eq!(build_job.script.as_ref().unwrap().len(), 2);

        let test_job = pipeline.jobs.get("test_job").unwrap();
        assert_eq!(test_job.stage.as_ref().unwrap(), "test");
        assert_eq!(test_job.script.as_ref().unwrap().len(), 2);
    }
}
crates/parser/src/lib.rs (Normal file, 5 lines)
@@ -0,0 +1,5 @@
// parser crate

pub mod gitlab;
pub mod schema;
pub mod workflow;
crates/parser/src/schema.rs (Normal file, 111 lines)
@@ -0,0 +1,111 @@
use jsonschema::JSONSchema;
use serde_json::Value;
use std::fs;
use std::path::Path;

const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../../schemas/github-workflow.json");
const GITLAB_CI_SCHEMA: &str = include_str!("../../../schemas/gitlab-ci.json");

#[derive(Debug, Clone, Copy)]
pub enum SchemaType {
    GitHub,
    GitLab,
}

pub struct SchemaValidator {
    github_schema: JSONSchema,
    gitlab_schema: JSONSchema,
}

impl SchemaValidator {
    pub fn new() -> Result<Self, String> {
        let github_schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
            .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;

        let gitlab_schema_json: Value = serde_json::from_str(GITLAB_CI_SCHEMA)
            .map_err(|e| format!("Failed to parse GitLab CI schema: {}", e))?;

        let github_schema = JSONSchema::compile(&github_schema_json)
            .map_err(|e| format!("Failed to compile GitHub JSON schema: {}", e))?;

        let gitlab_schema = JSONSchema::compile(&gitlab_schema_json)
            .map_err(|e| format!("Failed to compile GitLab JSON schema: {}", e))?;

        Ok(Self {
            github_schema,
            gitlab_schema,
        })
    }

    pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
        // Determine the schema type based on the filename
        let schema_type = if workflow_path.file_name().is_some_and(|name| {
            let name_str = name.to_string_lossy();
            name_str.ends_with(".gitlab-ci.yml") || name_str.ends_with(".gitlab-ci.yaml")
        }) {
            SchemaType::GitLab
        } else {
            SchemaType::GitHub
        };

        // Read the workflow file
        let content = fs::read_to_string(workflow_path)
            .map_err(|e| format!("Failed to read workflow file: {}", e))?;

        // Parse YAML to JSON Value
        let workflow_json: Value = serde_yaml::from_str(&content)
            .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;

        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }

    pub fn validate_with_specific_schema(
        &self,
        content: &str,
        schema_type: SchemaType,
    ) -> Result<(), String> {
        // Parse YAML to JSON Value
        let workflow_json: Value =
            serde_yaml::from_str(content).map_err(|e| format!("Failed to parse YAML: {}", e))?;

        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }
}
@@ -1,9 +1,31 @@
-use crate::matrix::MatrixConfig;
-use serde::{Deserialize, Serialize};
+use matrix::MatrixConfig;
+use serde::{Deserialize, Deserializer, Serialize};
 use std::collections::HashMap;
 use std::fs;
 use std::path::Path;

 use super::schema::SchemaValidator;

+// Custom deserializer for needs field that handles both string and array formats
+fn deserialize_needs<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum StringOrVec {
+        String(String),
+        Vec(Vec<String>),
+    }
+
+    let value = Option::<StringOrVec>::deserialize(deserializer)?;
+    match value {
+        Some(StringOrVec::String(s)) => Ok(Some(vec![s])),
+        Some(StringOrVec::Vec(v)) => Ok(Some(v)),
+        None => Ok(None),
+    }
+}
+
 #[derive(Debug, Deserialize, Serialize)]
 pub struct WorkflowDefinition {
     pub name: String,
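A small sketch of what `deserialize_needs` accepts; the `Probe` struct and the YAML strings are illustrative, not repository code:

```rust
// Both the scalar and the sequence form of `needs` come back as a Vec.
#[derive(serde::Deserialize)]
struct Probe {
    #[serde(default, deserialize_with = "deserialize_needs")]
    needs: Option<Vec<String>>,
}

fn main() {
    let single: Probe = serde_yaml::from_str("needs: build").unwrap();
    assert_eq!(single.needs, Some(vec!["build".to_string()]));

    let many: Probe = serde_yaml::from_str("needs: [build, lint]").unwrap();
    assert_eq!(many.needs, Some(vec!["build".to_string(), "lint".to_string()]));
}
```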
@@ -18,7 +40,7 @@ pub struct WorkflowDefinition {
 pub struct Job {
     #[serde(rename = "runs-on")]
     pub runs_on: String,
-    #[serde(default)]
+    #[serde(default, deserialize_with = "deserialize_needs")]
     pub needs: Option<Vec<String>>,
     pub steps: Vec<Step>,
     #[serde(default)]
@@ -27,6 +49,12 @@ pub struct Job {
     pub matrix: Option<MatrixConfig>,
     #[serde(default)]
     pub services: HashMap<String, Service>,
+    #[serde(default, rename = "if")]
+    pub if_condition: Option<String>,
+    #[serde(default)]
+    pub outputs: Option<HashMap<String, String>>,
+    #[serde(default)]
+    pub permissions: Option<HashMap<String, String>>,
 }

 #[derive(Debug, Deserialize, Serialize)]
@@ -54,6 +82,8 @@ pub struct Step {
     pub with: Option<HashMap<String, String>>,
     #[serde(default)]
     pub env: HashMap<String, String>,
+    #[serde(default)]
+    pub continue_on_error: Option<bool>,
 }

 impl WorkflowDefinition {
@@ -83,6 +113,11 @@ pub struct ActionInfo {
 }

 pub fn parse_workflow(path: &Path) -> Result<WorkflowDefinition, String> {
+    // First validate against schema
+    let validator = SchemaValidator::new()?;
+    validator.validate_workflow(path)?;
+
+    // If validation passes, parse the workflow
     let content =
         fs::read_to_string(path).map_err(|e| format!("Failed to read workflow file: {}", e))?;

crates/runtime/Cargo.toml (Normal file, 22 lines)
@@ -0,0 +1,22 @@
[package]
name = "runtime"
version.workspace = true
edition.workspace = true
description = "Runtime environment for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
logging = { path = "../logging", version = "0.4.0" }

# External dependencies
async-trait.workspace = true
once_cell = "1.19"
serde.workspace = true
serde_yaml.workspace = true
tempfile = "3.9"
tokio.workspace = true
futures = "0.3"
utils = { path = "../utils", version = "0.4.0" }
which = "4.4"
@@ -15,6 +15,13 @@ pub trait ContainerRuntime {
     async fn pull_image(&self, image: &str) -> Result<(), ContainerError>;

     async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError>;
+
+    async fn prepare_language_environment(
+        &self,
+        language: &str,
+        version: Option<&str>,
+        additional_packages: Option<Vec<String>>,
+    ) -> Result<String, ContainerError>;
 }

 pub struct ContainerOutput {
crates/runtime/src/emulation.rs (Normal file, 819 lines)
@@ -0,0 +1,819 @@
|
||||
use crate::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use async_trait::async_trait;
|
||||
use logging;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::sync::Mutex;
|
||||
use tempfile::TempDir;
|
||||
use which;
|
||||
|
||||
// Global collection of resources to clean up
|
||||
static EMULATION_WORKSPACES: Lazy<Mutex<Vec<PathBuf>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
static EMULATION_PROCESSES: Lazy<Mutex<Vec<u32>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
|
||||
pub struct EmulationRuntime {
|
||||
#[allow(dead_code)]
|
||||
workspace: TempDir,
|
||||
}
|
||||
|
||||
impl Default for EmulationRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl EmulationRuntime {
|
||||
pub fn new() -> Self {
|
||||
// Create a temporary workspace to simulate container isolation
|
||||
let workspace =
|
||||
tempfile::tempdir().expect("Failed to create temporary workspace for emulation");
|
||||
|
||||
// Track this workspace for cleanup
|
||||
if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
|
||||
workspaces.push(workspace.path().to_path_buf());
|
||||
}
|
||||
|
||||
EmulationRuntime { workspace }
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn prepare_workspace(&self, _working_dir: &Path, volumes: &[(&Path, &Path)]) -> PathBuf {
|
||||
// Get the container root - this is the emulation workspace directory
|
||||
let container_root = self.workspace.path().to_path_buf();
|
||||
|
||||
// Make sure we have a github/workspace subdirectory which is where
|
||||
// commands will be executed
|
||||
let github_workspace = container_root.join("github").join("workspace");
|
||||
fs::create_dir_all(&github_workspace)
|
||||
.expect("Failed to create github/workspace directory structure");
|
||||
|
||||
// Map all volumes
|
||||
for (host_path, container_path) in volumes {
|
||||
// Determine target path - if it starts with /github/workspace, it goes to our workspace dir
|
||||
let target_path = if container_path.starts_with("/github/workspace") {
|
||||
// Map /github/workspace to our github_workspace directory
|
||||
let rel_path = container_path
|
||||
.strip_prefix("/github/workspace")
|
||||
.unwrap_or(Path::new(""));
|
||||
github_workspace.join(rel_path)
|
||||
} else if container_path.starts_with("/") {
|
||||
// Other absolute paths go under container_root
|
||||
container_root.join(container_path.strip_prefix("/").unwrap_or(container_path))
|
||||
} else {
|
||||
// Relative paths go directly under container_root
|
||||
container_root.join(container_path)
|
||||
};
|
||||
|
||||
// Create parent directories
|
||||
if let Some(parent) = target_path.parent() {
|
||||
fs::create_dir_all(parent).expect("Failed to create directory structure");
|
||||
}
|
||||
|
||||
// For directories, copy content recursively
|
||||
if host_path.is_dir() {
|
||||
// If the host path is the project root and container path is the workspace,
|
||||
// we want to copy all project files to the github/workspace directory
|
||||
if *container_path == Path::new("/github/workspace") {
|
||||
// Use a recursive copy function to copy all files and directories
|
||||
copy_directory_contents(host_path, &github_workspace)
|
||||
.expect("Failed to copy project files to workspace");
|
||||
} else {
|
||||
// Create the target directory
|
||||
fs::create_dir_all(&target_path).expect("Failed to create target directory");
|
||||
|
||||
// Copy files in this directory (not recursive for simplicity)
|
||||
for entry in fs::read_dir(host_path)
|
||||
.expect("Failed to read source directory")
|
||||
.flatten()
|
||||
{
|
||||
let source = entry.path();
|
||||
let file_name = match source.file_name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
eprintln!(
|
||||
"Warning: Could not get file name from path: {:?}",
|
||||
source
|
||||
);
|
||||
continue; // Skip this file
|
||||
}
|
||||
};
|
||||
let dest = target_path.join(file_name);
|
||||
|
||||
if source.is_file() {
|
||||
if let Err(e) = fs::copy(&source, &dest) {
|
||||
eprintln!(
|
||||
"Warning: Failed to copy file from {:?} to {:?}: {}",
|
||||
&source, &dest, e
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// We could make this recursive if needed
|
||||
fs::create_dir_all(&dest).expect("Failed to create subdirectory");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if host_path.is_file() {
|
||||
// Copy individual file
|
||||
let file_name = match host_path.file_name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
eprintln!(
|
||||
"Warning: Could not get file name from path: {:?}",
|
||||
host_path
|
||||
);
|
||||
continue; // Skip this file
|
||||
}
|
||||
};
|
||||
let dest = target_path.join(file_name);
|
||||
if let Err(e) = fs::copy(host_path, &dest) {
|
||||
eprintln!(
|
||||
"Warning: Failed to copy file from {:?} to {:?}: {}",
|
||||
host_path, &dest, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Return the github/workspace directory for command execution
|
||||
github_workspace
|
||||
}
|
||||
}
|
||||

#[async_trait]
impl ContainerRuntime for EmulationRuntime {
    async fn run_container(
        &self,
        _image: &str,
        command: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
        _volumes: &[(&Path, &Path)],
    ) -> Result<ContainerOutput, ContainerError> {
        // Build command string
        let mut command_str = String::new();
        for part in command {
            if !command_str.is_empty() {
                command_str.push(' ');
            }
            command_str.push_str(part);
        }

        // Log more detailed debugging information
        logging::info(&format!("Executing command in container: {}", command_str));
        logging::info(&format!("Working directory: {}", working_dir.display()));
        logging::info(&format!("Command length: {}", command.len()));

        if command.is_empty() {
            return Err(ContainerError::ContainerExecution(
                "Empty command array".to_string(),
            ));
        }

        // Print each command part separately for debugging
        for (i, part) in command.iter().enumerate() {
            logging::info(&format!("Command part {}: '{}'", i, part));
        }

        // Log environment variables
        logging::info("Environment variables:");
        for (key, value) in env_vars {
            logging::info(&format!("  {}={}", key, value));
        }

        // Find actual working directory - determine if we should use the current directory instead
        let actual_working_dir: PathBuf = if !working_dir.exists() {
            // Look for GITHUB_WORKSPACE or CI_PROJECT_DIR in env_vars
            let mut workspace_path = None;
            for (key, value) in env_vars {
                if *key == "GITHUB_WORKSPACE" || *key == "CI_PROJECT_DIR" {
                    workspace_path = Some(PathBuf::from(value));
                    break;
                }
            }

            // If found, use that as the working directory
            if let Some(path) = workspace_path {
                if path.exists() {
                    logging::info(&format!(
                        "Using environment-defined workspace: {}",
                        path.display()
                    ));
                    path
                } else {
                    // Fall back to the current directory
                    let current_dir =
                        std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
                    logging::info(&format!(
                        "Using current directory: {}",
                        current_dir.display()
                    ));
                    current_dir
                }
            } else {
                // Fall back to the current directory
                let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
                logging::info(&format!(
                    "Using current directory: {}",
                    current_dir.display()
                ));
                current_dir
            }
        } else {
            working_dir.to_path_buf()
        };

        logging::info(&format!(
            "Using actual working directory: {}",
            actual_working_dir.display()
        ));

        // Check if PATH contains the command (for shell script execution)
        let command_path = which::which(command[0]);
        match &command_path {
            Ok(path) => logging::info(&format!("Found command at: {}", path.display())),
            Err(e) => logging::error(&format!(
                "Command not found in PATH: {} - Error: {}",
                command[0], e
            )),
        }

        // First, check if this is a simple shell command (like echo)
        if command_str.starts_with("echo ")
            || command_str.starts_with("cp ")
            || command_str.starts_with("mkdir ")
            || command_str.starts_with("mv ")
        {
            logging::info("Executing as shell command");
            // Execute as a shell command
            let mut cmd = Command::new("sh");
            cmd.arg("-c");
            cmd.arg(&command_str);
            cmd.current_dir(&actual_working_dir);

            // Add environment variables
            for (key, value) in env_vars {
                cmd.env(key, value);
            }

            match cmd.output() {
                Ok(output_result) => {
                    let exit_code = output_result.status.code().unwrap_or(-1);
                    let output = String::from_utf8_lossy(&output_result.stdout).to_string();
                    let error = String::from_utf8_lossy(&output_result.stderr).to_string();

                    logging::debug(&format!(
                        "Shell command completed with exit code: {}",
                        exit_code
                    ));

                    if exit_code != 0 {
                        let mut error_details = format!(
                            "Command failed with exit code: {}\nCommand: {}\n\nError output:\n{}",
                            exit_code, command_str, error
                        );

                        // Add environment variables to error details
                        error_details.push_str("\n\nEnvironment variables:\n");
                        for (key, value) in env_vars {
                            if key.starts_with("GITHUB_") || key.starts_with("CI_") {
                                error_details.push_str(&format!("{}={}\n", key, value));
                            }
                        }

                        return Err(ContainerError::ContainerExecution(error_details));
                    }

                    return Ok(ContainerOutput {
                        stdout: output,
                        stderr: error,
                        exit_code,
                    });
                }
                Err(e) => {
                    return Err(ContainerError::ContainerExecution(format!(
                        "Failed to execute command: {}\nError: {}",
                        command_str, e
                    )));
                }
            }
        }

        // Special handling for Rust/Cargo commands
        if command_str.starts_with("cargo ") || command_str.starts_with("rustup ") {
            let parts: Vec<&str> = command_str.split_whitespace().collect();
            if parts.is_empty() {
                return Err(ContainerError::ContainerExecution(
                    "Empty command".to_string(),
                ));
            }

            let mut cmd = Command::new(parts[0]);

            // Always use the current directory for cargo/rust commands rather than the temporary directory
            let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
            logging::info(&format!(
                "Using project directory for Rust command: {}",
                current_dir.display()
            ));
            cmd.current_dir(&current_dir);

            // Add environment variables
            for (key, value) in env_vars {
                // Don't use the CI_PROJECT_DIR for CARGO_HOME, use the actual project directory
                if *key == "CARGO_HOME" && value.contains("${CI_PROJECT_DIR}") {
                    let cargo_home =
                        value.replace("${CI_PROJECT_DIR}", &current_dir.to_string_lossy());
                    logging::info(&format!("Setting CARGO_HOME to: {}", cargo_home));
                    cmd.env(key, cargo_home);
                } else {
                    cmd.env(key, value);
                }
            }

            // Add command arguments
            if parts.len() > 1 {
                cmd.args(&parts[1..]);
            }

            logging::debug(&format!(
                "Executing Rust command: {} in {}",
                command_str,
                current_dir.display()
            ));

            match cmd.output() {
                Ok(output_result) => {
                    let exit_code = output_result.status.code().unwrap_or(-1);
                    let output = String::from_utf8_lossy(&output_result.stdout).to_string();
                    let error = String::from_utf8_lossy(&output_result.stderr).to_string();

                    logging::debug(&format!("Command exit code: {}", exit_code));

                    if exit_code != 0 {
                        let mut error_details = format!(
                            "Command failed with exit code: {}\nCommand: {}\n\nError output:\n{}",
                            exit_code, command_str, error
                        );

                        // Add environment variables to error details
                        error_details.push_str("\n\nEnvironment variables:\n");
                        for (key, value) in env_vars {
                            if key.starts_with("GITHUB_")
                                || key.starts_with("RUST")
                                || key.starts_with("CARGO")
                                || key.starts_with("CI_")
                            {
                                error_details.push_str(&format!("{}={}\n", key, value));
                            }
                        }

                        return Err(ContainerError::ContainerExecution(error_details));
                    }

                    return Ok(ContainerOutput {
                        stdout: output,
                        stderr: error,
                        exit_code,
                    });
                }
                Err(e) => {
                    return Err(ContainerError::ContainerExecution(format!(
                        "Failed to execute Rust command: {}",
                        e
                    )));
                }
            }
        }

        // For other commands, use a shell as fallback
        let mut cmd = Command::new("sh");
        cmd.arg("-c");
        cmd.arg(&command_str);
        cmd.current_dir(&actual_working_dir);

        // Add environment variables
        for (key, value) in env_vars {
            cmd.env(key, value);
        }

        match cmd.output() {
            Ok(output_result) => {
                let exit_code = output_result.status.code().unwrap_or(-1);
                let output = String::from_utf8_lossy(&output_result.stdout).to_string();
                let error = String::from_utf8_lossy(&output_result.stderr).to_string();

                logging::debug(&format!("Command completed with exit code: {}", exit_code));

                if exit_code != 0 {
                    let mut error_details = format!(
                        "Command failed with exit code: {}\nCommand: {}\n\nError output:\n{}",
                        exit_code, command_str, error
                    );

                    // Add environment variables to error details
                    error_details.push_str("\n\nEnvironment variables:\n");
                    for (key, value) in env_vars {
                        if key.starts_with("GITHUB_") || key.starts_with("CI_") {
                            error_details.push_str(&format!("{}={}\n", key, value));
                        }
                    }

                    return Err(ContainerError::ContainerExecution(error_details));
                }

                Ok(ContainerOutput {
                    stdout: format!(
                        "Emulated container execution with command: {}\n\nOutput:\n{}",
                        command_str, output
                    ),
                    stderr: error,
                    exit_code,
                })
            }
            Err(e) => {
                return Err(ContainerError::ContainerExecution(format!(
                    "Failed to execute command: {}\nError: {}",
                    command_str, e
                )));
            }
        }
    }

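    // Execution sketch (illustrative): a step such as `echo "hello"` takes the
    // shell fast path above and runs as `sh -c 'echo "hello"'` in the resolved
    // working directory, while `cargo build --release` takes the cargo/rustup
    // branch and runs from the project directory with CARGO_HOME rewritten.
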
    async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
        logging::info(&format!("🔄 Emulation: Pretending to pull image {}", image));
        Ok(())
    }

    async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
        logging::info(&format!(
            "🔄 Emulation: Pretending to build image {} from {}",
            tag,
            dockerfile.display()
        ));
        Ok(())
    }

    async fn prepare_language_environment(
        &self,
        language: &str,
        version: Option<&str>,
        _additional_packages: Option<Vec<String>>,
    ) -> Result<String, ContainerError> {
        // For emulation runtime, we'll use a simplified approach
        // that doesn't require building custom images
        let base_image = match language {
            "python" => version.map_or("python:3.11-slim".to_string(), |v| format!("python:{}", v)),
            "node" => version.map_or("node:20-slim".to_string(), |v| format!("node:{}", v)),
            "java" => version.map_or("eclipse-temurin:17-jdk".to_string(), |v| {
                format!("eclipse-temurin:{}", v)
            }),
            "go" => version.map_or("golang:1.21-slim".to_string(), |v| format!("golang:{}", v)),
            "dotnet" => version.map_or("mcr.microsoft.com/dotnet/sdk:7.0".to_string(), |v| {
                format!("mcr.microsoft.com/dotnet/sdk:{}", v)
            }),
            "rust" => version.map_or("rust:latest".to_string(), |v| format!("rust:{}", v)),
            _ => {
                return Err(ContainerError::ContainerStart(format!(
                    "Unsupported language: {}",
                    language
                )))
            }
        };

        // For emulation, we'll just return the base image
        // The actual package installation will be handled during container execution
        Ok(base_image)
    }
}
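
// Resolution sketch (illustrative): prepare_language_environment("python", Some("3.12"), None)
// yields "python:3.12", while prepare_language_environment("go", None, None) falls back to
// the pinned default "golang:1.21-slim"; an unknown language returns ContainerError::ContainerStart.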

#[allow(dead_code)]
fn copy_directory_contents(source: &Path, dest: &Path) -> std::io::Result<()> {
    // Create the destination directory if it doesn't exist
    fs::create_dir_all(dest)?;

    // Iterate through all entries in the source directory
    for entry in fs::read_dir(source)? {
        let entry = entry?;
        let path = entry.path();
        let file_name = match path.file_name() {
            Some(name) => name,
            None => {
                eprintln!("Warning: Could not get file name from path: {:?}", path);
                continue; // Skip this file
            }
        };
        let dest_path = dest.join(file_name);

        // Skip hidden files (except .gitignore and .github, which might be useful)
        let file_name_str = file_name.to_string_lossy();
        if file_name_str.starts_with(".")
            && file_name_str != ".gitignore"
            && file_name_str != ".github"
        {
            continue;
        }

        // Skip target directory for Rust projects
        if file_name_str == "target" {
            continue;
        }

        if path.is_dir() {
            // Recursively copy subdirectories
            copy_directory_contents(&path, &dest_path)?;
        } else {
            // Copy files
            fs::copy(&path, &dest_path)?;
        }
    }

    Ok(())
}

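// Copy sketch (illustrative): copy_directory_contents(Path::new("."), &workspace)
// mirrors the project tree into the emulated workspace, pruning dotfiles other
// than .gitignore/.github and the Rust `target` build directory along the way.
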
pub async fn handle_special_action(action: &str) -> Result<(), ContainerError> {
    // Extract owner, repo and version from the action
    let action_parts: Vec<&str> = action.split('@').collect();
    let action_name = action_parts[0];
    let action_version = if action_parts.len() > 1 {
        action_parts[1]
    } else {
        "latest"
    };

    logging::info(&format!(
        "🔄 Processing action: {} @ {}",
        action_name, action_version
    ));

    // Handle specific known actions with special requirements
    if action.starts_with("cachix/install-nix-action") {
        logging::info("🔄 Emulating cachix/install-nix-action");

        // In emulation mode, check if nix is installed
        let nix_installed = Command::new("which")
            .arg("nix")
            .output()
            .map(|output| output.status.success())
            .unwrap_or(false);

        if !nix_installed {
            logging::info("🔄 Emulation: Nix is required but not installed.");
            logging::info(
                "🔄 To use this workflow, please install Nix: https://nixos.org/download.html",
            );
            logging::info("🔄 Continuing emulation, but nix commands will fail.");
        } else {
            logging::info("🔄 Emulation: Using system-installed Nix");
        }
    } else if action.starts_with("actions-rs/cargo@") {
        // For actions-rs/cargo action, ensure Rust is available
        logging::info(&format!("🔄 Detected Rust cargo action: {}", action));

        // Verify Rust/cargo is installed
        check_command_available("cargo", "Rust/Cargo", "https://rustup.rs/");
    } else if action.starts_with("actions-rs/toolchain@") {
        // For actions-rs/toolchain action, check for Rust installation
        logging::info(&format!("🔄 Detected Rust toolchain action: {}", action));

        check_command_available("rustc", "Rust", "https://rustup.rs/");
    } else if action.starts_with("actions-rs/fmt@") {
        // For actions-rs/fmt action, check if rustfmt is available
        logging::info(&format!("🔄 Detected Rust formatter action: {}", action));

        check_command_available("rustfmt", "rustfmt", "rustup component add rustfmt");
    } else if action.starts_with("actions/setup-node@") {
        // Node.js setup action
        logging::info(&format!("🔄 Detected Node.js setup action: {}", action));

        check_command_available("node", "Node.js", "https://nodejs.org/");
    } else if action.starts_with("actions/setup-python@") {
        // Python setup action
        logging::info(&format!("🔄 Detected Python setup action: {}", action));

        check_command_available("python", "Python", "https://www.python.org/downloads/");
    } else if action.starts_with("actions/setup-java@") {
        // Java setup action
        logging::info(&format!("🔄 Detected Java setup action: {}", action));

        check_command_available("java", "Java", "https://adoptium.net/");
    } else if action.starts_with("actions/checkout@") {
        // Git checkout action - this is handled implicitly by our workspace setup
        logging::info("🔄 Detected checkout action - workspace files are already prepared");
    } else if action.starts_with("actions/cache@") {
        // Cache action - can't really emulate caching effectively
        logging::info(
            "🔄 Detected cache action - caching is not fully supported in emulation mode",
        );
    } else {
        // Generic action we don't have special handling for
        logging::info(&format!(
            "🔄 Action '{}' has no special handling in emulation mode",
            action_name
        ));
    }

    // Always return success - the actual command execution will happen in execute_step
    Ok(())
}

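// Dispatch sketch (illustrative): handle_special_action("actions-rs/toolchain@v1").await
// splits the spec into name "actions-rs/toolchain" and version "v1", then only probes
// the host for `rustc`; it never fails the step itself, since the result is always Ok(()).
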
// Helper function to check if a command is available on the system
fn check_command_available(command: &str, name: &str, install_url: &str) {
    let is_available = Command::new("which")
        .arg(command)
        .output()
        .map(|output| output.status.success())
        .unwrap_or(false);

    if !is_available {
        logging::warning(&format!("{} is required but not found on the system", name));
        logging::info(&format!(
            "To use this action, please install {}: {}",
            name, install_url
        ));
        logging::info(&format!(
            "Continuing emulation, but {} commands will fail",
            name
        ));
    } else {
        // Try to get version information
        if let Ok(output) = Command::new(command).arg("--version").output() {
            if output.status.success() {
                let version = String::from_utf8_lossy(&output.stdout);
                logging::info(&format!("🔄 Using system {}: {}", name, version.trim()));
            }
        }
    }
}

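// Usage sketch (illustrative): check_command_available("node", "Node.js", "https://nodejs.org/")
// logs "🔄 Using system Node.js: v20.x.y" (hypothetical version string) when `node`
// resolves via `which`, and otherwise warns with the install URL while letting
// emulation continue.
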
// Helper to set up appropriate environment variables for different actions
#[allow(dead_code)]
fn add_action_env_vars(
    env_map: &mut HashMap<String, String>,
    action: &str,
    with_params: &Option<HashMap<String, String>>,
) {
    if let Some(params) = with_params {
        if action.starts_with("actions/setup-node") {
            // For Node.js actions, add NODE_VERSION
            if let Some(version) = params.get("node-version") {
                env_map.insert("NODE_VERSION".to_string(), version.clone());
            }

            // Set NPM/Yarn paths if needed
            env_map.insert(
                "NPM_CONFIG_PREFIX".to_string(),
                "/tmp/.npm-global".to_string(),
            );
            env_map.insert("PATH".to_string(), "/tmp/.npm-global/bin:$PATH".to_string());
        } else if action.starts_with("actions/setup-python") {
            // For Python actions, add PYTHON_VERSION
            if let Some(version) = params.get("python-version") {
                env_map.insert("PYTHON_VERSION".to_string(), version.clone());
            }

            // Set pip cache directories
            env_map.insert("PIP_CACHE_DIR".to_string(), "/tmp/.pip-cache".to_string());
        } else if action.starts_with("actions/setup-java") {
            // For Java actions, add JAVA_VERSION
            if let Some(version) = params.get("java-version") {
                env_map.insert("JAVA_VERSION".to_string(), version.clone());
            }

            // Set JAVA_HOME
            env_map.insert(
                "JAVA_HOME".to_string(),
                "/usr/lib/jvm/default-java".to_string(),
            );
        }
    }
}

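// Mapping sketch (illustrative): for `actions/setup-python` with `python-version: "3.12"`,
// the helper inserts PYTHON_VERSION=3.12 and PIP_CACHE_DIR=/tmp/.pip-cache into the
// step's environment map before the command runs.
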
// Function to clean up emulation resources
pub async fn cleanup_resources() {
    cleanup_processes().await;
    cleanup_workspaces().await;
}

// Clean up any tracked processes
async fn cleanup_processes() {
    let processes_to_cleanup = {
        if let Ok(processes) = EMULATION_PROCESSES.lock() {
            processes.clone()
        } else {
            vec![]
        }
    };

    for pid in processes_to_cleanup {
        logging::info(&format!("Cleaning up emulated process: {}", pid));

        #[cfg(unix)]
        {
            // On Unix-like systems, use kill command
            let _ = Command::new("kill")
                .arg("-TERM")
                .arg(pid.to_string())
                .output();
        }

        #[cfg(windows)]
        {
            // On Windows, use taskkill
            let _ = Command::new("taskkill")
                .arg("/F")
                .arg("/PID")
                .arg(&pid.to_string())
                .output();
        }

        // Remove from tracking
        if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
            processes.retain(|p| *p != pid);
        }
    }
}

// Clean up any tracked workspaces
async fn cleanup_workspaces() {
    let workspaces_to_cleanup = {
        if let Ok(workspaces) = EMULATION_WORKSPACES.lock() {
            workspaces.clone()
        } else {
            vec![]
        }
    };

    for workspace_path in workspaces_to_cleanup {
        logging::info(&format!(
            "Cleaning up emulation workspace: {}",
            workspace_path.display()
        ));

        // Only attempt to remove it if it exists
        if workspace_path.exists() {
            match fs::remove_dir_all(&workspace_path) {
                Ok(_) => logging::info("Successfully removed workspace directory"),
                Err(e) => logging::error(&format!("Error removing workspace: {}", e)),
            }
        }

        // Remove from tracking
        if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
            workspaces.retain(|w| *w != workspace_path);
        }
    }
}

// Add process to tracking
#[allow(dead_code)]
pub fn track_process(pid: u32) {
    if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
        processes.push(pid);
    }
}

// Remove process from tracking
#[allow(dead_code)]
pub fn untrack_process(pid: u32) {
    if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
        processes.retain(|p| *p != pid);
    }
}

// Track additional workspace paths if needed
#[allow(dead_code)]
pub fn track_workspace(path: &Path) {
    if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.push(path.to_path_buf());
    }
}

// Remove workspace from tracking
#[allow(dead_code)]
pub fn untrack_workspace(path: &Path) {
    if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.retain(|w| *w != path);
    }
}

// Public accessor functions for testing
#[cfg(test)]
pub fn get_tracked_workspaces() -> Vec<PathBuf> {
    if let Ok(workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.clone()
    } else {
        vec![]
    }
}

#[cfg(test)]
pub fn get_tracked_processes() -> Vec<u32> {
    if let Ok(processes) = EMULATION_PROCESSES.lock() {
        processes.clone()
    } else {
        vec![]
    }
}
@@ -1,2 +1,4 @@
// runtime crate

pub mod container;
pub mod emulation;
27
crates/ui/Cargo.toml
Normal file
@@ -0,0 +1,27 @@
[package]
name = "ui"
version.workspace = true
edition.workspace = true
description = "user interface functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }
evaluator = { path = "../evaluator" }
executor = { path = "../executor" }
logging = { path = "../logging" }
utils = { path = "../utils" }
github = { path = "../github" }

# External dependencies
chrono.workspace = true
crossterm.workspace = true
ratatui.workspace = true
serde.workspace = true
serde_yaml.workspace = true
tokio.workspace = true
serde_json.workspace = true
reqwest = { workspace = true, features = ["json"] }
regex.workspace = true
futures.workspace = true
462
crates/ui/src/app/mod.rs
Normal file
@@ -0,0 +1,462 @@
// App module for UI state and main TUI entry point
mod state;

use crate::handlers::workflow::start_next_workflow_execution;
use crate::models::{ExecutionResultMsg, Workflow, WorkflowStatus};
use crate::utils::load_workflows;
use crate::views::render_ui;
use chrono::Local;
use crossterm::{
    event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyModifiers},
    execute,
    terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};
use executor::RuntimeType;
use ratatui::{backend::CrosstermBackend, Terminal};
use std::io::{self, stdout};
use std::path::PathBuf;
use std::sync::mpsc;
use std::time::{Duration, Instant};

pub use state::App;

// Main entry point for the TUI interface
#[allow(clippy::ptr_arg)]
pub async fn run_wrkflw_tui(
    path: Option<&PathBuf>,
    runtime_type: RuntimeType,
    verbose: bool,
    preserve_containers_on_failure: bool,
) -> io::Result<()> {
    // Terminal setup
    enable_raw_mode()?;
    let mut stdout = stdout();
    execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;

    // Set up channel for async communication
    let (tx, rx): (
        mpsc::Sender<ExecutionResultMsg>,
        mpsc::Receiver<ExecutionResultMsg>,
    ) = mpsc::channel();

    // Initialize app state
    let mut app = App::new(
        runtime_type.clone(),
        tx.clone(),
        preserve_containers_on_failure,
    );

    if app.validation_mode {
        app.logs.push("Starting in validation mode".to_string());
        logging::info("Starting in validation mode");
    }

    // Load workflows
    let dir_path = match path {
        Some(path) if path.is_dir() => path.clone(),
        Some(path) if path.is_file() => {
            // Single workflow file
            let name = path
                .file_name()
                .unwrap_or_default()
                .to_string_lossy()
                .into_owned();

            app.workflows = vec![Workflow {
                name: name.clone(),
                path: path.clone(),
                selected: true,
                status: WorkflowStatus::NotStarted,
                execution_details: None,
            }];

            // Queue the single workflow for execution
            app.execution_queue = vec![0];
            app.start_execution();

            // Return the parent dir, or the current dir if there is no parent
            path.parent()
                .map(|p| p.to_path_buf())
                .unwrap_or_else(|| PathBuf::from("."))
        }
        _ => PathBuf::from(".github/workflows"),
    };

    // Only load the directory if we haven't already loaded a single file
    if app.workflows.is_empty() {
        app.workflows = load_workflows(&dir_path);
    }

    // Run the main event loop
    let tx_clone = tx.clone();
    let result = run_tui_event_loop(&mut terminal, &mut app, &tx_clone, &rx, verbose);

    // Clean up terminal
    disable_raw_mode()?;
    execute!(
        terminal.backend_mut(),
        LeaveAlternateScreen,
        DisableMouseCapture
    )?;
    terminal.show_cursor()?;

    match result {
        Ok(_) => Ok(()),
        Err(e) => {
            // If the TUI fails to initialize or crashes, fall back to CLI mode
            logging::error(&format!("Failed to start UI: {}", e));

            // Only for the 'tui' command should we fall back to CLI mode for files;
            // for other commands, return the error
            if let Some(path) = path {
                if path.is_file() {
                    logging::error("Falling back to CLI mode...");
                    crate::handlers::workflow::execute_workflow_cli(path, runtime_type, verbose)
                        .await
                } else if path.is_dir() {
                    crate::handlers::workflow::validate_workflow(path, verbose)
                } else {
                    Err(e)
                }
            } else {
                Err(e)
            }
        }
    }
}

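// Call sketch (illustrative): from the CLI layer, something like
//     run_wrkflw_tui(Some(&PathBuf::from(".github/workflows")), RuntimeType::Docker, false, false).await?;
// sets up the alternate screen, loads every workflow in the directory, and tears the
// terminal back down before propagating the event loop's result.
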
// Helper function to run the main event loop
fn run_tui_event_loop(
    terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    tx_clone: &mpsc::Sender<ExecutionResultMsg>,
    rx: &mpsc::Receiver<ExecutionResultMsg>,
    verbose: bool,
) -> io::Result<()> {
    // Max time to wait for events - keep this short to ensure UI responsiveness
    let event_poll_timeout = Duration::from_millis(50);

    // Set up a dedicated tick timer
    let tick_rate = app.tick_rate;
    let mut last_tick = Instant::now();

    loop {
        // Always redraw the UI on each loop iteration to keep it responsive
        terminal.draw(|f| {
            render_ui(f, app);
        })?;

        // Update the UI on every tick
        if last_tick.elapsed() >= tick_rate {
            app.tick();
            app.update_running_workflow_progress();
            last_tick = Instant::now();
        }

        // Non-blocking check for execution results
        if let Ok((workflow_idx, result)) = rx.try_recv() {
            app.process_execution_result(workflow_idx, result);
            app.current_execution = None;

            // Get next workflow to execute using our helper function
            start_next_workflow_execution(app, tx_clone, verbose);
        }

        // Start execution if we have a queued workflow and nothing is currently running
        if app.running && app.current_execution.is_none() && !app.execution_queue.is_empty() {
            start_next_workflow_execution(app, tx_clone, verbose);
        }

        // Handle key events with a short timeout
        if event::poll(event_poll_timeout)? {
            if let Event::Key(key) = event::read()? {
                // Handle search input first if we're in search mode on the logs tab
                if app.selected_tab == 2 && app.log_search_active {
                    app.handle_log_search_input(key.code);
                    continue;
                }

                match key.code {
                    KeyCode::Char('q') => {
                        // Exit and clean up
                        break Ok(());
                    }
                    KeyCode::Esc => {
                        if app.detailed_view {
                            app.detailed_view = false;
                        } else if app.show_help {
                            app.show_help = false;
                        } else {
                            // Exit and clean up
                            break Ok(());
                        }
                    }
                    KeyCode::Tab => {
                        // Cycle through tabs
                        app.switch_tab((app.selected_tab + 1) % 4);
                    }
                    KeyCode::BackTab => {
                        // Cycle through tabs backwards
                        app.switch_tab((app.selected_tab + 3) % 4);
                    }
                    KeyCode::Char('1') | KeyCode::Char('w') => app.switch_tab(0),
                    KeyCode::Char('2') | KeyCode::Char('x') => app.switch_tab(1),
                    KeyCode::Char('3') | KeyCode::Char('l') => app.switch_tab(2),
                    KeyCode::Char('4') | KeyCode::Char('h') => app.switch_tab(3),
                    KeyCode::Up | KeyCode::Char('k') => {
                        if app.selected_tab == 2 {
                            if !app.log_search_matches.is_empty() {
                                app.previous_search_match();
                            } else {
                                app.scroll_logs_up();
                            }
                        } else if app.selected_tab == 0 {
                            app.previous_workflow();
                        } else if app.selected_tab == 1 {
                            if app.detailed_view {
                                app.previous_step();
                            } else {
                                app.previous_job();
                            }
                        }
                    }
                    KeyCode::Down | KeyCode::Char('j') => {
                        if app.selected_tab == 2 {
                            if !app.log_search_matches.is_empty() {
                                app.next_search_match();
                            } else {
                                app.scroll_logs_down();
                            }
                        } else if app.selected_tab == 0 {
                            app.next_workflow();
                        } else if app.selected_tab == 1 {
                            if app.detailed_view {
                                app.next_step();
                            } else {
                                app.next_job();
                            }
                        }
                    }
                    KeyCode::Char(' ') => {
                        if app.selected_tab == 0 && !app.running {
                            app.toggle_selected();
                        }
                    }
                    KeyCode::Enter => {
                        match app.selected_tab {
                            0 => {
                                // In the workflows tab, Enter runs the selected workflow
                                if !app.running {
                                    if let Some(idx) = app.workflow_list_state.selected() {
                                        app.workflows[idx].selected = true;
                                        app.queue_selected_for_execution();
                                        app.start_execution();
                                    }
                                }
                            }
                            1 => {
                                // In the execution tab, Enter shows job details
                                app.toggle_detailed_view();
                            }
                            _ => {}
                        }
                    }
                    KeyCode::Char('r') => {
                        // Check if shift is pressed - this might be receiving the reset command
                        if key.modifiers.contains(KeyModifiers::SHIFT) {
                            let timestamp = Local::now().format("%H:%M:%S").to_string();
                            app.logs.push(format!(
                                "[{}] DEBUG: Shift+r detected - this should be uppercase R",
                                timestamp
                            ));
                            logging::info(
                                "Shift+r detected as lowercase - this should be uppercase R",
                            );

                            if !app.running {
                                // Reset workflow status with Shift+r
                                app.logs.push(format!(
                                    "[{}] Attempting to reset workflow status via Shift+r...",
                                    timestamp
                                ));
                                app.reset_workflow_status();

                                // Force redraw to update the UI immediately
                                terminal.draw(|f| {
                                    render_ui(f, app);
                                })?;
                            }
                        } else if !app.running {
                            app.queue_selected_for_execution();
                            app.start_execution();
                        }
                    }
                    KeyCode::Char('a') => {
                        if !app.running {
                            // Select all workflows
                            for workflow in &mut app.workflows {
                                workflow.selected = true;
                            }
                        }
                    }
                    KeyCode::Char('e') => {
                        if !app.running {
                            app.toggle_emulation_mode();
                        }
                    }
                    KeyCode::Char('v') => {
                        if !app.running {
                            app.toggle_validation_mode();
                        }
                    }
                    KeyCode::Char('n') => {
                        if app.selected_tab == 2 && !app.log_search_query.is_empty() {
                            app.next_search_match();
                        } else if app.selected_tab == 0 && !app.running {
                            // Deselect all workflows
                            for workflow in &mut app.workflows {
                                workflow.selected = false;
                            }
                        }
                    }
                    KeyCode::Char('R') => {
                        let timestamp = Local::now().format("%H:%M:%S").to_string();
                        app.logs.push(format!(
                            "[{}] DEBUG: Reset key 'Shift+R' pressed",
                            timestamp
                        ));
                        logging::info("Reset key 'Shift+R' pressed");

                        if !app.running {
                            // Reset workflow status
                            app.logs.push(format!(
                                "[{}] Attempting to reset workflow status...",
                                timestamp
                            ));
                            app.reset_workflow_status();

                            // Force redraw to update the UI immediately
                            terminal.draw(|f| {
                                render_ui(f, app);
                            })?;
                        } else {
                            app.logs.push(format!(
                                "[{}] Cannot reset workflow while another operation is running",
                                timestamp
                            ));
                        }
                    }
                    KeyCode::Char('?') => {
                        // Toggle help overlay
                        app.show_help = !app.show_help;
                    }
                    KeyCode::Char('t') => {
                        // Only trigger a workflow if not already running and we're in the workflows tab
                        if !app.running && app.selected_tab == 0 {
                            if let Some(selected_idx) = app.workflow_list_state.selected() {
                                if selected_idx < app.workflows.len() {
                                    let workflow = &app.workflows[selected_idx];
                                    if workflow.status == WorkflowStatus::NotStarted {
                                        app.trigger_selected_workflow();
                                    } else if workflow.status == WorkflowStatus::Running {
                                        app.logs.push(format!(
                                            "Workflow '{}' is already running",
                                            workflow.name
                                        ));
                                        logging::warning(&format!(
                                            "Workflow '{}' is already running",
                                            workflow.name
                                        ));
                                    } else {
                                        // First, get all the data we need from the workflow
                                        let workflow_name = workflow.name.clone();
                                        let status_text = match workflow.status {
                                            WorkflowStatus::Success => "Success",
                                            WorkflowStatus::Failed => "Failed",
                                            WorkflowStatus::Skipped => "Skipped",
                                            _ => "current",
                                        };
                                        let needs_reset_hint = workflow.status
                                            == WorkflowStatus::Success
                                            || workflow.status == WorkflowStatus::Failed
                                            || workflow.status == WorkflowStatus::Skipped;

                                        // Now set the status message (mutable borrow)
                                        app.set_status_message(format!(
                                            "Cannot trigger workflow '{}' in {} state. Press Shift+R to reset.",
                                            workflow_name,
                                            status_text
                                        ));

                                        // Add log entries
                                        app.logs.push(format!(
                                            "Cannot trigger workflow '{}' in {} state",
                                            workflow_name, status_text
                                        ));

                                        // Add a hint about using reset
                                        if needs_reset_hint {
                                            let timestamp =
                                                Local::now().format("%H:%M:%S").to_string();
                                            app.logs.push(format!(
                                                "[{}] Hint: Press 'Shift+R' to reset the workflow status and allow triggering",
                                                timestamp
                                            ));
                                        }

                                        logging::warning(&format!(
                                            "Cannot trigger workflow in {} state",
                                            status_text
                                        ));
                                    }
                                }
                            } else {
                                app.logs.push("No workflow selected to trigger".to_string());
                                logging::warning("No workflow selected to trigger");
                            }
                        } else if app.running {
                            app.logs.push(
                                "Cannot trigger workflow while another operation is in progress"
                                    .to_string(),
                            );
                            logging::warning(
                                "Cannot trigger workflow while another operation is in progress",
                            );
                        } else if app.selected_tab != 0 {
                            app.logs
                                .push("Switch to Workflows tab to trigger a workflow".to_string());
                            logging::warning("Switch to Workflows tab to trigger a workflow");
                            // For better UX, automatically switch to the Workflows tab here
                            app.switch_tab(0);
                        }
                    }
                    KeyCode::Char('s') => {
                        if app.selected_tab == 2 {
                            app.toggle_log_search();
                        }
                    }
                    KeyCode::Char('f') => {
                        if app.selected_tab == 2 {
                            app.toggle_log_filter();
                        }
                    }
                    KeyCode::Char('c') => {
                        if app.selected_tab == 2 {
                            app.clear_log_search_and_filter();
                        }
                    }
                    KeyCode::Char(c) => {
                        if app.selected_tab == 2 && app.log_search_active {
                            app.handle_log_search_input(KeyCode::Char(c));
                        }
                    }
                    _ => {}
                }
            }
        }
    }
}
900
crates/ui/src/app/state.rs
Normal file
@@ -0,0 +1,900 @@
// App state for the UI
use crate::models::{
    ExecutionResultMsg, JobExecution, LogFilterLevel, StepExecution, Workflow, WorkflowExecution,
    WorkflowStatus,
};
use chrono::Local;
use crossterm::event::KeyCode;
use executor::{JobStatus, RuntimeType, StepStatus};
use ratatui::widgets::{ListState, TableState};
use std::sync::mpsc;
use std::time::{Duration, Instant};

/// Application state
pub struct App {
    pub workflows: Vec<Workflow>,
    pub workflow_list_state: ListState,
    pub selected_tab: usize,
    pub running: bool,
    pub show_help: bool,
    pub runtime_type: RuntimeType,
    pub validation_mode: bool,
    pub preserve_containers_on_failure: bool,
    pub execution_queue: Vec<usize>, // Indices of workflows to execute
    pub current_execution: Option<usize>,
    pub logs: Vec<String>, // Overall execution logs
    pub log_scroll: usize, // Scrolling position for logs
    pub job_list_state: ListState, // For viewing job details
    pub detailed_view: bool, // Whether we're in detailed view mode
    pub step_list_state: ListState, // For selecting steps in detailed view
    pub step_table_state: TableState, // For the steps table in detailed view
    pub last_tick: Instant, // For UI animations and updates
    pub tick_rate: Duration, // How often to update the UI
    pub tx: mpsc::Sender<ExecutionResultMsg>, // Channel for async communication
    pub status_message: Option<String>, // Temporary status message to display
    pub status_message_time: Option<Instant>, // When the message was set

    // Search and filter functionality
    pub log_search_query: String, // Current search query for logs
    pub log_search_active: bool,  // Whether search input is active
    pub log_filter_level: Option<LogFilterLevel>, // Current log level filter
    pub log_search_matches: Vec<usize>, // Indices of logs that match the search
    pub log_search_match_idx: usize, // Current match index for navigation
}

impl App {
    pub fn new(
        runtime_type: RuntimeType,
        tx: mpsc::Sender<ExecutionResultMsg>,
        preserve_containers_on_failure: bool,
    ) -> App {
        let mut workflow_list_state = ListState::default();
        workflow_list_state.select(Some(0));

        let mut job_list_state = ListState::default();
        job_list_state.select(Some(0));

        let mut step_list_state = ListState::default();
        step_list_state.select(Some(0));

        let mut step_table_state = TableState::default();
        step_table_state.select(Some(0));

        // Check Docker availability if the Docker runtime is selected
        let mut initial_logs = Vec::new();
        let runtime_type = match runtime_type {
            RuntimeType::Docker => {
                // Use a timeout for the Docker availability check to prevent hanging
                let is_docker_available = match std::panic::catch_unwind(|| {
                    // Use a very short timeout to prevent blocking the UI
                    let result = std::thread::scope(|s| {
                        let handle = s.spawn(|| {
                            utils::fd::with_stderr_to_null(executor::docker::is_available)
                                .unwrap_or(false)
                        });

                        // Set a short timeout for the thread
                        let start = std::time::Instant::now();
                        let timeout = std::time::Duration::from_secs(1);

                        while start.elapsed() < timeout {
                            if handle.is_finished() {
                                return handle.join().unwrap_or(false);
                            }
                            std::thread::sleep(std::time::Duration::from_millis(10));
                        }

                        // If we reach here, the check took too long
                        logging::warning(
                            "Docker availability check timed out, falling back to emulation mode",
                        );
                        false
                    });
                    result
                }) {
                    Ok(result) => result,
                    Err(_) => {
                        logging::warning("Docker availability check failed with panic, falling back to emulation mode");
                        false
                    }
                };

                if !is_docker_available {
                    initial_logs.push(
                        "Docker is not available or unresponsive. Using emulation mode instead."
                            .to_string(),
                    );
                    logging::warning(
                        "Docker is not available or unresponsive. Using emulation mode instead.",
                    );
                    RuntimeType::Emulation
                } else {
                    logging::info("Docker is available, using Docker runtime");
                    RuntimeType::Docker
                }
            }
            RuntimeType::Emulation => RuntimeType::Emulation,
        };

        App {
            workflows: Vec::new(),
            workflow_list_state,
            selected_tab: 0,
            running: false,
            show_help: false,
            runtime_type,
            validation_mode: false,
            preserve_containers_on_failure,
            execution_queue: Vec::new(),
            current_execution: None,
            logs: initial_logs,
            log_scroll: 0,
            job_list_state,
            detailed_view: false,
            step_list_state,
            step_table_state,
            last_tick: Instant::now(),
            tick_rate: Duration::from_millis(250), // Update 4 times per second
            tx,
            status_message: None,
            status_message_time: None,

            // Search and filter functionality
            log_search_query: String::new(),
            log_search_active: false,
            log_filter_level: Some(LogFilterLevel::All),
            log_search_matches: Vec::new(),
            log_search_match_idx: 0,
        }
    }

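    // Probe sketch (illustrative): the bounded check above means a hung Docker daemon
    // costs at most ~1s at startup - the spawned thread is polled via `is_finished()`
    // every 10ms, and both a timeout and a panic degrade the runtime to emulation
    // rather than freezing the TUI.
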
    // Toggle workflow selection
    pub fn toggle_selected(&mut self) {
        if let Some(idx) = self.workflow_list_state.selected() {
            if idx < self.workflows.len() {
                self.workflows[idx].selected = !self.workflows[idx].selected;
            }
        }
    }

    pub fn toggle_emulation_mode(&mut self) {
        self.runtime_type = match self.runtime_type {
            RuntimeType::Docker => RuntimeType::Emulation,
            RuntimeType::Emulation => RuntimeType::Docker,
        };
        self.logs
            .push(format!("Switched to {} mode", self.runtime_type_name()));
    }

    pub fn toggle_validation_mode(&mut self) {
        self.validation_mode = !self.validation_mode;
        let mode = if self.validation_mode {
            "validation"
        } else {
            "normal"
        };
        let timestamp = Local::now().format("%H:%M:%S").to_string();
        self.logs
            .push(format!("[{}] Switched to {} mode", timestamp, mode));
        logging::info(&format!("Switched to {} mode", mode));
    }

    pub fn runtime_type_name(&self) -> &str {
        match self.runtime_type {
            RuntimeType::Docker => "Docker",
            RuntimeType::Emulation => "Emulation",
        }
    }

    // Move cursor up in the workflow list
    pub fn previous_workflow(&mut self) {
        if self.workflows.is_empty() {
            return;
        }

        let i = match self.workflow_list_state.selected() {
            Some(i) => {
                if i == 0 {
                    self.workflows.len() - 1
                } else {
                    i - 1
                }
            }
            None => 0,
        };
        self.workflow_list_state.select(Some(i));
    }

    // Move cursor down in the workflow list
    pub fn next_workflow(&mut self) {
        if self.workflows.is_empty() {
            return;
        }

        let i = match self.workflow_list_state.selected() {
            Some(i) => {
                if i >= self.workflows.len() - 1 {
                    0
                } else {
                    i + 1
                }
            }
            None => 0,
        };
        self.workflow_list_state.select(Some(i));
    }

    // Move cursor up in the job list
    pub fn previous_job(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected());

        if let Some(workflow_idx) = current_workflow_idx {
            if workflow_idx >= self.workflows.len() {
                return;
            }

            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if execution.jobs.is_empty() {
                    return;
                }

                let i = match self.job_list_state.selected() {
                    Some(i) => {
                        if i == 0 {
                            execution.jobs.len() - 1
                        } else {
                            i - 1
                        }
                    }
                    None => 0,
                };
                self.job_list_state.select(Some(i));

                // Reset step selection when changing jobs
                self.step_list_state.select(Some(0));
            }
        }
    }

    // Move cursor down in the job list
    pub fn next_job(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if workflow_idx >= self.workflows.len() {
                return;
            }

            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if execution.jobs.is_empty() {
                    return;
                }

                let i = match self.job_list_state.selected() {
                    Some(i) => {
                        if i >= execution.jobs.len() - 1 {
                            0
                        } else {
                            i + 1
                        }
                    }
                    None => 0,
                };
                self.job_list_state.select(Some(i));

                // Reset step selection when changing jobs
                self.step_list_state.select(Some(0));
            }
        }
    }

    // Move cursor up in step list
    pub fn previous_step(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if let Some(job_idx) = self.job_list_state.selected() {
                    if job_idx < execution.jobs.len() {
                        let steps = &execution.jobs[job_idx].steps;
                        if steps.is_empty() {
                            return;
                        }

                        let i = match self.step_list_state.selected() {
                            Some(i) => {
                                if i == 0 {
                                    steps.len() - 1
                                } else {
                                    i - 1
                                }
                            }
                            None => 0,
                        };
                        self.step_list_state.select(Some(i));
                        // Update the table state to match
                        self.step_table_state.select(Some(i));
                    }
                }
            }
        }
    }

    // Move cursor down in step list
    pub fn next_step(&mut self) {
        let current_workflow_idx = self
            .current_execution
            .or_else(|| self.workflow_list_state.selected())
            .filter(|&idx| idx < self.workflows.len());

        if let Some(workflow_idx) = current_workflow_idx {
            if let Some(execution) = &self.workflows[workflow_idx].execution_details {
                if let Some(job_idx) = self.job_list_state.selected() {
                    if job_idx < execution.jobs.len() {
                        let steps = &execution.jobs[job_idx].steps;
                        if steps.is_empty() {
                            return;
                        }

                        let i = match self.step_list_state.selected() {
                            Some(i) => {
                                if i >= steps.len() - 1 {
                                    0
                                } else {
                                    i + 1
                                }
                            }
                            None => 0,
                        };
                        self.step_list_state.select(Some(i));
                        // Update the table state to match
                        self.step_table_state.select(Some(i));
                    }
                }
            }
        }
    }

    // Change the tab
    pub fn switch_tab(&mut self, tab: usize) {
        self.selected_tab = tab;
    }

    // Queue selected workflows for execution
    pub fn queue_selected_for_execution(&mut self) {
        if let Some(idx) = self.workflow_list_state.selected() {
            if idx < self.workflows.len() && !self.execution_queue.contains(&idx) {
                self.execution_queue.push(idx);
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Added '{}' to execution queue. Press 'Enter' to start.",
                    timestamp, self.workflows[idx].name
                ));
            }
        }
    }

    // Start workflow execution process
    pub fn start_execution(&mut self) {
        // Only start if we have workflows in queue and nothing is currently running
        if !self.execution_queue.is_empty() && self.current_execution.is_none() {
            self.running = true;

            // Log only once at the beginning - don't initialize execution details here
            // since that will happen in start_next_workflow_execution
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            self.logs
                .push(format!("[{}] Starting workflow execution...", timestamp));
            logging::info("Starting workflow execution...");
        }
    }

    // Process execution results and update UI
    pub fn process_execution_result(
        &mut self,
        workflow_idx: usize,
        result: Result<(Vec<executor::JobResult>, ()), String>,
    ) {
        if workflow_idx >= self.workflows.len() {
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            self.logs.push(format!(
                "[{}] Error: Invalid workflow index received",
                timestamp
            ));
            logging::error("Invalid workflow index received in process_execution_result");
            return;
        }

        let workflow = &mut self.workflows[workflow_idx];

        // Ensure execution details exist
        if workflow.execution_details.is_none() {
            workflow.execution_details = Some(WorkflowExecution {
                jobs: Vec::new(),
                start_time: Local::now(),
                end_time: Some(Local::now()),
                logs: Vec::new(),
                progress: 1.0,
            });
        }

        // Update execution details with end time
        if let Some(execution_details) = &mut workflow.execution_details {
            execution_details.end_time = Some(Local::now());

            match &result {
                Ok((jobs, _)) => {
                    let timestamp = Local::now().format("%H:%M:%S").to_string();
                    execution_details
                        .logs
                        .push(format!("[{}] Operation completed successfully.", timestamp));
                    execution_details.progress = 1.0;

                    // Convert executor::JobResult to our JobExecution struct
                    execution_details.jobs = jobs
                        .iter()
                        .map(|job_result| JobExecution {
                            name: job_result.name.clone(),
                            status: match job_result.status {
                                executor::JobStatus::Success => JobStatus::Success,
                                executor::JobStatus::Failure => JobStatus::Failure,
                                executor::JobStatus::Skipped => JobStatus::Skipped,
                            },
                            steps: job_result
                                .steps
                                .iter()
                                .map(|step_result| StepExecution {
                                    name: step_result.name.clone(),
                                    status: match step_result.status {
                                        executor::StepStatus::Success => StepStatus::Success,
                                        executor::StepStatus::Failure => StepStatus::Failure,
                                        executor::StepStatus::Skipped => StepStatus::Skipped,
                                    },
                                    output: step_result.output.clone(),
                                })
                                .collect::<Vec<StepExecution>>(),
                            logs: vec![job_result.logs.clone()],
                        })
                        .collect::<Vec<JobExecution>>();
                }
                Err(e) => {
                    let timestamp = Local::now().format("%H:%M:%S").to_string();
                    execution_details
                        .logs
                        .push(format!("[{}] Error: {}", timestamp, e));
                    execution_details.progress = 1.0;

                    // Create a dummy job with the error information so users can see details
                    execution_details.jobs = vec![JobExecution {
                        name: "Workflow Execution".to_string(),
                        status: JobStatus::Failure,
                        steps: vec![StepExecution {
                            name: "Execution Error".to_string(),
                            status: StepStatus::Failure,
                            output: format!("Error: {}\n\nThis error prevented the workflow from executing properly.", e),
                        }],
                        logs: vec![format!("Workflow execution error: {}", e)],
                    }];
                }
            }
        }

        match result {
            Ok(_) => {
                workflow.status = WorkflowStatus::Success;
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Workflow '{}' completed successfully!",
                    timestamp, workflow.name
                ));
                logging::info(&format!(
                    "[{}] Workflow '{}' completed successfully!",
                    timestamp, workflow.name
                ));
            }
            Err(e) => {
                workflow.status = WorkflowStatus::Failed;
                let timestamp = Local::now().format("%H:%M:%S").to_string();
                self.logs.push(format!(
                    "[{}] Workflow '{}' failed: {}",
                    timestamp, workflow.name, e
                ));
                logging::error(&format!(
                    "[{}] Workflow '{}' failed: {}",
                    timestamp, workflow.name, e
                ));
            }
        }

        // Only clear current_execution if it matches the processed workflow
        if let Some(current_idx) = self.current_execution {
            if current_idx == workflow_idx {
                self.current_execution = None;
            }
        }
    }

    // Get next workflow for execution
    pub fn get_next_workflow_to_execute(&mut self) -> Option<usize> {
        if self.execution_queue.is_empty() {
            return None;
        }

        let next = self.execution_queue.remove(0);
        self.workflows[next].status = WorkflowStatus::Running;
        self.current_execution = Some(next);
        self.logs
            .push(format!("Executing workflow: {}", self.workflows[next].name));
        logging::info(&format!(
            "Executing workflow: {}",
            self.workflows[next].name
        ));

        // Initialize execution details
        self.workflows[next].execution_details = Some(WorkflowExecution {
            jobs: Vec::new(),
            start_time: Local::now(),
            end_time: None,
            logs: vec!["Execution started".to_string()],
            progress: 0.0, // Just started
        });

        Some(next)
    }

    // Toggle detailed view mode
    pub fn toggle_detailed_view(&mut self) {
        self.detailed_view = !self.detailed_view;

        // When entering detailed view, make sure step selection is initialized
        if self.detailed_view {
            // Ensure the step_table_state matches the step_list_state
            if let Some(step_idx) = self.step_list_state.selected() {
                self.step_table_state.select(Some(step_idx));
            } else {
                // Initialize both to the first item if nothing is selected
                self.step_list_state.select(Some(0));
                self.step_table_state.select(Some(0));
            }

            // Also ensure job_list_state has a selection
            if self.job_list_state.selected().is_none() {
                self.job_list_state.select(Some(0));
            }
        }
    }

    // Function to handle keyboard input for log search
    pub fn handle_log_search_input(&mut self, key: KeyCode) {
        match key {
            KeyCode::Esc => {
                self.log_search_active = false;
                self.log_search_query.clear();
                self.log_search_matches.clear();
            }
            KeyCode::Backspace => {
                self.log_search_query.pop();
                self.update_log_search_matches();
            }
            KeyCode::Enter => {
                self.log_search_active = false;
                // Keep the search query and matches
            }
            KeyCode::Char(c) => {
                self.log_search_query.push(c);
                self.update_log_search_matches();
            }
            _ => {}
        }
    }

    // Toggle log search mode
    pub fn toggle_log_search(&mut self) {
        self.log_search_active = !self.log_search_active;
        if !self.log_search_active {
            // Don't clear the query; this allows toggling the search UI while keeping the filter
        } else {
            // When activating search, update matches
            self.update_log_search_matches();
        }
    }

    // Toggle log filter
    pub fn toggle_log_filter(&mut self) {
        self.log_filter_level = match &self.log_filter_level {
            None => Some(LogFilterLevel::Info),
            Some(level) => Some(level.next()),
        };

        // Update search matches when filter changes
        self.update_log_search_matches();
    }

    // Clear log search and filter
    pub fn clear_log_search_and_filter(&mut self) {
        self.log_search_query.clear();
        self.log_filter_level = None;
        self.log_search_matches.clear();
        self.log_search_match_idx = 0;
    }

    // Update matches based on current search and filter
    pub fn update_log_search_matches(&mut self) {
        self.log_search_matches.clear();
        self.log_search_match_idx = 0;

        // Get all logs (app logs + system logs)
        let mut all_logs = Vec::new();
        for log in &self.logs {
            all_logs.push(log.clone());
        }
        for log in logging::get_logs() {
            all_logs.push(log.clone());
        }

        // Apply filter and search
        for (idx, log) in all_logs.iter().enumerate() {
            let passes_filter = match &self.log_filter_level {
                None => true,
                Some(level) => level.matches(log),
            };

            let matches_search = if self.log_search_query.is_empty() {
                true
            } else {
                log.to_lowercase()
                    .contains(&self.log_search_query.to_lowercase())
            };

            if passes_filter && matches_search {
                self.log_search_matches.push(idx);
            }
        }

        // Jump to first match and provide feedback
        if !self.log_search_matches.is_empty() {
            // Jump to the first match
            if let Some(&idx) = self.log_search_matches.first() {
                self.log_scroll = idx;

                if !self.log_search_query.is_empty() {
                    self.set_status_message(format!(
                        "Found {} matches for '{}'",
                        self.log_search_matches.len(),
                        self.log_search_query
                    ));
                }
            }
        } else if !self.log_search_query.is_empty() {
            // No matches found
            self.set_status_message(format!("No matches found for '{}'", self.log_search_query));
        }
    }

// Navigate to next search match
|
||||
pub fn next_search_match(&mut self) {
|
||||
if !self.log_search_matches.is_empty() {
|
||||
self.log_search_match_idx =
|
||||
(self.log_search_match_idx + 1) % self.log_search_matches.len();
|
||||
if let Some(&idx) = self.log_search_matches.get(self.log_search_match_idx) {
|
||||
self.log_scroll = idx;
|
||||
|
||||
// Set status message showing which match we're on
|
||||
self.set_status_message(format!(
|
||||
"Search match {}/{} for '{}'",
|
||||
self.log_search_match_idx + 1,
|
||||
self.log_search_matches.len(),
|
||||
self.log_search_query
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Navigate to previous search match
|
||||
pub fn previous_search_match(&mut self) {
|
||||
if !self.log_search_matches.is_empty() {
|
||||
self.log_search_match_idx = if self.log_search_match_idx == 0 {
|
||||
self.log_search_matches.len() - 1
|
||||
} else {
|
||||
self.log_search_match_idx - 1
|
||||
};
|
||||
if let Some(&idx) = self.log_search_matches.get(self.log_search_match_idx) {
|
||||
self.log_scroll = idx;
|
||||
|
||||
// Set status message showing which match we're on
|
||||
self.set_status_message(format!(
|
||||
"Search match {}/{} for '{}'",
|
||||
self.log_search_match_idx + 1,
|
||||
self.log_search_matches.len(),
|
||||
self.log_search_query
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scroll logs up
|
||||
pub fn scroll_logs_up(&mut self) {
|
||||
self.log_scroll = self.log_scroll.saturating_sub(1);
|
||||
}
|
||||
|
||||
// Scroll logs down
|
||||
pub fn scroll_logs_down(&mut self) {
|
||||
// Get total log count including system logs
|
||||
let total_logs = self.logs.len() + logging::get_logs().len();
|
||||
if total_logs > 0 {
|
||||
self.log_scroll = (self.log_scroll + 1).min(total_logs - 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Update progress for running workflows
|
||||
pub fn update_running_workflow_progress(&mut self) {
|
||||
if let Some(idx) = self.current_execution {
|
||||
if let Some(execution) = &mut self.workflows[idx].execution_details {
|
||||
if execution.end_time.is_none() {
|
||||
// Gradually increase progress for visual feedback
|
||||
execution.progress = (execution.progress + 0.01).min(0.95);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set a temporary status message to be displayed in the UI
|
||||
pub fn set_status_message(&mut self, message: String) {
|
||||
self.status_message = Some(message);
|
||||
self.status_message_time = Some(Instant::now());
|
||||
}
|
||||
|
||||
// Check if tick should happen
|
||||
pub fn tick(&mut self) -> bool {
|
||||
let now = Instant::now();
|
||||
|
||||
// Check if we should clear a status message (after 3 seconds)
|
||||
if let Some(message_time) = self.status_message_time {
|
||||
if now.duration_since(message_time).as_secs() >= 3 {
|
||||
self.status_message = None;
|
||||
self.status_message_time = None;
|
||||
}
|
||||
}
|
||||
|
||||
if now.duration_since(self.last_tick) >= self.tick_rate {
|
||||
self.last_tick = now;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
// Trigger the selected workflow
|
||||
pub fn trigger_selected_workflow(&mut self) {
|
||||
if let Some(selected_idx) = self.workflow_list_state.selected() {
|
||||
if selected_idx < self.workflows.len() {
|
||||
let workflow = &self.workflows[selected_idx];
|
||||
|
||||
if workflow.name.is_empty() {
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs
|
||||
.push(format!("[{}] Error: Invalid workflow selection", timestamp));
|
||||
logging::error("Invalid workflow selection in trigger_selected_workflow");
|
||||
return;
|
||||
}
|
||||
|
||||
// Set up background task to execute the workflow via GitHub Actions REST API
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs.push(format!(
|
||||
"[{}] Triggering workflow: {}",
|
||||
timestamp, workflow.name
|
||||
));
|
||||
logging::info(&format!("Triggering workflow: {}", workflow.name));
|
||||
|
||||
// Clone necessary values for the async task
|
||||
let workflow_name = workflow.name.clone();
|
||||
let tx_clone = self.tx.clone();
|
||||
|
||||
// Set this tab as the current execution to ensure it shows in the Execution tab
|
||||
self.current_execution = Some(selected_idx);
|
||||
|
||||
// Switch to execution tab for better user feedback
|
||||
self.selected_tab = 1; // Switch to Execution tab manually to avoid the borrowing issue
|
||||
|
||||
// Create a thread instead of using tokio runtime directly since send() is not async
|
||||
std::thread::spawn(move || {
|
||||
// Create a runtime for the thread
|
||||
let rt = match tokio::runtime::Runtime::new() {
|
||||
Ok(runtime) => runtime,
|
||||
Err(e) => {
|
||||
let _ = tx_clone.send((
|
||||
selected_idx,
|
||||
Err(format!("Failed to create Tokio runtime: {}", e)),
|
||||
));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// Execute the GitHub Actions trigger API call
|
||||
let result = rt.block_on(async {
|
||||
crate::handlers::workflow::execute_curl_trigger(&workflow_name, None).await
|
||||
});
|
||||
|
||||
// Send the result back to the main thread
|
||||
if let Err(e) = tx_clone.send((selected_idx, result)) {
|
||||
logging::error(&format!("Error sending trigger result: {}", e));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs
|
||||
.push(format!("[{}] No workflow selected to trigger", timestamp));
|
||||
logging::warning("No workflow selected to trigger");
|
||||
}
|
||||
} else {
|
||||
self.logs
|
||||
.push("No workflow selected to trigger".to_string());
|
||||
logging::warning("No workflow selected to trigger");
|
||||
}
|
||||
}
|
||||
|
||||
// Reset a workflow's status to NotStarted
|
||||
pub fn reset_workflow_status(&mut self) {
|
||||
// Log whether a selection exists
|
||||
if self.workflow_list_state.selected().is_none() {
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs.push(format!(
|
||||
"[{}] Debug: No workflow selected for reset",
|
||||
timestamp
|
||||
));
|
||||
logging::warning("No workflow selected for reset");
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(idx) = self.workflow_list_state.selected() {
|
||||
if idx < self.workflows.len() {
|
||||
let workflow = &mut self.workflows[idx];
|
||||
// Log before status
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs.push(format!(
|
||||
"[{}] Debug: Attempting to reset workflow '{}' from {:?} state",
|
||||
timestamp, workflow.name, workflow.status
|
||||
));
|
||||
|
||||
// Debug: Reset unconditionally for testing
|
||||
// if workflow.status != WorkflowStatus::Running {
|
||||
let old_status = match workflow.status {
|
||||
WorkflowStatus::Success => "Success",
|
||||
WorkflowStatus::Failed => "Failed",
|
||||
WorkflowStatus::Skipped => "Skipped",
|
||||
WorkflowStatus::NotStarted => "NotStarted",
|
||||
WorkflowStatus::Running => "Running",
|
||||
};
|
||||
|
||||
// Store workflow name for the success message
|
||||
let workflow_name = workflow.name.clone();
|
||||
|
||||
// Reset regardless of current status (for debugging)
|
||||
workflow.status = WorkflowStatus::NotStarted;
|
||||
// Clear execution details to reset all state
|
||||
workflow.execution_details = None;
|
||||
|
||||
let timestamp = Local::now().format("%H:%M:%S").to_string();
|
||||
self.logs.push(format!(
|
||||
"[{}] Reset workflow '{}' from {} state to NotStarted - status is now {:?}",
|
||||
timestamp, workflow.name, old_status, workflow.status
|
||||
));
|
||||
logging::info(&format!(
|
||||
"Reset workflow '{}' from {} state to NotStarted - status is now {:?}",
|
||||
workflow.name, old_status, workflow.status
|
||||
));
|
||||
|
||||
// Set a success status message
|
||||
self.set_status_message(format!("✅ Workflow '{}' has been reset!", workflow_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
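
For orientation (not part of the diff): the tick cycle above is what drives the animated progress bar. A minimal sketch, assuming an `App` value named `app` inside the main event loop:

// Illustrative sketch: run once per iteration of the main event loop.
if app.tick() {
    // On each tick, nudge the simulated progress of the running workflow.
    app.update_running_workflow_progress();
}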
crates/ui/src/components/button.rs · 53 lines (Normal file)
@@ -0,0 +1,53 @@
// Button component
use ratatui::{
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::Paragraph,
};

/// A simple button component for the TUI
pub struct Button {
    pub label: String,
    pub is_selected: bool,
    pub is_active: bool,
}

impl Button {
    /// Create a new button
    pub fn new(label: &str) -> Self {
        Button {
            label: label.to_string(),
            is_selected: false,
            is_active: true,
        }
    }

    /// Set selected state
    pub fn selected(mut self, is_selected: bool) -> Self {
        self.is_selected = is_selected;
        self
    }

    /// Set active state
    pub fn active(mut self, is_active: bool) -> Self {
        self.is_active = is_active;
        self
    }

    /// Render the button
    pub fn render(&self) -> Paragraph<'_> {
        let (fg, bg) = match (self.is_selected, self.is_active) {
            (true, true) => (Color::Black, Color::Yellow),
            (true, false) => (Color::Black, Color::DarkGray),
            (false, true) => (Color::White, Color::Blue),
            (false, false) => (Color::DarkGray, Color::Black),
        };

        let style = Style::default().fg(fg).bg(bg).add_modifier(Modifier::BOLD);

        Paragraph::new(Line::from(vec![Span::styled(
            format!(" {} ", self.label),
            style,
        )]))
    }
}
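
A minimal usage sketch for this builder API (not part of the diff); it assumes a ratatui `Frame` named `f` and a `Rect` named `area`, as in the view modules later in this changeset:

// Hypothetical usage: render a highlighted "Run" button into `area`.
let run_button = Button::new("Run").selected(true).active(true);
f.render_widget(run_button.render(), area);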
crates/ui/src/components/checkbox.rs · 60 lines (Normal file)
@@ -0,0 +1,60 @@
// Checkbox component
use ratatui::{
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::Paragraph,
};

/// A simple checkbox component for the TUI
pub struct Checkbox {
    pub label: String,
    pub is_checked: bool,
    pub is_selected: bool,
}

impl Checkbox {
    /// Create a new checkbox
    pub fn new(label: &str) -> Self {
        Checkbox {
            label: label.to_string(),
            is_checked: false,
            is_selected: false,
        }
    }

    /// Set checked state
    pub fn checked(mut self, is_checked: bool) -> Self {
        self.is_checked = is_checked;
        self
    }

    /// Set selected state
    pub fn selected(mut self, is_selected: bool) -> Self {
        self.is_selected = is_selected;
        self
    }

    /// Toggle checked state
    pub fn toggle(&mut self) {
        self.is_checked = !self.is_checked;
    }

    /// Render the checkbox
    pub fn render(&self) -> Paragraph<'_> {
        let checkbox = if self.is_checked { "[✓]" } else { "[ ]" };

        let style = if self.is_selected {
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD)
        } else {
            Style::default().fg(Color::White)
        };

        Paragraph::new(Line::from(vec![
            Span::styled(checkbox, style),
            Span::raw(" "),
            Span::styled(&self.label, style),
        ]))
    }
}
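
Same assumptions as the Button sketch above (`f` and `area` come from the enclosing draw call):

// Hypothetical usage: toggle in response to a key press, then render.
let mut docker_checkbox = Checkbox::new("Use Docker runtime").selected(true);
docker_checkbox.toggle(); // e.g. on Space
f.render_widget(docker_checkbox.render(), area);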
crates/ui/src/components/mod.rs · 12 lines (Normal file)
@@ -0,0 +1,12 @@
// UI Components
mod button;
mod checkbox;
mod progress_bar;

// Re-export components for easier access
pub use button::Button;
pub use checkbox::Checkbox;
pub use progress_bar::ProgressBar;

// This module will contain smaller reusable UI elements that
// can be shared between different views of the application.
crates/ui/src/components/progress_bar.rs · 53 lines (Normal file)
@@ -0,0 +1,53 @@
// Progress bar component
use ratatui::{
    style::{Color, Style},
    widgets::Gauge,
};

/// A simple progress bar component for the TUI
pub struct ProgressBar {
    pub progress: f64,
    pub label: Option<String>,
    pub color: Color,
}

impl ProgressBar {
    /// Create a new progress bar
    pub fn new(progress: f64) -> Self {
        ProgressBar {
            progress: progress.clamp(0.0, 1.0),
            label: None,
            color: Color::Blue,
        }
    }

    /// Set label
    pub fn label(mut self, label: &str) -> Self {
        self.label = Some(label.to_string());
        self
    }

    /// Set color
    pub fn color(mut self, color: Color) -> Self {
        self.color = color;
        self
    }

    /// Update progress value
    pub fn update(&mut self, progress: f64) {
        self.progress = progress.clamp(0.0, 1.0);
    }

    /// Render the progress bar
    pub fn render(&self) -> Gauge<'_> {
        let label = match &self.label {
            Some(lbl) => format!("{} {:.0}%", lbl, self.progress * 100.0),
            None => format!("{:.0}%", self.progress * 100.0),
        };

        Gauge::default()
            .gauge_style(Style::default().fg(self.color).bg(Color::Black))
            .label(label)
            .ratio(self.progress)
    }
}
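
A short usage sketch, under the same `f`/`area` assumptions as the previous components:

// Hypothetical usage: a labeled, colored gauge driven by the builder API.
let mut bar = ProgressBar::new(0.25).label("Build").color(Color::Green);
bar.update(0.5); // e.g. after more steps complete
f.render_widget(bar.render(), area);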
crates/ui/src/handlers/mod.rs · 3 lines (Normal file)
@@ -0,0 +1,3 @@
// Handlers for the UI

pub mod workflow;
crates/ui/src/handlers/workflow.rs · 528 lines (Normal file)
@@ -0,0 +1,528 @@
// Workflow handlers
use crate::app::App;
use crate::models::{ExecutionResultMsg, WorkflowExecution, WorkflowStatus};
use chrono::Local;
use evaluator::evaluate_workflow_file;
use executor::{self, JobStatus, RuntimeType, StepStatus};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use std::thread;

// Validate a workflow or a directory containing workflows
pub fn validate_workflow(path: &Path, verbose: bool) -> io::Result<()> {
    let mut workflows = Vec::new();

    if path.is_dir() {
        let entries = std::fs::read_dir(path)?;

        for entry in entries {
            let entry = entry?;
            let entry_path = entry.path();

            if entry_path.is_file() && utils::is_workflow_file(&entry_path) {
                workflows.push(entry_path);
            }
        }
    } else if path.is_file() {
        workflows.push(PathBuf::from(path));
    } else {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            format!("Path does not exist: {}", path.display()),
        ));
    }

    let mut valid_count = 0;
    let mut invalid_count = 0;

    println!("Validating {} workflow file(s)...", workflows.len());

    for workflow_path in workflows {
        match evaluate_workflow_file(&workflow_path, verbose) {
            Ok(result) => {
                if result.is_valid {
                    println!("✅ Valid: {}", workflow_path.display());
                    valid_count += 1;
                } else {
                    println!("❌ Invalid: {}", workflow_path.display());
                    for (i, issue) in result.issues.iter().enumerate() {
                        println!("  {}. {}", i + 1, issue);
                    }
                    invalid_count += 1;
                }
            }
            Err(e) => {
                println!("❌ Error processing {}: {}", workflow_path.display(), e);
                invalid_count += 1;
            }
        }
    }

    println!(
        "\nSummary: {} valid, {} invalid",
        valid_count, invalid_count
    );

    Ok(())
}
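
A hypothetical usage sketch (the path is an assumption; per the function above, any file or directory works):

// Validate every workflow under the default GitHub Actions directory.
if let Err(e) = validate_workflow(Path::new(".github/workflows"), false) {
    eprintln!("validation aborted: {}", e);
}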
// Execute a workflow through the CLI
pub async fn execute_workflow_cli(
    path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
) -> io::Result<()> {
    if !path.exists() {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            format!("Workflow file does not exist: {}", path.display()),
        ));
    }

    println!("Validating workflow...");
    match evaluate_workflow_file(path, false) {
        Ok(result) => {
            if !result.is_valid {
                println!("❌ Cannot execute invalid workflow: {}", path.display());
                for (i, issue) in result.issues.iter().enumerate() {
                    println!("  {}. {}", i + 1, issue);
                }
                return Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    "Workflow validation failed",
                ));
            }
        }
        Err(e) => {
            return Err(io::Error::other(format!(
                "Error validating workflow: {}",
                e
            )));
        }
    }

    // Check Docker availability if the Docker runtime is selected
    let runtime_type = match runtime_type {
        RuntimeType::Docker => {
            if !executor::docker::is_available() {
                println!("⚠️ Docker is not available. Using emulation mode instead.");
                logging::warning("Docker is not available. Using emulation mode instead.");
                RuntimeType::Emulation
            } else {
                RuntimeType::Docker
            }
        }
        RuntimeType::Emulation => RuntimeType::Emulation,
    };

    println!("Executing workflow: {}", path.display());
    println!("Runtime mode: {:?}", runtime_type);

    // Log the start of the execution in debug mode with more details
    logging::debug(&format!(
        "Starting workflow execution: path={}, runtime={:?}, verbose={}",
        path.display(),
        runtime_type,
        verbose
    ));

    let config = executor::ExecutionConfig {
        runtime_type,
        verbose,
        preserve_containers_on_failure: false, // Default for this path
    };

    match executor::execute_workflow(path, config).await {
        Ok(result) => {
            println!("\nWorkflow execution results:");

            // Track whether the workflow had any failures
            let mut any_job_failed = false;

            for job in &result.jobs {
                match job.status {
                    JobStatus::Success => {
                        println!("\n✅ Job succeeded: {}", job.name);
                    }
                    JobStatus::Failure => {
                        println!("\n❌ Job failed: {}", job.name);
                        any_job_failed = true;
                    }
                    JobStatus::Skipped => {
                        println!("\n⏭️ Job skipped: {}", job.name);
                    }
                }

                println!("-------------------------");

                // Log the job details for debug purposes
                logging::debug(&format!("Job: {}, Status: {:?}", job.name, job.status));

                for step in job.steps.iter() {
                    match step.status {
                        StepStatus::Success => {
                            println!("  ✅ {}", step.name);

                            // Check if this is a GitHub action output that should be hidden
                            let should_hide = std::env::var("WRKFLW_HIDE_ACTION_MESSAGES")
                                .map(|val| val == "true")
                                .unwrap_or(false)
                                && step.output.contains("Would execute GitHub action:");

                            // Only show output if it is not hidden and is short
                            if !should_hide
                                && !step.output.trim().is_empty()
                                && step.output.lines().count() <= 3
                            {
                                // For short outputs, show directly
                                println!("    {}", step.output.trim());
                            }
                        }
                        StepStatus::Failure => {
                            println!("  ❌ {}", step.name);

                            // Ensure we capture and show the exit code
                            if let Some(exit_code) = step
                                .output
                                .lines()
                                .find(|line| line.trim().starts_with("Exit code:"))
                                .map(|line| line.trim().to_string())
                            {
                                println!("    {}", exit_code);
                            }

                            // Show command/run details in debug mode
                            if logging::get_log_level() <= logging::LogLevel::Debug {
                                if let Some(cmd_output) = step
                                    .output
                                    .lines()
                                    .skip_while(|l| !l.trim().starts_with("$"))
                                    .take(1)
                                    .next()
                                {
                                    println!("    Command: {}", cmd_output.trim());
                                }
                            }

                            // Always show error output from failed steps, but keep it to a reasonable length
                            let output_lines: Vec<&str> = step
                                .output
                                .lines()
                                .filter(|line| !line.trim().starts_with("Exit code:"))
                                .collect();

                            if !output_lines.is_empty() {
                                println!("    Error output:");
                                for line in output_lines.iter().take(10) {
                                    println!("      {}", line.trim().replace('\n', "\n      "));
                                }

                                if output_lines.len() > 10 {
                                    println!(
                                        "      ... (and {} more lines)",
                                        output_lines.len() - 10
                                    );
                                    println!("      Use --debug to see full output");
                                }
                            }
                        }
                        StepStatus::Skipped => {
                            println!("  ⏭️ {} (skipped)", step.name);
                        }
                    }

                    // Always log the step details for debug purposes
                    logging::debug(&format!(
                        "Step: {}, Status: {:?}, Output length: {} lines",
                        step.name,
                        step.status,
                        step.output.lines().count()
                    ));

                    // In debug mode, log all step output
                    if logging::get_log_level() == logging::LogLevel::Debug
                        && !step.output.trim().is_empty()
                    {
                        logging::debug(&format!(
                            "Step output for '{}': \n{}",
                            step.name, step.output
                        ));
                    }
                }
            }

            if any_job_failed {
                println!("\n❌ Workflow completed with failures");
                // On failure, also point the user at the debug option
                // if they are not already using it
                if logging::get_log_level() > logging::LogLevel::Debug {
                    println!("   Run with --debug for more detailed output");
                }
            } else {
                println!("\n✅ Workflow completed successfully!");
            }

            Ok(())
        }
        Err(e) => {
            println!("❌ Failed to execute workflow: {}", e);
            logging::error(&format!("Failed to execute workflow: {}", e));
            Err(io::Error::other(e))
        }
    }
}

// Helper function to trigger a workflow remotely via the GitHub API
// (named for the original curl-based implementation; it now uses reqwest)
pub async fn execute_curl_trigger(
    workflow_name: &str,
    branch: Option<&str>,
) -> Result<(Vec<executor::JobResult>, ()), String> {
    // Get the GitHub token
    let token = std::env::var("GITHUB_TOKEN").map_err(|_| {
        "GitHub token not found. Please set GITHUB_TOKEN environment variable".to_string()
    })?;

    // Debug log confirming GITHUB_TOKEN is set; only a short prefix is logged,
    // for security. `get(..5)` avoids the panic a direct `&token[..5]` slice
    // would cause on tokens shorter than five bytes.
    logging::info(&format!(
        "GITHUB_TOKEN is set: {}...",
        token.get(..5).unwrap_or("*")
    ));

    // Get repository information
    let repo_info =
        github::get_repo_info().map_err(|e| format!("Failed to get repository info: {}", e))?;

    // Determine which branch to use
    let branch_ref = branch.unwrap_or(&repo_info.default_branch);

    // Extract just the workflow name from the path if it is a full path
    let workflow_name = if workflow_name.contains('/') {
        Path::new(workflow_name)
            .file_stem()
            .and_then(|s| s.to_str())
            .ok_or_else(|| "Invalid workflow name".to_string())?
    } else {
        workflow_name
    };

    logging::info(&format!("Using workflow name: {}", workflow_name));

    // Construct the JSON payload
    let payload = serde_json::json!({
        "ref": branch_ref
    });

    // Construct the API URL
    let url = format!(
        "https://api.github.com/repos/{}/{}/actions/workflows/{}.yml/dispatches",
        repo_info.owner, repo_info.repo, workflow_name
    );

    logging::info(&format!("Triggering workflow at URL: {}", url));

    // Create a reqwest client
    let client = reqwest::Client::new();

    // Send the request using reqwest
    let response = client
        .post(&url)
        .header("Authorization", format!("Bearer {}", token.trim()))
        .header("Accept", "application/vnd.github.v3+json")
        .header("Content-Type", "application/json")
        .header("User-Agent", "wrkflw-cli")
        .json(&payload)
        .send()
        .await
        .map_err(|e| format!("Failed to send request: {}", e))?;

    if !response.status().is_success() {
        let status = response.status().as_u16();
        let error_message = response
            .text()
            .await
            .unwrap_or_else(|_| format!("Unknown error (HTTP {})", status));

        return Err(format!("API error: {} - {}", status, error_message));
    }

    // Success message with a URL to view the workflow
    let success_msg = format!(
        "Workflow triggered successfully. View it at: https://github.com/{}/{}/actions/workflows/{}.yml",
        repo_info.owner, repo_info.repo, workflow_name
    );

    // Create a job result structure
    let job_result = executor::JobResult {
        name: "GitHub Trigger".to_string(),
        status: executor::JobStatus::Success,
        steps: vec![executor::StepResult {
            name: "Remote Trigger".to_string(),
            status: executor::StepStatus::Success,
            output: success_msg,
        }],
        logs: "Workflow triggered remotely on GitHub".to_string(),
    };

    Ok((vec![job_result], ()))
}
// Extract common workflow execution logic to avoid duplication
pub fn start_next_workflow_execution(
    app: &mut App,
    tx_clone: &mpsc::Sender<ExecutionResultMsg>,
    verbose: bool,
) {
    if let Some(next_idx) = app.get_next_workflow_to_execute() {
        app.current_execution = Some(next_idx);
        let tx_clone_inner = tx_clone.clone();
        let workflow_path = app.workflows[next_idx].path.clone();

        // Log whether verbose mode is enabled
        if verbose {
            app.logs
                .push("Verbose mode: Step outputs will be displayed in full".to_string());
            logging::info("Verbose mode: Step outputs will be displayed in full");
        } else {
            app.logs.push(
                "Standard mode: Only step status will be shown (use --verbose for full output)"
                    .to_string(),
            );
            logging::info(
                "Standard mode: Only step status will be shown (use --verbose for full output)",
            );
        }

        // Check Docker availability again if the Docker runtime is selected
        let runtime_type = match app.runtime_type {
            RuntimeType::Docker => {
                // Use safe FD redirection to check Docker availability
                let is_docker_available =
                    match utils::fd::with_stderr_to_null(executor::docker::is_available) {
                        Ok(result) => result,
                        Err(_) => {
                            logging::debug(
                                "Failed to redirect stderr when checking Docker availability.",
                            );
                            false
                        }
                    };

                if !is_docker_available {
                    app.logs
                        .push("Docker is not available. Using emulation mode instead.".to_string());
                    logging::warning("Docker is not available. Using emulation mode instead.");
                    RuntimeType::Emulation
                } else {
                    RuntimeType::Docker
                }
            }
            RuntimeType::Emulation => RuntimeType::Emulation,
        };

        let validation_mode = app.validation_mode;
        let preserve_containers_on_failure = app.preserve_containers_on_failure;

        // Update the workflow status and add execution details
        app.workflows[next_idx].status = WorkflowStatus::Running;

        // Initialize execution details if not already done
        if app.workflows[next_idx].execution_details.is_none() {
            app.workflows[next_idx].execution_details = Some(WorkflowExecution {
                jobs: Vec::new(),
                start_time: Local::now(),
                end_time: None,
                logs: Vec::new(),
                progress: 0.0,
            });
        }

        thread::spawn(move || {
            let rt = match tokio::runtime::Runtime::new() {
                Ok(runtime) => runtime,
                Err(e) => {
                    let _ = tx_clone_inner.send((
                        next_idx,
                        Err(format!("Failed to create Tokio runtime: {}", e)),
                    ));
                    return;
                }
            };

            let result = rt.block_on(async {
                if validation_mode {
                    // Perform validation instead of execution
                    match evaluate_workflow_file(&workflow_path, verbose) {
                        Ok(validation_result) => {
                            // Create an execution result based on validation
                            let status = if validation_result.is_valid {
                                executor::JobStatus::Success
                            } else {
                                executor::JobStatus::Failure
                            };

                            // Create a synthetic job result for validation
                            let jobs = vec![executor::JobResult {
                                name: "Validation".to_string(),
                                status,
                                steps: vec![executor::StepResult {
                                    name: "Validator".to_string(),
                                    status: if validation_result.is_valid {
                                        executor::StepStatus::Success
                                    } else {
                                        executor::StepStatus::Failure
                                    },
                                    output: validation_result.issues.join("\n"),
                                }],
                                logs: format!(
                                    "Validation result: {}",
                                    if validation_result.is_valid {
                                        "PASSED"
                                    } else {
                                        "FAILED"
                                    }
                                ),
                            }];

                            Ok((jobs, ()))
                        }
                        Err(e) => Err(e.to_string()),
                    }
                } else {
                    // Use safe FD redirection for execution
                    let config = executor::ExecutionConfig {
                        runtime_type,
                        verbose,
                        preserve_containers_on_failure,
                    };

                    let execution_result = utils::fd::with_stderr_to_null(|| {
                        futures::executor::block_on(async {
                            executor::execute_workflow(&workflow_path, config).await
                        })
                    })
                    .map_err(|e| format!("Failed to redirect stderr during execution: {}", e))?;

                    match execution_result {
                        Ok(execution_result) => {
                            // Send back the job results in a wrapped result
                            Ok((execution_result.jobs, ()))
                        }
                        Err(e) => Err(e.to_string()),
                    }
                }
            });

            // Only send if we get a valid result
            if let Err(e) = tx_clone_inner.send((next_idx, result)) {
                logging::error(&format!("Error sending execution result: {}", e));
            }
        });
    } else {
        app.running = false;
        let timestamp = Local::now().format("%H:%M:%S").to_string();
        app.logs
            .push(format!("[{}] All workflows completed execution", timestamp));
        logging::info("All workflows completed execution");
    }
}
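
For orientation, a minimal sketch of how the pieces above are wired together. The variable names and the surrounding loop are assumptions; only the channel type and the function are from this changeset:

// Hypothetical wiring: the executor thread reports back over an mpsc
// channel typed with ExecutionResultMsg; the event loop polls without blocking.
let (tx, rx) = std::sync::mpsc::channel::<ExecutionResultMsg>();
start_next_workflow_execution(&mut app, &tx, false);

// Later, inside the event loop:
if let Ok((workflow_idx, result)) = rx.try_recv() {
    // Feed the pair into the App's result handling (the Ok/Err match shown
    // at the top of this changeset), updating status, logs, and progress.
    let _ = (workflow_idx, result);
}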
crates/ui/src/lib.rs · 22 lines (Normal file)
@@ -0,0 +1,22 @@
// Modular UI crate for wrkflw
//
// This crate is organized into several modules:
// - app: Contains the main App state and TUI entry point
// - models: Contains the data structures for the UI
// - components: Contains reusable UI elements
// - handlers: Contains workflow handling logic
// - utils: Contains utility functions
// - views: Contains UI rendering code

// Re-export public modules
pub mod app;
pub mod components;
pub mod handlers;
pub mod models;
pub mod utils;
pub mod views;

// Re-export main entry points
pub use app::run_wrkflw_tui;
pub use handlers::workflow::execute_workflow_cli;
pub use handlers::workflow::validate_workflow;
crates/ui/src/models/mod.rs · 99 lines (Normal file)
@@ -0,0 +1,99 @@
// UI Models for wrkflw
use chrono::Local;
use executor::{JobStatus, StepStatus};
use std::path::PathBuf;

/// Type alias for the complex execution result type
pub type ExecutionResultMsg = (usize, Result<(Vec<executor::JobResult>, ()), String>);

/// Represents an individual workflow file
pub struct Workflow {
    pub name: String,
    pub path: PathBuf,
    pub selected: bool,
    pub status: WorkflowStatus,
    pub execution_details: Option<WorkflowExecution>,
}

/// Status of a workflow
#[derive(Debug, Clone, PartialEq)]
pub enum WorkflowStatus {
    NotStarted,
    Running,
    Success,
    Failed,
    Skipped,
}

/// Detailed execution information
pub struct WorkflowExecution {
    pub jobs: Vec<JobExecution>,
    pub start_time: chrono::DateTime<Local>,
    pub end_time: Option<chrono::DateTime<Local>>,
    pub logs: Vec<String>,
    pub progress: f64, // 0.0 - 1.0 for the progress bar
}

/// Job execution details
pub struct JobExecution {
    pub name: String,
    pub status: JobStatus,
    pub steps: Vec<StepExecution>,
    pub logs: Vec<String>,
}

/// Step execution details
pub struct StepExecution {
    pub name: String,
    pub status: StepStatus,
    pub output: String,
}

/// Log filter levels
pub enum LogFilterLevel {
    Info,
    Warning,
    Error,
    Success,
    Trigger,
    All,
}

impl LogFilterLevel {
    pub fn matches(&self, log: &str) -> bool {
        match self {
            LogFilterLevel::Info => {
                log.contains("ℹ️") || (log.contains("INFO") && !log.contains("SUCCESS"))
            }
            LogFilterLevel::Warning => log.contains("⚠️") || log.contains("WARN"),
            LogFilterLevel::Error => log.contains("❌") || log.contains("ERROR"),
            LogFilterLevel::Success => log.contains("SUCCESS") || log.contains("success"),
            LogFilterLevel::Trigger => {
                log.contains("Triggering") || log.contains("triggered") || log.contains("TRIG")
            }
            LogFilterLevel::All => true,
        }
    }

    pub fn next(&self) -> Self {
        match self {
            LogFilterLevel::All => LogFilterLevel::Info,
            LogFilterLevel::Info => LogFilterLevel::Warning,
            LogFilterLevel::Warning => LogFilterLevel::Error,
            LogFilterLevel::Error => LogFilterLevel::Success,
            LogFilterLevel::Success => LogFilterLevel::Trigger,
            LogFilterLevel::Trigger => LogFilterLevel::All,
        }
    }

    pub fn to_string(&self) -> &str {
        match self {
            LogFilterLevel::All => "ALL",
            LogFilterLevel::Info => "INFO",
            LogFilterLevel::Warning => "WARNING",
            LogFilterLevel::Error => "ERROR",
            LogFilterLevel::Success => "SUCCESS",
            LogFilterLevel::Trigger => "TRIGGER",
        }
    }
}
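
An illustrative sketch of the filter cycle this enum drives (each press of the filter key in the TUI calls next(), wrapping from Trigger back to All):

let mut level = LogFilterLevel::All;
level = level.next(); // now Info
assert!(level.matches("ℹ️ starting up"));
assert!(!level.matches("❌ ERROR: something broke"));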
crates/ui/src/utils/mod.rs · 53 lines (Normal file)
@@ -0,0 +1,53 @@
// UI utilities
use crate::models::{Workflow, WorkflowStatus};
use std::path::{Path, PathBuf};
use utils::is_workflow_file;

/// Find and load all workflow files in a directory
pub fn load_workflows(dir_path: &Path) -> Vec<Workflow> {
    let mut workflows = Vec::new();

    // The default path is .github/workflows
    let default_workflows_dir = Path::new(".github").join("workflows");
    let is_default_dir = dir_path == default_workflows_dir || dir_path.ends_with("workflows");

    if let Ok(entries) = std::fs::read_dir(dir_path) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_file() && (is_workflow_file(&path) || !is_default_dir) {
                // Get just the base name without the extension
                let name = path.file_stem().map_or_else(
                    || "[unknown]".to_string(),
                    |fname| fname.to_string_lossy().into_owned(),
                );

                workflows.push(Workflow {
                    name,
                    path,
                    selected: false,
                    status: WorkflowStatus::NotStarted,
                    execution_details: None,
                });
            }
        }
    }

    // Check for a GitLab CI pipeline file in the root directory if we are in the default GitHub workflows dir
    if is_default_dir {
        // Look for .gitlab-ci.yml in the repository root
        let gitlab_ci_path = PathBuf::from(".gitlab-ci.yml");
        if gitlab_ci_path.exists() && gitlab_ci_path.is_file() {
            workflows.push(Workflow {
                name: "gitlab-ci".to_string(),
                path: gitlab_ci_path,
                selected: false,
                status: WorkflowStatus::NotStarted,
                execution_details: None,
            });
        }
    }

    // Sort workflows by name
    workflows.sort_by(|a, b| a.name.cmp(&b.name));
    workflows
}
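
A hypothetical usage sketch (the path is just the default shown above):

// Discover workflows and list what was found.
let workflows = load_workflows(Path::new(".github/workflows"));
for wf in &workflows {
    println!("{} -> {}", wf.name, wf.path.display());
}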
crates/ui/src/views/execution_tab.rs · 359 lines (Normal file)
@@ -0,0 +1,359 @@
// Execution tab rendering
use crate::app::App;
use crate::models::WorkflowStatus;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Gauge, List, ListItem, Paragraph},
    Frame,
};
use std::io;

// Render the execution tab
pub fn render_execution_tab(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Get the workflow index either from current_execution or from the selected workflow
    let current_workflow_idx = app
        .current_execution
        .or_else(|| app.workflow_list_state.selected())
        .filter(|&idx| idx < app.workflows.len());

    if let Some(idx) = current_workflow_idx {
        let workflow = &app.workflows[idx];

        // Split the area into sections
        let chunks = Layout::default()
            .direction(Direction::Vertical)
            .constraints(
                [
                    Constraint::Length(5), // Workflow info with progress bar
                    Constraint::Min(5),    // Jobs list or remote execution info
                    Constraint::Length(7), // Execution info
                ]
                .as_ref(),
            )
            .margin(1)
            .split(area);

        // Workflow info section
        let status_text = match workflow.status {
            WorkflowStatus::NotStarted => "Not Started",
            WorkflowStatus::Running => "Running",
            WorkflowStatus::Success => "Success",
            WorkflowStatus::Failed => "Failed",
            WorkflowStatus::Skipped => "Skipped",
        };

        let status_style = match workflow.status {
            WorkflowStatus::NotStarted => Style::default().fg(Color::Gray),
            WorkflowStatus::Running => Style::default().fg(Color::Cyan),
            WorkflowStatus::Success => Style::default().fg(Color::Green),
            WorkflowStatus::Failed => Style::default().fg(Color::Red),
            WorkflowStatus::Skipped => Style::default().fg(Color::Yellow),
        };

        let mut workflow_info = vec![
            Line::from(vec![
                Span::styled("Workflow: ", Style::default().fg(Color::Blue)),
                Span::styled(
                    workflow.name.clone(),
                    Style::default()
                        .fg(Color::White)
                        .add_modifier(Modifier::BOLD),
                ),
            ]),
            Line::from(vec![
                Span::styled("Status: ", Style::default().fg(Color::Blue)),
                Span::styled(status_text, status_style),
            ]),
        ];

        // Add a progress bar for running workflows or workflows with execution details
        if let Some(execution) = &workflow.execution_details {
            // Calculate progress
            let progress = execution.progress;

            // Add the progress bar
            let gauge_color = match workflow.status {
                WorkflowStatus::Running => Color::Cyan,
                WorkflowStatus::Success => Color::Green,
                WorkflowStatus::Failed => Color::Red,
                _ => Color::Gray,
            };

            let progress_text = match workflow.status {
                WorkflowStatus::Running => format!("{:.0}%", progress * 100.0),
                WorkflowStatus::Success => "Completed".to_string(),
                WorkflowStatus::Failed => "Failed".to_string(),
                _ => "Not started".to_string(),
            };

            // Add an empty line before the progress bar
            workflow_info.push(Line::from(""));

            // Add the gauge label to the paragraph data
            workflow_info.push(Line::from(vec![Span::styled(
                format!("Progress: {}", progress_text),
                Style::default().fg(Color::Blue),
            )]));

            let gauge = Gauge::default()
                .block(Block::default())
                .gauge_style(Style::default().fg(gauge_color).bg(Color::Black))
                .percent((progress * 100.0) as u16);

            // Render the gauge separately after the paragraph
            let workflow_info_widget = Paragraph::new(workflow_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Workflow Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            let gauge_area = Rect {
                x: chunks[0].x + 2,
                y: chunks[0].y + 4,
                width: chunks[0].width - 4,
                height: 1,
            };

            f.render_widget(workflow_info_widget, chunks[0]);
            f.render_widget(gauge, gauge_area);

            // Jobs list section
            if execution.jobs.is_empty() {
                let placeholder = Paragraph::new("No jobs have started execution yet...")
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
                    )
                    .alignment(Alignment::Center);
                f.render_widget(placeholder, chunks[1]);
            } else {
                let job_items: Vec<ListItem> = execution
                    .jobs
                    .iter()
                    .map(|job| {
                        let status_symbol = match job.status {
                            executor::JobStatus::Success => "✅",
                            executor::JobStatus::Failure => "❌",
                            executor::JobStatus::Skipped => "⏭",
                        };

                        let status_style = match job.status {
                            executor::JobStatus::Success => Style::default().fg(Color::Green),
                            executor::JobStatus::Failure => Style::default().fg(Color::Red),
                            executor::JobStatus::Skipped => Style::default().fg(Color::Gray),
                        };

                        // Count completed and total steps
                        let total_steps = job.steps.len();
                        let completed_steps = job
                            .steps
                            .iter()
                            .filter(|s| {
                                s.status == executor::StepStatus::Success
                                    || s.status == executor::StepStatus::Failure
                            })
                            .count();

                        let steps_info = format!("[{}/{}]", completed_steps, total_steps);

                        ListItem::new(Line::from(vec![
                            Span::styled(status_symbol, status_style),
                            Span::raw(" "),
                            Span::styled(&job.name, Style::default().fg(Color::White)),
                            Span::raw(" "),
                            Span::styled(steps_info, Style::default().fg(Color::DarkGray)),
                        ]))
                    })
                    .collect();

                let jobs_list = List::new(job_items)
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
                    )
                    .highlight_style(
                        Style::default()
                            .bg(Color::DarkGray)
                            .add_modifier(Modifier::BOLD),
                    )
                    .highlight_symbol("» ");

                f.render_stateful_widget(jobs_list, chunks[1], &mut app.job_list_state);
            }

            // Execution info section
            let mut execution_info = Vec::new();

            execution_info.push(Line::from(vec![
                Span::styled("Started: ", Style::default().fg(Color::Blue)),
                Span::styled(
                    execution.start_time.format("%Y-%m-%d %H:%M:%S").to_string(),
                    Style::default().fg(Color::White),
                ),
            ]));

            if let Some(end_time) = execution.end_time {
                execution_info.push(Line::from(vec![
                    Span::styled("Finished: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        end_time.format("%Y-%m-%d %H:%M:%S").to_string(),
                        Style::default().fg(Color::White),
                    ),
                ]));

                // Calculate the duration
                let duration = end_time.signed_duration_since(execution.start_time);
                execution_info.push(Line::from(vec![
                    Span::styled("Duration: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        format!(
                            "{}m {}s",
                            duration.num_minutes(),
                            duration.num_seconds() % 60
                        ),
                        Style::default().fg(Color::White),
                    ),
                ]));
            } else {
                // Show the running time for active workflows
                let current_time = chrono::Local::now();
                let running_time = current_time.signed_duration_since(execution.start_time);
                execution_info.push(Line::from(vec![
                    Span::styled("Running for: ", Style::default().fg(Color::Blue)),
                    Span::styled(
                        format!(
                            "{}m {}s",
                            running_time.num_minutes(),
                            running_time.num_seconds() % 60
                        ),
                        Style::default().fg(Color::White),
                    ),
                ]));
            }

            // Add a hint for the Enter key to see details
            execution_info.push(Line::from(""));
            execution_info.push(Line::from(vec![
                Span::styled("Press ", Style::default().fg(Color::DarkGray)),
                Span::styled("Enter", Style::default().fg(Color::Yellow)),
                Span::styled(" to view job details", Style::default().fg(Color::DarkGray)),
            ]));

            let info_widget = Paragraph::new(execution_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Execution Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            f.render_widget(info_widget, chunks[2]);
        } else {
            // No workflow execution to display
            let workflow_info_widget = Paragraph::new(workflow_info).block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Workflow Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            );

            f.render_widget(workflow_info_widget, chunks[0]);

            // No execution details to display
            let placeholder = Paragraph::new(vec![
                Line::from(""),
                Line::from(vec![Span::styled(
                    "No execution data available.",
                    Style::default()
                        .fg(Color::Yellow)
                        .add_modifier(Modifier::BOLD),
                )]),
                Line::from(""),
                Line::from("Press 'Enter' to run this workflow."),
                Line::from(""),
            ])
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(" Jobs ", Style::default().fg(Color::Yellow))),
            )
            .alignment(Alignment::Center);

            f.render_widget(placeholder, chunks[1]);

            // Execution information
            let info_widget = Paragraph::new(vec![
                Line::from(""),
                Line::from(vec![Span::styled(
                    "No execution has been started.",
                    Style::default().fg(Color::Yellow),
                )]),
                Line::from(""),
                Line::from("Press 'Enter' in the Workflows tab to run,"),
                Line::from("or 't' to trigger on GitHub."),
            ])
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Execution Information ",
                        Style::default().fg(Color::Yellow),
                    )),
            )
            .alignment(Alignment::Center);

            f.render_widget(info_widget, chunks[2]);
        }
    } else {
        // No workflow execution to display
        let placeholder = Paragraph::new(vec![
            Line::from(""),
            Line::from(vec![Span::styled(
                "No workflow execution data available.",
                Style::default()
                    .fg(Color::Yellow)
                    .add_modifier(Modifier::BOLD),
            )]),
            Line::from(""),
            Line::from("Select workflows in the Workflows tab and press 'r' to run them."),
            Line::from(""),
            Line::from("Or press Enter on a selected workflow to run it directly."),
            Line::from(""),
            Line::from("You can also press 't' to trigger a workflow on GitHub remotely."),
        ])
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    " Execution ",
                    Style::default().fg(Color::Yellow),
                )),
        )
        .alignment(Alignment::Center);

        f.render_widget(placeholder, area);
    }
}
crates/ui/src/views/help_overlay.rs · 69 lines (Normal file)
@@ -0,0 +1,69 @@
// Help overlay rendering
use ratatui::{
    backend::CrosstermBackend,
    layout::Rect,
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Paragraph, Wrap},
    Frame,
};
use std::io;

// Render the help tab
pub fn render_help_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect) {
    let help_text = vec![
        Line::from(Span::styled(
            "Keyboard Controls",
            Style::default()
                .fg(Color::Cyan)
                .add_modifier(Modifier::BOLD),
        )),
        Line::from(""),
        Line::from(vec![
            Span::styled(
                "Tab",
                Style::default()
                    .fg(Color::Yellow)
                    .add_modifier(Modifier::BOLD),
            ),
            Span::raw(" - Switch between tabs"),
        ]),
        // More help text would follow...
    ];

    let help_widget = Paragraph::new(help_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(" Help ", Style::default().fg(Color::Yellow))),
        )
        .wrap(Wrap { trim: true });

    f.render_widget(help_widget, area);
}

// Render a help overlay
pub fn render_help_overlay(f: &mut Frame<CrosstermBackend<io::Stdout>>) {
    let size = f.size();

    // Create a slightly smaller centered modal
    let width = size.width.min(60);
    let height = size.height.min(20);
    let x = (size.width - width) / 2;
    let y = (size.height - height) / 2;

    let help_area = Rect {
        x,
        y,
        width,
        height,
    };

    // Create a clear background
    let clear = Block::default().style(Style::default().bg(Color::Black));
    f.render_widget(clear, size);

    // Render the help content
    render_help_tab(f, help_area);
}
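
A hypothetical integration sketch showing how a draw call would invoke the overlay; terminal setup is abbreviated, and the real event loop lives in the app module:

use ratatui::{backend::CrosstermBackend, Terminal};
let backend = CrosstermBackend::new(std::io::stdout());
let mut terminal = Terminal::new(backend)?;
terminal.draw(|f| render_help_overlay(f))?;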
crates/ui/src/views/job_detail.rs · 201 lines (Normal file)
@@ -0,0 +1,201 @@
// Job detail view rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Paragraph, Row, Table},
    Frame,
};
use std::io;

// Render the job detail view
pub fn render_job_detail_view(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Get the workflow index either from current_execution or from the selected workflow
    let current_workflow_idx = app
        .current_execution
        .or_else(|| app.workflow_list_state.selected())
        .filter(|&idx| idx < app.workflows.len());

    if let Some(workflow_idx) = current_workflow_idx {
        // Only proceed if we have execution details
        if let Some(execution) = &app.workflows[workflow_idx].execution_details {
            // Only proceed if we have a valid job selection
            if let Some(job_idx) = app.job_list_state.selected() {
                if job_idx < execution.jobs.len() {
                    let job = &execution.jobs[job_idx];

                    // Split the area into sections
                    let chunks = Layout::default()
                        .direction(Direction::Vertical)
                        .constraints(
                            [
                                Constraint::Length(3), // Job title
                                Constraint::Min(5),    // Steps table
                                Constraint::Length(8), // Step details
                            ]
                            .as_ref(),
                        )
                        .margin(1)
                        .split(area);

                    // Job title section
                    let status_text = match job.status {
                        executor::JobStatus::Success => "Success",
                        executor::JobStatus::Failure => "Failed",
                        executor::JobStatus::Skipped => "Skipped",
                    };

                    let status_style = match job.status {
                        executor::JobStatus::Success => Style::default().fg(Color::Green),
                        executor::JobStatus::Failure => Style::default().fg(Color::Red),
                        executor::JobStatus::Skipped => Style::default().fg(Color::Yellow),
                    };

                    let job_title = Paragraph::new(vec![
                        Line::from(vec![
                            Span::styled("Job: ", Style::default().fg(Color::Blue)),
                            Span::styled(
                                job.name.clone(),
                                Style::default()
                                    .fg(Color::White)
                                    .add_modifier(Modifier::BOLD),
                            ),
                            Span::raw(" ("),
                            Span::styled(status_text, status_style),
                            Span::raw(")"),
                        ]),
                        Line::from(vec![
                            Span::styled("Steps: ", Style::default().fg(Color::Blue)),
                            Span::styled(
                                format!("{}", job.steps.len()),
                                Style::default().fg(Color::White),
                            ),
                        ]),
                    ])
                    .block(
                        Block::default()
                            .borders(Borders::ALL)
                            .border_type(BorderType::Rounded)
                            .title(Span::styled(
                                " Job Details ",
                                Style::default().fg(Color::Yellow),
                            )),
                    );

                    f.render_widget(job_title, chunks[0]);

                    // Steps section
                    let header_cells = ["Status", "Step Name"].iter().map(|h| {
                        ratatui::widgets::Cell::from(*h).style(Style::default().fg(Color::Yellow))
                    });

                    let header = Row::new(header_cells)
                        .style(Style::default().add_modifier(Modifier::BOLD))
                        .height(1);

                    let rows = job.steps.iter().map(|step| {
                        let status_symbol = match step.status {
                            executor::StepStatus::Success => "✅",
                            executor::StepStatus::Failure => "❌",
                            executor::StepStatus::Skipped => "⏭",
                        };

                        let status_style = match step.status {
                            executor::StepStatus::Success => Style::default().fg(Color::Green),
                            executor::StepStatus::Failure => Style::default().fg(Color::Red),
                            executor::StepStatus::Skipped => Style::default().fg(Color::Gray),
                        };

                        Row::new(vec![
                            ratatui::widgets::Cell::from(status_symbol).style(status_style),
                            ratatui::widgets::Cell::from(step.name.clone()),
                        ])
                    });

                    let steps_table = Table::new(rows)
                        .header(header)
                        .block(
                            Block::default()
                                .borders(Borders::ALL)
                                .border_type(BorderType::Rounded)
                                .title(Span::styled(" Steps ", Style::default().fg(Color::Yellow))),
                        )
                        .highlight_style(
                            Style::default()
                                .bg(Color::DarkGray)
                                .add_modifier(Modifier::BOLD),
                        )
                        .highlight_symbol("» ")
                        .widths(&[
                            Constraint::Length(8),      // Status icon column
                            Constraint::Percentage(92), // Name column
                        ]);

                    // We need to use the table state from the app
                    f.render_stateful_widget(steps_table, chunks[1], &mut app.step_table_state);

                    // Step detail section
                    if let Some(step_idx) = app.step_table_state.selected() {
                        if step_idx < job.steps.len() {
                            let step = &job.steps[step_idx];

                            // Show the step output with proper styling
                            let status_text = match step.status {
                                executor::StepStatus::Success => "Success",
                                executor::StepStatus::Failure => "Failed",
                                executor::StepStatus::Skipped => "Skipped",
                            };

                            let status_style = match step.status {
                                executor::StepStatus::Success => Style::default().fg(Color::Green),
                                executor::StepStatus::Failure => Style::default().fg(Color::Red),
                                executor::StepStatus::Skipped => Style::default().fg(Color::Yellow),
                            };

                            let mut output_text = step.output.clone();
                            // Truncate if too long, backing up to a char boundary
                            // so slicing cannot panic on multi-byte UTF-8
                            if output_text.len() > 1000 {
                                let cut = (0..=1000)
                                    .rev()
                                    .find(|&i| output_text.is_char_boundary(i))
                                    .unwrap_or(0);
                                output_text = format!("{}... [truncated]", &output_text[..cut]);
                            }

                            let step_detail = Paragraph::new(vec![
                                Line::from(vec![
                                    Span::styled("Step: ", Style::default().fg(Color::Blue)),
                                    Span::styled(
                                        step.name.clone(),
                                        Style::default()
                                            .fg(Color::White)
                                            .add_modifier(Modifier::BOLD),
                                    ),
                                    Span::raw(" ("),
                                    Span::styled(status_text, status_style),
                                    Span::raw(")"),
                                ]),
                                Line::from(""),
                                Line::from(output_text),
                            ])
                            .block(
                                Block::default()
                                    .borders(Borders::ALL)
                                    .border_type(BorderType::Rounded)
                                    .title(Span::styled(
                                        " Step Output ",
                                        Style::default().fg(Color::Yellow),
                                    )),
                            )
                            .wrap(ratatui::widgets::Wrap { trim: false });

                            f.render_widget(step_detail, chunks[2]);
                        }
                    }
                }
            }
        }
    }
}
|

345
crates/ui/src/views/logs_tab.rs
Normal file
@@ -0,0 +1,345 @@
// Logs tab rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
    Frame,
};
use std::io;

// Render the logs tab
pub fn render_logs_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    // Split the area into header, search bar (optionally shown), and log content
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints(
            [
                Constraint::Length(3), // Header with instructions
                Constraint::Length(
                    if app.log_search_active
                        || !app.log_search_query.is_empty()
                        || app.log_filter_level.is_some()
                    {
                        3
                    } else {
                        0
                    },
                ), // Search bar (optional)
                Constraint::Min(3), // Logs content
            ]
            .as_ref(),
        )
        .margin(1)
        .split(area);

    // Determine if search/filter bar should be shown
    let show_search_bar =
        app.log_search_active || !app.log_search_query.is_empty() || app.log_filter_level.is_some();

    // Render header with instructions
    let mut header_text = vec![
        Line::from(vec![Span::styled(
            "Execution and System Logs",
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD),
        )]),
        Line::from(vec![
            Span::styled("↑/↓", Style::default().fg(Color::Cyan)),
            Span::raw(" or "),
            Span::styled("j/k", Style::default().fg(Color::Cyan)),
            Span::raw(": Navigate logs/matches "),
            Span::styled("s", Style::default().fg(Color::Cyan)),
            Span::raw(": Search "),
            Span::styled("f", Style::default().fg(Color::Cyan)),
            Span::raw(": Filter "),
            Span::styled("Tab", Style::default().fg(Color::Cyan)),
            Span::raw(": Switch tabs"),
        ]),
    ];

    if show_search_bar {
        header_text.push(Line::from(vec![
            Span::styled("Enter", Style::default().fg(Color::Cyan)),
            Span::raw(": Apply search "),
            Span::styled("Esc", Style::default().fg(Color::Cyan)),
            Span::raw(": Clear search "),
            Span::styled("c", Style::default().fg(Color::Cyan)),
            Span::raw(": Clear all filters"),
        ]));
    }

    let header = Paragraph::new(header_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded),
        )
        .alignment(Alignment::Center);

    f.render_widget(header, chunks[0]);

    // Render search bar if active or has content
    if show_search_bar {
        let search_text = if app.log_search_active {
            format!("Search: {}█", app.log_search_query)
        } else {
            format!("Search: {}", app.log_search_query)
        };

        let filter_text = match &app.log_filter_level {
            Some(level) => format!("Filter: {}", level.to_string()),
            None => "No filter".to_string(),
        };

        let match_info = if !app.log_search_matches.is_empty() {
            format!(
                "Matches: {}/{}",
                app.log_search_match_idx + 1,
                app.log_search_matches.len()
            )
        } else if !app.log_search_query.is_empty() {
            "No matches".to_string()
        } else {
            "".to_string()
        };

        let search_info = Line::from(vec![
            Span::raw(search_text),
            Span::raw(" "),
            Span::styled(
                filter_text,
                Style::default().fg(match &app.log_filter_level {
                    Some(crate::models::LogFilterLevel::Error) => Color::Red,
                    Some(crate::models::LogFilterLevel::Warning) => Color::Yellow,
                    Some(crate::models::LogFilterLevel::Info) => Color::Cyan,
                    Some(crate::models::LogFilterLevel::Success) => Color::Green,
                    Some(crate::models::LogFilterLevel::Trigger) => Color::Magenta,
                    Some(crate::models::LogFilterLevel::All) | None => Color::Gray,
                }),
            ),
            Span::raw(" "),
            Span::styled(match_info, Style::default().fg(Color::Magenta)),
        ]);

        let search_block = Paragraph::new(search_info)
            .block(
                Block::default()
                    .borders(Borders::ALL)
                    .border_type(BorderType::Rounded)
                    .title(Span::styled(
                        " Search & Filter ",
                        Style::default().fg(Color::Yellow),
                    )),
            )
            .alignment(Alignment::Left);

        f.render_widget(search_block, chunks[1]);
    }

    // Combine application logs with system logs
    let mut all_logs = Vec::new();

    // Now all logs should have timestamps in the format [HH:MM:SS]

    // Process app logs
    for log in &app.logs {
        all_logs.push(log.clone());
    }

    // Process system logs
    for log in logging::get_logs() {
        all_logs.push(log.clone());
    }

    // Filter logs based on search query and filter level
    let filtered_logs = if !app.log_search_query.is_empty() || app.log_filter_level.is_some() {
        all_logs
            .iter()
            .filter(|log| {
                let passes_filter = match &app.log_filter_level {
                    None => true,
                    Some(level) => level.matches(log),
                };

                let matches_search = if app.log_search_query.is_empty() {
                    true
                } else {
                    log.to_lowercase()
                        .contains(&app.log_search_query.to_lowercase())
                };

                passes_filter && matches_search
            })
            .cloned()
            .collect::<Vec<String>>()
    } else {
        all_logs.clone() // Clone to avoid moving all_logs
    };

    // Create a table for logs for better organization
    let header_cells = ["Time", "Type", "Message"]
        .iter()
        .map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));

    let header = Row::new(header_cells)
        .style(Style::default().add_modifier(Modifier::BOLD))
        .height(1);

    let rows = filtered_logs.iter().map(|log_line| {
        // Parse log line to extract timestamp, type and message

        // Extract timestamp from log format [HH:MM:SS]
        let timestamp = if log_line.starts_with('[') && log_line.contains(']') {
            let end = log_line.find(']').unwrap_or(0);
            if end > 1 {
                log_line[1..end].to_string()
            } else {
                "??:??:??".to_string() // Show placeholder for malformed logs
            }
        } else {
            "??:??:??".to_string() // Show placeholder for malformed logs
        };

        let (log_type, log_style, _) =
            if log_line.contains("Error") || log_line.contains("error") || log_line.contains("❌")
            {
                ("ERROR", Style::default().fg(Color::Red), log_line.as_str())
            } else if log_line.contains("Warning")
                || log_line.contains("warning")
                || log_line.contains("⚠️")
            {
                (
                    "WARN",
                    Style::default().fg(Color::Yellow),
                    log_line.as_str(),
                )
            } else if log_line.contains("Success")
                || log_line.contains("success")
                || log_line.contains("✅")
            {
                (
                    "SUCCESS",
                    Style::default().fg(Color::Green),
                    log_line.as_str(),
                )
            } else if log_line.contains("Running")
                || log_line.contains("running")
                || log_line.contains("⟳")
            {
                ("INFO", Style::default().fg(Color::Cyan), log_line.as_str())
            } else if log_line.contains("Triggering") || log_line.contains("triggered") {
                (
                    "TRIG",
                    Style::default().fg(Color::Magenta),
                    log_line.as_str(),
                )
            } else {
                ("INFO", Style::default().fg(Color::Gray), log_line.as_str())
            };

        // Extract content after timestamp
        let content = if log_line.starts_with('[') && log_line.contains(']') {
            let start = log_line.find(']').unwrap_or(0) + 1;
            log_line[start..].trim()
        } else {
            log_line.as_str()
        };

        // Highlight search matches in content if search is active
        let mut content_spans = Vec::new();
        if !app.log_search_query.is_empty() {
            let lowercase_content = content.to_lowercase();
            let lowercase_query = app.log_search_query.to_lowercase();

            if lowercase_content.contains(&lowercase_query) {
                let mut last_idx = 0;
                while let Some(idx) = lowercase_content[last_idx..].find(&lowercase_query) {
                    let real_idx = last_idx + idx;

                    // Add text before match
                    if real_idx > last_idx {
                        content_spans.push(Span::raw(content[last_idx..real_idx].to_string()));
                    }

                    // Add matched text with highlight
                    let match_end = real_idx + app.log_search_query.len();
                    content_spans.push(Span::styled(
                        content[real_idx..match_end].to_string(),
                        Style::default().bg(Color::Yellow).fg(Color::Black),
                    ));

                    last_idx = match_end;
                }

                // Add remaining text after last match
                if last_idx < content.len() {
                    content_spans.push(Span::raw(content[last_idx..].to_string()));
                }
            } else {
                content_spans.push(Span::raw(content));
            }
        } else {
            content_spans.push(Span::raw(content));
        }

        Row::new(vec![
            Cell::from(timestamp),
            Cell::from(log_type).style(log_style),
            Cell::from(Line::from(content_spans)),
        ])
    });

    let content_idx = if show_search_bar { 2 } else { 1 };

    let log_table = Table::new(rows)
        .header(header)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    format!(
                        " Logs ({}/{}) ",
                        if filtered_logs.is_empty() {
                            0
                        } else {
                            app.log_scroll + 1
                        },
                        filtered_logs.len()
                    ),
                    Style::default().fg(Color::Yellow),
                )),
        )
        .highlight_style(Style::default().bg(Color::DarkGray))
        .widths(&[
            Constraint::Length(10),     // Timestamp column
            Constraint::Length(7),      // Log type column
            Constraint::Percentage(80), // Message column
        ]);

    // We need to convert log_scroll index to a TableState
    let mut log_table_state = TableState::default();

    if !filtered_logs.is_empty() {
        // If we have search matches, use the match index as the selected row
        if !app.log_search_matches.is_empty() {
            // Make sure we're within bounds
            let _match_index = app
                .log_search_match_idx
                .min(app.log_search_matches.len() - 1);

            // This would involve more complex logic to go from search matches to the filtered logs
            // For simplicity in this placeholder, we'll just use the scroll position
            log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
        } else {
            // No search matches, use regular scroll position
            log_table_state.select(Some(app.log_scroll.min(filtered_logs.len() - 1)));
        }
    }

    f.render_stateful_widget(log_table, chunks[content_idx], &mut log_table_state);
}
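
The search-highlight loop above is easiest to follow in isolation. Below is a minimal standalone sketch of the same case-insensitive splitting logic (a hypothetical helper, not part of this change; like the original, it assumes lowercasing does not shift byte offsets):

// Splits `content` into (text, is_match) segments for a case-insensitive
// query, mirroring the span-building loop in render_logs_tab above.
fn split_matches(content: &str, query: &str) -> Vec<(String, bool)> {
    let lc = content.to_lowercase();
    let lq = query.to_lowercase();
    let mut segments = Vec::new();
    let mut last = 0;
    while let Some(i) = lc[last..].find(&lq) {
        let start = last + i;
        if start > last {
            // Unmatched text before this hit.
            segments.push((content[last..start].to_string(), false));
        }
        let end = start + query.len();
        // The matched slice itself.
        segments.push((content[start..end].to_string(), true));
        last = end;
    }
    if last < content.len() {
        // Trailing text after the last hit.
        segments.push((content[last..].to_string(), false));
    }
    segments
}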

57
crates/ui/src/views/mod.rs
Normal file
@@ -0,0 +1,57 @@
// UI Views module
mod execution_tab;
mod help_overlay;
mod job_detail;
mod logs_tab;
mod status_bar;
mod title_bar;
mod workflows_tab;

use crate::app::App;
use ratatui::{backend::CrosstermBackend, Frame};
use std::io;

// Main render function for the UI
pub fn render_ui(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &mut App) {
    // Check if help should be shown as an overlay
    if app.show_help {
        help_overlay::render_help_overlay(f);
        return;
    }

    let size = f.size();

    // Create main layout
    let main_chunks = ratatui::layout::Layout::default()
        .direction(ratatui::layout::Direction::Vertical)
        .constraints(
            [
                ratatui::layout::Constraint::Length(3), // Title bar and tabs
                ratatui::layout::Constraint::Min(5),    // Main content
                ratatui::layout::Constraint::Length(2), // Status bar
            ]
            .as_ref(),
        )
        .split(size);

    // Render title bar with tabs
    title_bar::render_title_bar(f, app, main_chunks[0]);

    // Render main content based on selected tab
    match app.selected_tab {
        0 => workflows_tab::render_workflows_tab(f, app, main_chunks[1]),
        1 => {
            if app.detailed_view {
                job_detail::render_job_detail_view(f, app, main_chunks[1])
            } else {
                execution_tab::render_execution_tab(f, app, main_chunks[1])
            }
        }
        2 => logs_tab::render_logs_tab(f, app, main_chunks[1]),
        3 => help_overlay::render_help_tab(f, main_chunks[1]),
        _ => {}
    }

    // Render status bar
    status_bar::render_status_bar(f, app, main_chunks[2]);
}

166
crates/ui/src/views/status_bar.rs
Normal file
@@ -0,0 +1,166 @@
// Status bar rendering
use crate::app::App;
use executor::RuntimeType;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Rect},
    style::{Color, Style},
    text::{Line, Span},
    widgets::Paragraph,
    Frame,
};
use std::io;

// Render the status bar
pub fn render_status_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    // If we have a status message, show it instead of the normal status bar
    if let Some(message) = &app.status_message {
        // Determine if this is a success message (starts with ✅)
        let is_success = message.starts_with("✅");

        let status_message = Paragraph::new(Line::from(vec![Span::styled(
            format!(" {} ", message),
            Style::default()
                .bg(if is_success { Color::Green } else { Color::Red })
                .fg(Color::White)
                .add_modifier(ratatui::style::Modifier::BOLD),
        )]))
        .alignment(Alignment::Center);

        f.render_widget(status_message, area);
        return;
    }

    // Normal status bar
    let mut status_items = vec![];

    // Add mode info
    status_items.push(Span::styled(
        format!(" {} ", app.runtime_type_name()),
        Style::default()
            .bg(match app.runtime_type {
                RuntimeType::Docker => Color::Blue,
                RuntimeType::Emulation => Color::Magenta,
            })
            .fg(Color::White),
    ));

    // Add Docker status if relevant
    if app.runtime_type == RuntimeType::Docker {
        // Check Docker silently using safe FD redirection
        let is_docker_available =
            match utils::fd::with_stderr_to_null(executor::docker::is_available) {
                Ok(result) => result,
                Err(_) => {
                    logging::debug("Failed to redirect stderr when checking Docker availability.");
                    false
                }
            };

        status_items.push(Span::raw(" "));
        status_items.push(Span::styled(
            if is_docker_available {
                " Docker: Connected "
            } else {
                " Docker: Not Available "
            },
            Style::default()
                .bg(if is_docker_available {
                    Color::Green
                } else {
                    Color::Red
                })
                .fg(Color::White),
        ));
    }

    // Add validation/execution mode
    status_items.push(Span::raw(" "));
    status_items.push(Span::styled(
        format!(
            " {} ",
            if app.validation_mode {
                "Validation"
            } else {
                "Execution"
            }
        ),
        Style::default()
            .bg(if app.validation_mode {
                Color::Yellow
            } else {
                Color::Green
            })
            .fg(Color::Black),
    ));

    // Add context-specific help based on current tab
    status_items.push(Span::raw(" "));
    let help_text = match app.selected_tab {
        0 => {
            if let Some(idx) = app.workflow_list_state.selected() {
                if idx < app.workflows.len() {
                    let workflow = &app.workflows[idx];
                    match workflow.status {
                        crate::models::WorkflowStatus::NotStarted => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [t] Trigger Workflow [Shift+R] Reset workflow",
                        crate::models::WorkflowStatus::Running => "[Space] Toggle selection [Enter] Run selected [r] Run all selected (Workflow running...)",
                        crate::models::WorkflowStatus::Success | crate::models::WorkflowStatus::Failed | crate::models::WorkflowStatus::Skipped => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [Shift+R] Reset workflow",
                    }
                } else {
                    "[Space] Toggle selection [Enter] Run selected [r] Run all selected"
                }
            } else {
                "[Space] Toggle selection [Enter] Run selected [r] Run all selected"
            }
        }
        1 => {
            if app.detailed_view {
                "[Esc] Back to jobs [↑/↓] Navigate steps"
            } else {
                "[Enter] View details [↑/↓] Navigate jobs"
            }
        }
        2 => {
            // For logs tab, show scrolling instructions
            let log_count = app.logs.len() + logging::get_logs().len();
            if log_count > 0 {
                // Convert to a static string for consistent return type
                let scroll_text = format!(
                    "[↑/↓] Scroll logs ({}/{}) [s] Search [f] Filter",
                    app.log_scroll + 1,
                    log_count
                );
                Box::leak(scroll_text.into_boxed_str())
            } else {
                "[No logs to display]"
            }
        }
        3 => "[?] Toggle help overlay",
        _ => "",
    };
    status_items.push(Span::styled(
        format!(" {} ", help_text),
        Style::default().fg(Color::White),
    ));

    // Show keybindings for common actions
    status_items.push(Span::raw(" "));
    status_items.push(Span::styled(
        " [Tab] Switch tabs ",
        Style::default().fg(Color::White),
    ));
    status_items.push(Span::styled(
        " [?] Help ",
        Style::default().fg(Color::White),
    ));
    status_items.push(Span::styled(
        " [q] Quit ",
        Style::default().fg(Color::White),
    ));

    let status_bar = Paragraph::new(Line::from(status_items))
        .style(Style::default().bg(Color::DarkGray))
        .alignment(Alignment::Left);

    f.render_widget(status_bar, area);
}

74
crates/ui/src/views/title_bar.rs
Normal file
@@ -0,0 +1,74 @@
// Title bar rendering
use crate::app::App;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Tabs},
    Frame,
};
use std::io;

// Render the title bar with tabs
pub fn render_title_bar(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, area: Rect) {
    let titles = ["Workflows", "Execution", "Logs", "Help"];
    let tabs = Tabs::new(
        titles
            .iter()
            .enumerate()
            .map(|(i, t)| {
                if i == 1 {
                    // Special case for "Execution"
                    let e_part = &t[0..1]; // "E"
                    let x_part = &t[1..2]; // "x"
                    let rest = &t[2..]; // "ecution"
                    Line::from(vec![
                        Span::styled(e_part, Style::default().fg(Color::White)),
                        Span::styled(
                            x_part,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                } else {
                    // Original styling for other tabs
                    let (first, rest) = t.split_at(1);
                    Line::from(vec![
                        Span::styled(
                            first,
                            Style::default()
                                .fg(Color::Yellow)
                                .add_modifier(Modifier::UNDERLINED),
                        ),
                        Span::styled(rest, Style::default().fg(Color::White)),
                    ])
                }
            })
            .collect(),
    )
    .block(
        Block::default()
            .borders(Borders::ALL)
            .border_type(BorderType::Rounded)
            .title(Span::styled(
                " wrkflw ",
                Style::default()
                    .fg(Color::Cyan)
                    .add_modifier(Modifier::BOLD),
            ))
            .title_alignment(Alignment::Center),
    )
    .highlight_style(
        Style::default()
            .bg(Color::DarkGray)
            .fg(Color::Yellow)
            .add_modifier(Modifier::BOLD),
    )
    .select(app.selected_tab)
    .divider(Span::raw("|"));

    f.render_widget(tabs, area);
}

131
crates/ui/src/views/workflows_tab.rs
Normal file
@@ -0,0 +1,131 @@
// Workflows tab rendering
use crate::app::App;
use crate::models::WorkflowStatus;
use ratatui::{
    backend::CrosstermBackend,
    layout::{Alignment, Constraint, Direction, Layout, Rect},
    style::{Color, Modifier, Style},
    text::{Line, Span},
    widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState},
    Frame,
};
use std::io;

// Render the workflow list tab
pub fn render_workflows_tab(
    f: &mut Frame<CrosstermBackend<io::Stdout>>,
    app: &mut App,
    area: Rect,
) {
    // Create a more structured layout for the workflow tab
    let chunks = Layout::default()
        .direction(Direction::Vertical)
        .constraints(
            [
                Constraint::Length(3), // Header with instructions
                Constraint::Min(5),    // Workflow list
            ]
            .as_ref(),
        )
        .margin(1)
        .split(area);

    // Render header with instructions
    let header_text = vec![
        Line::from(vec![Span::styled(
            "Available Workflows",
            Style::default()
                .fg(Color::Yellow)
                .add_modifier(Modifier::BOLD),
        )]),
        Line::from(vec![
            Span::styled("Space", Style::default().fg(Color::Cyan)),
            Span::raw(": Toggle selection "),
            Span::styled("Enter", Style::default().fg(Color::Cyan)),
            Span::raw(": Run "),
            Span::styled("t", Style::default().fg(Color::Cyan)),
            Span::raw(": Trigger remotely"),
        ]),
    ];

    let header = Paragraph::new(header_text)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded),
        )
        .alignment(Alignment::Center);

    f.render_widget(header, chunks[0]);

    // Create a table for workflows instead of a list for better organization
    let selected_style = Style::default()
        .bg(Color::DarkGray)
        .add_modifier(Modifier::BOLD);

    // Normal style definition removed as it was unused

    let header_cells = ["", "Status", "Workflow Name", "Path"]
        .iter()
        .map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow)));

    let header = Row::new(header_cells)
        .style(Style::default().add_modifier(Modifier::BOLD))
        .height(1);

    let rows = app.workflows.iter().map(|workflow| {
        // Create cells for each column
        let checkbox = if workflow.selected { "✓" } else { " " };

        let (status_symbol, status_style) = match workflow.status {
            WorkflowStatus::NotStarted => ("○", Style::default().fg(Color::Gray)),
            WorkflowStatus::Running => ("⟳", Style::default().fg(Color::Cyan)),
            WorkflowStatus::Success => ("✅", Style::default().fg(Color::Green)),
            WorkflowStatus::Failed => ("❌", Style::default().fg(Color::Red)),
            WorkflowStatus::Skipped => ("⏭", Style::default().fg(Color::Yellow)),
        };

        let path_display = workflow.path.to_string_lossy();
        let path_shortened = if path_display.len() > 30 {
            format!("...{}", &path_display[path_display.len() - 30..])
        } else {
            path_display.to_string()
        };

        Row::new(vec![
            Cell::from(checkbox).style(Style::default().fg(Color::Green)),
            Cell::from(status_symbol).style(status_style),
            Cell::from(workflow.name.clone()),
            Cell::from(path_shortened).style(Style::default().fg(Color::DarkGray)),
        ])
    });

    let workflows_table = Table::new(rows)
        .header(header)
        .block(
            Block::default()
                .borders(Borders::ALL)
                .border_type(BorderType::Rounded)
                .title(Span::styled(
                    " Workflows ",
                    Style::default().fg(Color::Yellow),
                )),
        )
        .highlight_style(selected_style)
        .highlight_symbol("» ")
        .widths(&[
            Constraint::Length(3),      // Checkbox column
            Constraint::Length(4),      // Status icon column
            Constraint::Percentage(45), // Name column
            Constraint::Percentage(45), // Path column
        ]);

    // We need to convert ListState to TableState
    let mut table_state = TableState::default();
    table_state.select(app.workflow_list_state.selected());

    f.render_stateful_widget(workflows_table, chunks[1], &mut table_state);

    // Update the app list state to match the table state
    app.workflow_list_state.select(table_state.selected());
}

15
crates/utils/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
[package]
name = "utils"
version.workspace = true
edition.workspace = true
description = "utility functions for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
serde.workspace = true
serde_yaml.workspace = true
nix.workspace = true
@@ -1,6 +1,17 @@
// utils crate

use std::path::Path;

pub fn is_workflow_file(path: &Path) -> bool {
    // First, check for GitLab CI files by name
    if let Some(file_name) = path.file_name() {
        let file_name_str = file_name.to_string_lossy().to_lowercase();
        if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
            return true;
        }
    }

    // Then check for GitHub Actions workflows
    if let Some(ext) = path.extension() {
        if ext == "yml" || ext == "yaml" {
            // Check if the file is in a .github/workflows directory
@@ -47,7 +58,7 @@ pub mod fd {
        // Duplicate the current stderr fd
        let stderr_backup = match dup(STDERR_FILENO) {
            Ok(fd) => fd,
            Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
            Err(e) => return Err(io::Error::other(e)),
        };

        // Open /dev/null
@@ -55,7 +66,7 @@ pub mod fd {
            Ok(fd) => fd,
            Err(e) => {
                let _ = close(stderr_backup); // Clean up on error
                return Err(io::Error::new(io::ErrorKind::Other, e));
                return Err(io::Error::other(e));
            }
        };

@@ -63,7 +74,7 @@ pub mod fd {
        if let Err(e) = dup2(null_fd, STDERR_FILENO) {
            let _ = close(stderr_backup); // Clean up on error
            let _ = close(null_fd);
            return Err(io::Error::new(io::ErrorKind::Other, e));
            return Err(io::Error::other(e));
        }

        Ok(RedirectedStderr {
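
For reference, the name-based branch added above behaves like this (illustrative only; the .github/workflows directory check is elided in the hunk, and the `utils::` path assumes the crate is used as a dependency):

use std::path::Path;

fn demo() {
    // GitLab CI files match on the file name alone, case-insensitively.
    assert!(utils::is_workflow_file(Path::new(".gitlab-ci.yml")));
    assert!(utils::is_workflow_file(Path::new("ci/app.gitlab-ci.yml")));
}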
15
crates/validators/Cargo.toml
Normal file
15
crates/validators/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
|
||||
name = "validators"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
description = "validation functionality for wrkflw"
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
# Internal crates
|
||||
models = { path = "../models" }
|
||||
matrix = { path = "../matrix" }
|
||||
|
||||
# External dependencies
|
||||
serde.workspace = true
|
||||
serde_yaml.workspace = true
|
@@ -1,4 +1,4 @@
use crate::models::ValidationResult;
use models::ValidationResult;

pub fn validate_action_reference(
    action_ref: &str,

234
crates/validators/src/gitlab.rs
Normal file
@@ -0,0 +1,234 @@
use models::gitlab::{Job, Pipeline};
use models::ValidationResult;
use std::collections::HashMap;

/// Validate a GitLab CI/CD pipeline
pub fn validate_gitlab_pipeline(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Basic structure validation
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Validate jobs
    validate_jobs(&pipeline.jobs, &mut result);

    // Validate stages if defined
    if let Some(stages) = &pipeline.stages {
        validate_stages(stages, &pipeline.jobs, &mut result);
    }

    // Validate dependencies
    validate_dependencies(&pipeline.jobs, &mut result);

    // Validate extends
    validate_extends(&pipeline.jobs, &mut result);

    // Validate artifacts
    validate_artifacts(&pipeline.jobs, &mut result);

    result
}

/// Validate GitLab CI/CD jobs
fn validate_jobs(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }

        // Check when value if present
        if let Some(when) = &job.when {
            match when.as_str() {
                "on_success" | "on_failure" | "always" | "manual" | "never" => {
                    // Valid when value
                }
                _ => {
                    result.add_issue(format!(
                        "Job '{}' has invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always, manual, never",
                        job_name, when
                    ));
                }
            }
        }

        // Check retry configuration
        if let Some(retry) = &job.retry {
            match retry {
                models::gitlab::Retry::MaxAttempts(attempts) => {
                    if *attempts > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, attempts
                        ));
                    }
                }
                models::gitlab::Retry::Detailed { max, when: _ } => {
                    if *max > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, max
                        ));
                    }
                }
            }
        }
    }
}

/// Validate GitLab CI/CD stages
fn validate_stages(stages: &[String], jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check that all jobs reference existing stages
    for (job_name, job) in jobs {
        if let Some(stage) = &job.stage {
            if !stages.contains(stage) {
                result.add_issue(format!(
                    "Job '{}' references undefined stage '{}'. Available stages are: {}",
                    job_name,
                    stage,
                    stages.join(", ")
                ));
            }
        }
    }

    // Check for unused stages
    for stage in stages {
        let used = jobs.values().any(|job| {
            if let Some(job_stage) = &job.stage {
                job_stage == stage
            } else {
                false
            }
        });

        if !used {
            result.add_issue(format!(
                "Stage '{}' is defined but not used by any job",
                stage
            ));
        }
    }
}

/// Validate GitLab CI/CD job dependencies
fn validate_dependencies(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                } else if job_name == dependency {
                    result.add_issue(format!("Job '{}' cannot depend on itself", job_name));
                }
            }
        }
    }
}

/// Validate GitLab CI/CD job extends
fn validate_extends(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check for circular extends
    for (job_name, job) in jobs {
        if let Some(extends) = &job.extends {
            // Check that all extended jobs exist
            for extend in extends {
                if !jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                    continue;
                }

                // Check for circular extends
                let mut visited = vec![job_name.clone()];
                check_circular_extends(extend, jobs, &mut visited, result);
            }
        }
    }
}

/// Helper function to detect circular extends
fn check_circular_extends(
    job_name: &str,
    jobs: &HashMap<String, Job>,
    visited: &mut Vec<String>,
    result: &mut ValidationResult,
) {
    visited.push(job_name.to_string());

    if let Some(job) = jobs.get(job_name) {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if visited.contains(&extend.to_string()) {
                    // Circular dependency detected
                    let cycle = visited
                        .iter()
                        .skip(visited.iter().position(|x| x == extend).unwrap())
                        .chain(std::iter::once(extend))
                        .cloned()
                        .collect::<Vec<_>>()
                        .join(" -> ");

                    result.add_issue(format!("Circular extends detected: {}", cycle));
                    return;
                }

                check_circular_extends(extend, jobs, visited, result);
            }
        }
    }

    visited.pop();
}
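
The traversal above is a plain depth-first search with a `visited` path stack. A self-contained sketch of the same idea over bare name-to-extends edges (a hypothetical helper for illustration, not part of the diff):

use std::collections::HashMap;

// Returns the first "a -> b -> a"-style cycle reachable from `start`,
// mirroring the logic of check_circular_extends above.
fn find_cycle(
    start: &str,
    edges: &HashMap<String, Vec<String>>,
    visited: &mut Vec<String>,
) -> Option<String> {
    visited.push(start.to_string());
    if let Some(exts) = edges.get(start) {
        for e in exts {
            if visited.contains(e) {
                // Cycle found: report the path from the first occurrence.
                let pos = visited.iter().position(|x| x == e).unwrap();
                let mut cycle = visited[pos..].to_vec();
                cycle.push(e.clone());
                return Some(cycle.join(" -> "));
            }
            if let Some(c) = find_cycle(e, edges, visited) {
                return Some(c);
            }
        }
    }
    visited.pop();
    None
}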
/// Validate GitLab CI/CD job artifacts
fn validate_artifacts(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(artifacts) = &job.artifacts {
            // Check that paths are specified
            if let Some(paths) = &artifacts.paths {
                if paths.is_empty() {
                    result.add_issue(format!(
                        "Job '{}' has artifacts section with empty paths",
                        job_name
                    ));
                }
            } else {
                result.add_issue(format!(
                    "Job '{}' has artifacts section without specifying paths",
                    job_name
                ));
            }

            // Check for valid 'when' value if present
            if let Some(when) = &artifacts.when {
                match when.as_str() {
                    "on_success" | "on_failure" | "always" => {
                        // Valid when value
                    }
                    _ => {
                        result.add_issue(format!(
                            "Job '{}' has artifacts with invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always",
                            job_name, when
                        ));
                    }
                }
            }
        }
    }
}
@@ -1,5 +1,5 @@
use crate::models::ValidationResult;
use crate::validators::{validate_matrix, validate_steps};
use crate::{validate_matrix, validate_steps};
use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_jobs(jobs: &Value, result: &mut ValidationResult) {
@@ -12,34 +12,55 @@ pub fn validate_jobs(jobs: &Value, result: &mut ValidationResult) {
    for (job_name, job_config) in jobs_map {
        if let Some(job_name) = job_name.as_str() {
            if let Some(job_config) = job_config.as_mapping() {
                // Check for required 'runs-on'
                if !job_config.contains_key(Value::String("runs-on".to_string())) {
                // Check if this is a reusable workflow job (has 'uses' field)
                let is_reusable_workflow =
                    job_config.contains_key(Value::String("uses".to_string()));

                // Only check for 'runs-on' if it's not a reusable workflow
                if !is_reusable_workflow
                    && !job_config.contains_key(Value::String("runs-on".to_string()))
                {
                    result.add_issue(format!("Job '{}' is missing 'runs-on' field", job_name));
                }

                // Check for steps
                match job_config.get(Value::String("steps".to_string())) {
                    Some(Value::Sequence(steps)) => {
                        if steps.is_empty() {
                // Only check for steps if it's not a reusable workflow
                if !is_reusable_workflow {
                    match job_config.get(Value::String("steps".to_string())) {
                        Some(Value::Sequence(steps)) => {
                            if steps.is_empty() {
                                result.add_issue(format!(
                                    "Job '{}' has empty 'steps' section",
                                    job_name
                                ));
                            } else {
                                validate_steps(steps, job_name, result);
                            }
                        }
                        Some(_) => {
                            result.add_issue(format!(
                            "Job '{}' has empty 'steps' section",
                                "Job '{}': 'steps' section is not a sequence",
                                job_name
                            ));
                        }
                        None => {
                            result.add_issue(format!(
                                "Job '{}' is missing 'steps' section",
                                job_name
                            ));
                        } else {
                            validate_steps(steps, job_name, result);
                        }
                    }
                    Some(_) => {
                        result.add_issue(format!(
                            "Job '{}': 'steps' section is not a sequence",
                            job_name
                        ));
                    }
                    None => {
                        result.add_issue(format!(
                            "Job '{}' is missing 'steps' section",
                            job_name
                        ));
                } else {
                    // For reusable workflows, validate the 'uses' field format
                    if let Some(Value::String(uses)) =
                        job_config.get(Value::String("uses".to_string()))
                    {
                        // Simple validation for reusable workflow reference format
                        if !uses.contains('/') || !uses.contains('.') {
                            result.add_issue(format!(
                                "Job '{}': Invalid reusable workflow reference format '{}'",
                                job_name, uses
                            ));
                        }
                    }
                }
@@ -1,10 +1,14 @@
// validators crate

mod actions;
mod gitlab;
mod jobs;
mod matrix;
mod steps;
mod triggers;

pub use actions::validate_action_reference;
pub use gitlab::validate_gitlab_pipeline;
pub use jobs::validate_jobs;
pub use matrix::validate_matrix;
pub use steps::validate_steps;
@@ -1,4 +1,4 @@
use crate::models::ValidationResult;
use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_matrix(matrix: &Value, result: &mut ValidationResult) {
@@ -1,8 +1,11 @@
use crate::models::ValidationResult;
use crate::validators::validate_action_reference;
use crate::validate_action_reference;
use models::ValidationResult;
use serde_yaml::Value;
use std::collections::HashSet;

pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationResult) {
    let mut step_ids: HashSet<String> = HashSet::new();

    for (i, step) in steps.iter().enumerate() {
        if let Some(step_map) = step.as_mapping() {
            if !step_map.contains_key(Value::String("name".to_string()))
@@ -27,6 +30,18 @@ pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationRe
                ));
            }

            // Check for duplicate step IDs
            if let Some(Value::String(id)) = step_map.get(Value::String("id".to_string())) {
                if !step_ids.insert(id.clone()) {
                    result.add_issue(format!(
                        "Job '{}', step {}: The identifier '{}' may not be used more than once within the same scope",
                        job_name,
                        i + 1,
                        id
                    ));
                }
            }

            // Validate action reference if 'uses' is present
            if let Some(Value::String(uses)) = step_map.get(Value::String("uses".to_string())) {
                validate_action_reference(uses, job_name, i, result);
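
The duplicate-id rule added above can be exercised directly. A minimal sketch, assuming the `validators` and `models` crates from this workspace plus serde_yaml (illustrative only):

use models::ValidationResult;
use serde_yaml::Value;

fn demo() {
    // Two steps sharing id 'build' should produce one duplicate-id issue.
    let steps: Vec<Value> = serde_yaml::from_str(
        "- { name: a, run: echo a, id: build }\n- { name: b, run: echo b, id: build }",
    )
    .unwrap();
    let mut result = ValidationResult::new();
    validators::validate_steps(&steps, "test-job", &mut result);
}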
@@ -1,4 +1,4 @@
use crate::models::ValidationResult;
use models::ValidationResult;
use serde_yaml::Value;

pub fn validate_triggers(on: &Value, result: &mut ValidationResult) {

65
crates/wrkflw/Cargo.toml
Normal file
@@ -0,0 +1,65 @@
[package]
name = "wrkflw"
version.workspace = true
edition.workspace = true
description.workspace = true
documentation.workspace = true
homepage.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true
license.workspace = true

[dependencies]
# Workspace crates
models = { path = "../models" }
executor = { path = "../executor" }
github = { path = "../github" }
gitlab = { path = "../gitlab" }
logging = { path = "../logging" }
matrix = { path = "../matrix" }
parser = { path = "../parser" }
runtime = { path = "../runtime" }
ui = { path = "../ui" }
utils = { path = "../utils" }
validators = { path = "../validators" }
evaluator = { path = "../evaluator" }

# External dependencies
clap.workspace = true
bollard.workspace = true
tokio.workspace = true
futures-util.workspace = true
futures.workspace = true
chrono.workspace = true
uuid.workspace = true
tempfile.workspace = true
dirs.workspace = true
thiserror.workspace = true
log.workspace = true
regex.workspace = true
lazy_static.workspace = true
reqwest.workspace = true
libc.workspace = true
nix.workspace = true
urlencoding.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
colored.workspace = true
indexmap.workspace = true
rayon.workspace = true
num_cpus.workspace = true
itertools.workspace = true
once_cell.workspace = true
crossterm.workspace = true
ratatui.workspace = true
walkdir = "2.4"

[lib]
name = "wrkflw_lib"
path = "src/lib.rs"

[[bin]]
name = "wrkflw"
path = "src/main.rs"

12
crates/wrkflw/src/lib.rs
Normal file
@@ -0,0 +1,12 @@
pub use evaluator;
pub use executor;
pub use github;
pub use gitlab;
pub use logging;
pub use matrix;
pub use models;
pub use parser;
pub use runtime;
pub use ui;
pub use utils;
pub use validators;

654
crates/wrkflw/src/main.rs
Normal file
@@ -0,0 +1,654 @@
use bollard::Docker;
use clap::{Parser, Subcommand};
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(
    name = "wrkflw",
    about = "GitHub & GitLab CI/CD validator and executor",
    version,
    long_about = "A CI/CD validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw run .gitlab-ci.yml # Run a GitLab CI pipeline\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker\n wrkflw run --preserve-containers-on-failure .github/workflows/build.yml # Keep failed containers for debugging"
)]
struct Wrkflw {
    #[command(subcommand)]
    command: Option<Commands>,

    /// Run in verbose mode with detailed output
    #[arg(short, long, global = true)]
    verbose: bool,

    /// Run in debug mode with extensive execution details
    #[arg(short, long, global = true)]
    debug: bool,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Validate workflow or pipeline files
    Validate {
        /// Path to workflow/pipeline file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Explicitly validate as GitLab CI/CD pipeline
        #[arg(long)]
        gitlab: bool,

        /// Set exit code to 1 on validation failure
        #[arg(long = "exit-code", default_value_t = true)]
        exit_code: bool,

        /// Don't set exit code to 1 on validation failure (overrides --exit-code)
        #[arg(long = "no-exit-code", conflicts_with = "exit_code")]
        no_exit_code: bool,
    },

    /// Execute workflow or pipeline files locally
    Run {
        /// Path to workflow/pipeline file to execute
        path: PathBuf,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,

        /// Preserve Docker containers on failure for debugging (Docker mode only)
        #[arg(long)]
        preserve_containers_on_failure: bool,

        /// Explicitly run as GitLab CI/CD pipeline
        #[arg(long)]
        gitlab: bool,
    },

    /// Open TUI interface to manage workflows
    Tui {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,

        /// Show 'Would execute GitHub action' messages in emulation mode
        #[arg(long, default_value_t = false)]
        show_action_messages: bool,

        /// Preserve Docker containers on failure for debugging (Docker mode only)
        #[arg(long)]
        preserve_containers_on_failure: bool,
    },

    /// Trigger a GitHub workflow remotely
    Trigger {
        /// Name of the workflow file (without .yml extension)
        workflow: String,

        /// Branch to run the workflow on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value inputs for the workflow in format key=value
        #[arg(short, long, value_parser = parse_key_val)]
        input: Option<Vec<(String, String)>>,
    },

    /// Trigger a GitLab pipeline remotely
    TriggerGitlab {
        /// Branch to run the pipeline on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value variables for the pipeline in format key=value
        #[arg(short = 'V', long, value_parser = parse_key_val)]
        variable: Option<Vec<(String, String)>>,
    },

    /// List available workflows and pipelines
    List,
}

// Parser function for key-value pairs
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}
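
As a quick sanity check, parse_key_val above splits on the first '=' and keeps everything after it, so values may themselves contain '=' (illustrative only):

fn demo() {
    assert_eq!(
        parse_key_val("region=eu-west-1"),
        Ok(("region".to_string(), "eu-west-1".to_string()))
    );
    // The first '=' wins; the rest stays in the value.
    assert_eq!(
        parse_key_val("expr=a=b"),
        Ok(("expr".to_string(), "a=b".to_string()))
    );
    assert!(parse_key_val("novalue").is_err());
}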
// Make this function public for testing? Or move to a utils/cleanup mod?
// Or call executor::cleanup and runtime::cleanup directly?
// Let's try calling them directly for now.
async fn cleanup_on_exit() {
    // Clean up Docker resources if available, but don't let it block indefinitely
    match tokio::time::timeout(std::time::Duration::from_secs(3), async {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => {
                // Assuming cleanup_resources exists in executor crate
                executor::cleanup_resources(&docker).await;
            }
            Err(_) => {
                // Docker not available
                logging::info("Docker not available, skipping Docker cleanup");
            }
        }
    })
    .await
    {
        Ok(_) => logging::debug("Docker cleanup completed successfully"),
        Err(_) => {
            logging::warning("Docker cleanup timed out after 3 seconds, continuing with shutdown")
        }
    }

    // Always clean up emulation resources
    match tokio::time::timeout(
        std::time::Duration::from_secs(2),
        // Assuming cleanup_resources exists in runtime::emulation module
        runtime::emulation::cleanup_resources(),
    )
    .await
    {
        Ok(_) => logging::debug("Emulation cleanup completed successfully"),
        Err(_) => logging::warning("Emulation cleanup timed out, continuing with shutdown"),
    }

    logging::info("Resource cleanup completed");
}

async fn handle_signals() {
    // Set up a hard exit timer in case cleanup takes too long
    // This ensures the app always exits even if Docker operations are stuck
    let hard_exit_time = std::time::Duration::from_secs(10);

    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Set up a watchdog thread that will force exit if cleanup takes too long
    // This is important because Docker operations can sometimes hang indefinitely
    let _ = std::thread::spawn(move || {
        std::thread::sleep(hard_exit_time);
        eprintln!(
            "Cleanup taking too long (over {} seconds), forcing exit...",
            hard_exit_time.as_secs()
        );
        logging::error("Forced exit due to cleanup timeout");
        std::process::exit(1);
    });

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status - the force exit thread will be terminated automatically
    std::process::exit(0);
}

/// Determines if a file is a GitLab CI/CD pipeline based on its name and content
fn is_gitlab_pipeline(path: &Path) -> bool {
    // First check the file name
    if let Some(file_name) = path.file_name() {
        if let Some(file_name_str) = file_name.to_str() {
            if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
                return true;
            }
        }
    }

    // Check if file is in .gitlab/ci directory
    if let Some(parent) = path.parent() {
        if let Some(parent_str) = parent.to_str() {
            if parent_str.ends_with(".gitlab/ci")
                && path
                    .extension()
                    .is_some_and(|ext| ext == "yml" || ext == "yaml")
            {
                return true;
            }
        }
    }

    // If file exists, check the content
    if path.exists() {
        if let Ok(content) = std::fs::read_to_string(path) {
            // GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
            if content.contains("stages:")
                || content.contains("before_script:")
                || content.contains("after_script:")
            {
                // Check for GitHub Actions specific keys that would indicate it's not GitLab
                if !content.contains("on:")
                    && !content.contains("runs-on:")
                    && !content.contains("uses:")
                {
                    return true;
                }
            }
        }
    }

    false
}
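
The detection order above means a matching file name short-circuits any content inspection (illustrative only):

use std::path::Path;

fn demo() {
    // Matched by name alone; the file does not need to exist.
    assert!(is_gitlab_pipeline(Path::new(".gitlab-ci.yml")));
    // A generic, nonexistent YAML path fails all three heuristics.
    assert!(!is_gitlab_pipeline(Path::new("pipeline.yml")));
}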
||||
|
||||
#[tokio::main]
async fn main() {
    let cli = Wrkflw::parse();
    let verbose = cli.verbose;
    let debug = cli.debug;

    // Set the log level based on command line flags
    if debug {
        logging::set_log_level(logging::LogLevel::Debug);
        logging::debug("Debug mode enabled - showing detailed logs");
    } else if verbose {
        logging::set_log_level(logging::LogLevel::Info);
        logging::info("Verbose mode enabled");
    } else {
        logging::set_log_level(logging::LogLevel::Warning);
    }

    // Set up a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate {
            path,
            gitlab,
            exit_code,
            no_exit_code,
        }) => {
            // Determine the path to validate
            let validate_path = path
                .clone()
                .unwrap_or_else(|| PathBuf::from(".github/workflows"));

            // Check that the path exists
            if !validate_path.exists() {
                eprintln!("Error: Path does not exist: {}", validate_path.display());
                std::process::exit(1);
            }

            // Determine if we're validating a GitLab pipeline based on the --gitlab flag or file detection
            let force_gitlab = *gitlab;
            let mut validation_failed = false;

            if validate_path.is_dir() {
                // Validate all workflow files in the directory
                let entries = std::fs::read_dir(&validate_path)
                    .expect("Failed to read directory")
                    .filter_map(|entry| entry.ok())
                    .filter(|entry| {
                        entry.path().is_file()
                            && entry
                                .path()
                                .extension()
                                .is_some_and(|ext| ext == "yml" || ext == "yaml")
                    })
                    .collect::<Vec<_>>();

                println!("Validating {} workflow file(s)...", entries.len());

                for entry in entries {
                    let path = entry.path();
                    let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);

                    let file_failed = if is_gitlab {
                        validate_gitlab_pipeline(&path, verbose)
                    } else {
                        validate_github_workflow(&path, verbose)
                    };

                    if file_failed {
                        validation_failed = true;
                    }
                }
            } else {
                // Validate a single workflow file
                let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);

                validation_failed = if is_gitlab {
                    validate_gitlab_pipeline(&validate_path, verbose)
                } else {
                    validate_github_workflow(&validate_path, verbose)
                };
            }

            // Exit non-zero if validation failed, exit_code is set, and no_exit_code is not
            if validation_failed && *exit_code && !*no_exit_code {
                std::process::exit(1);
            }
        }
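        // Example invocations for the Validate arm above (hypothetical; flag names
        // assume clap's default kebab-case derive for the fields destructured here):
        // `wrkflw validate`, `wrkflw validate ci/ --gitlab`,
        // `wrkflw validate .github/workflows/build.yml --no-exit-code`.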
        Some(Commands::Run {
            path,
            emulate,
            show_action_messages: _,
            preserve_containers_on_failure,
            gitlab,
        }) => {
            // Create the execution configuration
            let config = executor::ExecutionConfig {
                runtime_type: if *emulate {
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                },
                verbose,
                preserve_containers_on_failure: *preserve_containers_on_failure,
            };

            // Check if we're explicitly or implicitly running a GitLab pipeline
            let is_gitlab = *gitlab || is_gitlab_pipeline(path);
            let workflow_type = if is_gitlab {
                "GitLab CI pipeline"
            } else {
                "GitHub workflow"
            };

            logging::info(&format!("Running {} at: {}", workflow_type, path.display()));

            // Execute the workflow
            let result = executor::execute_workflow(path, config)
                .await
                .unwrap_or_else(|e| {
                    eprintln!("Error executing workflow: {}", e);
                    std::process::exit(1);
                });

            // Print an execution summary
            if result.failure_details.is_some() {
                eprintln!("❌ Workflow execution failed:");
                if let Some(details) = result.failure_details {
                    if verbose {
                        // Show full error details in verbose mode
                        eprintln!("{}", details);
                    } else {
                        // Show simplified error info in non-verbose mode
                        let simplified_error = details
                            .lines()
                            .filter(|line| line.contains("❌") || line.trim().starts_with("Error:"))
                            .take(5) // Limit to the first 5 error lines
                            .collect::<Vec<&str>>()
                            .join("\n");

                        eprintln!("{}", simplified_error);

                        if details.lines().count() > 5 {
                            eprintln!("\nUse --verbose flag to see full error details");
                        }
                    }
                }
                std::process::exit(1);
            } else {
                println!("✅ Workflow execution completed successfully!");

                // Always print a summary of executed jobs
                println!("\nJob summary:");
                for job in result.jobs {
                    println!(
                        " {} {} ({})",
                        match job.status {
                            executor::JobStatus::Success => "✅",
                            executor::JobStatus::Failure => "❌",
                            executor::JobStatus::Skipped => "⏭️",
                        },
                        job.name,
                        match job.status {
                            executor::JobStatus::Success => "success",
                            executor::JobStatus::Failure => "failure",
                            executor::JobStatus::Skipped => "skipped",
                        }
                    );

                    // Always show steps, not just in debug mode
                    println!(" Steps:");
                    for step in job.steps {
                        let step_status = match step.status {
                            executor::StepStatus::Success => "✅",
                            executor::StepStatus::Failure => "❌",
                            executor::StepStatus::Skipped => "⏭️",
                        };

                        println!(" {} {}", step_status, step.name);

                        // If the step failed and we're not in verbose mode, show condensed error info
                        if step.status == executor::StepStatus::Failure && !verbose {
                            // Extract error information from the step output
                            let error_lines = step
                                .output
                                .lines()
                                .filter(|line| {
                                    line.contains("error:")
                                        || line.contains("Error:")
                                        || line.trim().starts_with("Exit code:")
                                        || line.contains("failed")
                                })
                                .take(3) // Limit to the 3 most relevant error lines
                                .collect::<Vec<&str>>();

                            if !error_lines.is_empty() {
                                println!(" Error details:");
                                for line in error_lines {
                                    println!(" {}", line.trim());
                                }

                                if step.output.lines().count() > 3 {
                                    println!(" (Use --verbose for full output)");
                                }
                            }
                        }
                    }
                }
            }

            // Cleanup is handled automatically via the signal handler
        }
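        // Note on the Run arm above: Docker is the default runtime, and the
        // `emulate` flag switches to executor::RuntimeType::Emulation, which, as
        // the variant name suggests, runs the workflow without a Docker daemon
        // (an assumption inferred from the config above, not stated in this diff).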
        Some(Commands::TriggerGitlab { branch, variable }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let variables = variable
                .as_ref()
                .map(|v| v.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the pipeline
            if let Err(e) = gitlab::trigger_pipeline(branch.as_deref(), variables).await {
                eprintln!("Error triggering GitLab pipeline: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Tui {
            path,
            emulate,
            show_action_messages: _,
            preserve_containers_on_failure,
        }) => {
            // Set runtime type based on the emulate flag
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            // Call the TUI implementation from the ui crate
            if let Err(e) = ui::run_wrkflw_tui(
                path.as_ref(),
                runtime_type,
                verbose,
                *preserve_containers_on_failure,
            )
            .await
            {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
            let inputs = input
                .as_ref()
                .map(|i| i.iter().cloned().collect::<HashMap<String, String>>());

            // Trigger the workflow
            if let Err(e) = github::trigger_workflow(workflow, branch.as_deref(), inputs).await {
                eprintln!("Error triggering GitHub workflow: {}", e);
                std::process::exit(1);
            }
        }
        Some(Commands::List) => {
            list_workflows_and_pipelines(verbose);
        }
        None => {
            // Launch TUI by default when no command is provided
            let runtime_type = executor::RuntimeType::Docker;

            // Call the TUI implementation from the ui crate with default path
            if let Err(e) = ui::run_wrkflw_tui(None, runtime_type, verbose, false).await {
                eprintln!("Error running TUI: {}", e);
                std::process::exit(1);
            }
        }
    }
}
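
// A rough sketch of the clap subcommand enum that `Wrkflw::parse()` produces,
// inferred from the match arms above; variant and field names mirror the
// destructured patterns, while the field types are assumptions, so this is
// left commented out rather than presented as the crate's actual definition.
//
// #[derive(Subcommand)]
// enum Commands {
//     Validate { path: Option<PathBuf>, gitlab: bool, exit_code: bool, no_exit_code: bool },
//     Run { path: PathBuf, emulate: bool, show_action_messages: bool,
//           preserve_containers_on_failure: bool, gitlab: bool },
//     Tui { path: Option<PathBuf>, emulate: bool, show_action_messages: bool,
//           preserve_containers_on_failure: bool },
//     Trigger { workflow: String, branch: Option<String>, input: Option<Vec<(String, String)>> },
//     TriggerGitlab { branch: Option<String>, variable: Option<Vec<(String, String)>> },
//     List,
// }
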
/// Validate a GitHub workflow file
/// Returns true if validation failed, false if it passed
fn validate_github_workflow(path: &Path, verbose: bool) -> bool {
    print!("Validating GitHub workflow file: {}... ", path.display());

    // Use the ui crate's validate_workflow function
    match ui::validate_workflow(path, verbose) {
        Ok(_) => {
            // The detailed validation output is already printed by that function.
            // Since ui::validate_workflow doesn't return the validation result
            // directly, we call the evaluator ourselves to get the result.
            match evaluator::evaluate_workflow_file(path, verbose) {
                Ok(result) => !result.is_valid,
                Err(_) => true, // Parse errors count as validation failure
            }
        }
        Err(e) => {
            eprintln!("Error validating workflow: {}", e);
            true // Any error counts as validation failure
        }
    }
}

/// Validate a GitLab CI/CD pipeline file
/// Returns true if validation failed, false if it passed
fn validate_gitlab_pipeline(path: &Path, verbose: bool) -> bool {
    print!("Validating GitLab CI pipeline file: {}... ", path.display());

    // Parse and validate the pipeline file
    match parser::gitlab::parse_pipeline(path) {
        Ok(pipeline) => {
            println!("✅ Valid syntax");

            // Additional structural validation
            let validation_result = validators::validate_gitlab_pipeline(&pipeline);

            if !validation_result.is_valid {
                println!("⚠️ Validation issues:");
                for issue in validation_result.issues {
                    println!(" - {}", issue);
                }
                true // Validation failed
            } else {
                if verbose {
                    println!("✅ All validation checks passed");
                }
                false // Validation passed
            }
        }
        Err(e) => {
            println!("❌ Invalid");
            eprintln!("Validation failed: {}", e);
            true // Parse error counts as validation failure
        }
    }
}
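
// Worked example (hypothetical) for the function above: given a pipeline whose
// job is missing its `script:` key, parse_pipeline would typically still succeed,
// validators::validate_gitlab_pipeline reports the structural issue, and
// validate_gitlab_pipeline prints it and returns true so the `validate` command
// can exit non-zero.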
/// List available workflows and pipelines in the repository
fn list_workflows_and_pipelines(verbose: bool) {
    // Check for GitHub workflows
    let github_path = PathBuf::from(".github/workflows");
    if github_path.exists() && github_path.is_dir() {
        println!("GitHub Workflows:");

        let entries = std::fs::read_dir(&github_path)
            .expect("Failed to read directory")
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .path()
                        .extension()
                        .is_some_and(|ext| ext == "yml" || ext == "yaml")
            })
            .collect::<Vec<_>>();

        if entries.is_empty() {
            println!(" No workflow files found in .github/workflows");
        } else {
            for entry in entries {
                println!(" - {}", entry.path().display());
            }
        }
    } else {
        println!("GitHub Workflows: No .github/workflows directory found");
    }

    // Check for GitLab CI pipeline
    let gitlab_path = PathBuf::from(".gitlab-ci.yml");
    if gitlab_path.exists() && gitlab_path.is_file() {
        println!("GitLab CI Pipeline:");
        println!(" - {}", gitlab_path.display());
    } else {
        println!("GitLab CI Pipeline: No .gitlab-ci.yml file found");
    }

    // Check for other GitLab CI pipeline files
    if verbose {
        println!("Searching for other GitLab CI pipeline files...");

        let entries = walkdir::WalkDir::new(".")
            .follow_links(true)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .file_name()
                        .to_string_lossy()
                        .ends_with("gitlab-ci.yml")
                    && entry.path() != gitlab_path
            })
            .collect::<Vec<_>>();

        if !entries.is_empty() {
            println!("Additional GitLab CI Pipeline files:");
            for entry in entries {
                println!(" - {}", entry.path().display());
            }
        }
    }
}
774
red.cast
Normal file
774
red.cast
Normal file
@@ -0,0 +1,774 @@
{"version": 2, "width": 245, "height": 61, "timestamp": 1746300930, "env": {"SHELL": "/bin/zsh", "TERM": "xterm-256color"}}
[asciinema v2 recording: a zsh session in ~/projects/wrkflw that runs `cat test_gitlab_ci/minimal.gitlab-ci.yml` (a minimal pipeline: `image: rust:latest` with build and test jobs running `cargo build` and `cargo test`) and then `cargo r validate test_gitlab_ci/minimal.gitlab-ci.yml`; the remaining events are raw terminal escape sequences showing cargo compiling the project's 270 build units before the validation output]
|
||||
[25.278473, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-utils v0.8.21\r\n"]
|
||||
[25.278525, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 131/270: serde_derive, regex-syntax, futures-util, num-complex, encoding_rs, regex-automata, tokio, ahash(build.rs), num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
|
||||
[25.307289, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m anyhow v1.0.98\r\n"]
|
||||
[25.307434, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 132/270: serde_derive, regex-syntax, futures-util, num-complex, encoding_rs, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
|
||||
[25.338384, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-conv v0.1.0\r\n"]
|
||||
[25.338438, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 133/270: serde_derive, regex-syntax, futures-util, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, num-conv, thiserror-impl, crossbeam-utils(build.rs) \r"]
|
||||
[25.409289, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustix v0.38.44\r\n"]
|
||||
[25.409339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 134/270: serde_derive, regex-syntax, futures-util, rustix(build.rs), num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl, crossbeam-utils(build.rs) \r"]
|
||||
[25.447503, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strsim v0.11.1\r\n"]
|
||||
[25.447618, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 135/270: serde_derive, regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom, thiserror-impl \r"]
|
||||
[25.467725, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 136/270: thiserror, serde_derive, regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, anyhow(build.rs), tokio, num-rational, zerofrom \r"]
|
||||
[25.473512, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rustversion v1.0.20\r\n"]
|
||||
[25.473567, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 137/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, regex-automata, tokio, num-rational, zerofrom \r"]
|
||||
[25.477253, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time-core v0.1.4\r\n"]
|
||||
[25.477339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 138/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, num-complex, time-core, regex-automata, tokio, zerofrom \r"]
|
||||
[25.493979, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bit-vec v0.6.3\r\n"]
|
||||
[25.494049, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 139/270: thiserror, serde_derive, rustversion(build.rs), regex-syntax, futures-util, rustix(build.rs), strsim, time-core, regex-automata, bit-vec, tokio, zerofrom \r"]
|
||||
[25.498947, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m powerfmt v0.2.0\r\n"]
|
||||
[25.498992, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===========> ] 140/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, time-core, regex-automata, bit-vec, tokio, zerofrom \r"]
|
||||
[25.516976, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m yoke v0.7.5\r\n"]
|
||||
[25.517027, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 141/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, time-core, regex-automata, bit-vec, tokio, yoke \r"]
|
||||
[25.551361, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_lex v0.7.4\r\n"]
|
||||
[25.551606, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 142/270: thiserror, serde_derive, rustversion(build.rs), futures-util, rustix(build.rs), strsim, powerfmt, regex-automata, bit-vec, tokio, clap_lex, yoke \r"]
|
||||
[25.583082, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m heck v0.5.0\r\n"]
|
||||
[25.583155, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 143/270: thiserror, serde_derive, rustversion(build.rs), futures-util, strsim, powerfmt, regex-automata, bit-vec, tokio, clap_lex, yoke, heck \r"]
|
||||
[25.620283, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bit-set v0.5.3\r\n"]
|
||||
[25.620448, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 144/270: thiserror, serde_derive, rustversion(build.rs), futures-util, strsim, regex-automata, bit-vec, tokio, clap_lex, yoke, bit-set, heck \r"]
|
||||
[25.620888, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 145/270: thiserror, serde_derive, rustix(build), rustversion(build.rs), futures-util, strsim, regex-automata, tokio, clap_lex, yoke, bit-set, heck \r"]
|
||||
[25.668499, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 146/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, clap_lex, yoke, bit-set, rustversion(build), heck \r"]
|
||||
[25.673766, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_builder v4.5.34\r\n"]
|
||||
[25.673848, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 147/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, bit-set, rustversion(build), heck, clap_builder \r"]
|
||||
[25.687039, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m deranged v0.4.1\r\n"]
|
||||
[25.687142, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 148/270: thiserror, serde_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, rustversion(build), heck, clap_builder, deranged \r"]
|
||||
[25.694934, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap_derive v4.5.32\r\n"]
|
||||
[25.69508, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 149/270: thiserror, serde_derive, clap_derive, rustix(build), futures-util, strsim, regex-automata, tokio, yoke, rustversion(build), clap_builder, deranged \r"]
|
||||
[25.702353, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time-macros v0.2.22\r\n"]
|
||||
[25.702382, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 150/270: thiserror, serde_derive, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), clap_builder, deranged \r"]
|
||||
[25.946083, "o", "\u001b[1m\u001b[36m Building\u001b[0m [============> ] 151/270: thiserror, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), clap_builder, serde, deranged \r"]
|
||||
[26.006998, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 152/270: thiserror, clap_derive, rustix(build), time-macros, futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde \r"]
|
||||
[26.344192, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m regex v1.11.1\r\n"]
|
||||
[26.344222, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 153/270: thiserror, clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
|
||||
[26.419882, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num v0.4.3\r\n"]
|
||||
[26.419909, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 154/270: num, clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
|
||||
[26.459008, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 155/270: crossbeam-utils(build), clap_derive, rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), clap_builder, serde, regex \r"]
|
||||
[26.563875, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 156/270: crossbeam-utils(build), rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), ahash(build), clap_builder, serde, regex \r"]
|
||||
[26.581267, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 157/270: crossbeam-utils(build), rustix(build), futures-util, regex-automata, tokio, yoke, rustversion(build), anyhow(build), ahash(build), clap_builder, signal-hook, serde \r"]
|
||||
[26.732902, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerovec v0.10.4\r\n"]
|
||||
[26.733036, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 158/270: crossbeam-utils(build), rustix(build), futures-util, zerovec, regex-automata, tokio, rustversion(build), anyhow(build), ahash(build), clap_builder, signal-hook, serde \r"]
|
||||
[26.805769, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-project-internal v1.1.10\r\n"]
|
||||
[26.805815, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 159/270: crossbeam-utils(build), rustix(build), futures-util, zerovec, pin-project-internal, regex-automata, tokio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[26.939474, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m mio v0.8.11\r\n"]
|
||||
[26.939612, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 160/270: crossbeam-utils(build), futures-util, zerovec, pin-project-internal, regex-automata, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.135956, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m getrandom v0.2.15\r\n"]
|
||||
[27.13618, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=============> ] 161/270: crossbeam-utils(build), getrandom, zerovec, pin-project-internal, regex-automata, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.160029, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m iana-time-zone v0.1.62\r\n"]
|
||||
[27.160304, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 162/270: crossbeam-utils(build), getrandom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, rustversion(build), anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.168847, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m nom v8.0.0\r\n"]
|
||||
[27.168897, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 163/270: crossbeam-utils(build), getrandom, nom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.201855, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m zerocopy v0.7.35\r\n"]
|
||||
[27.202076, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 164/270: crossbeam-utils(build), nom, iana-time-zone, zerovec, pin-project-internal, tokio, mio, anyhow(build), ahash(build), zerocopy, clap_builder, serde \r"]
|
||||
[27.227265, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m chrono v0.4.40\r\n"]
|
||||
[27.227339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 165/270: crossbeam-utils(build), nom, zerovec, pin-project-internal, chrono, tokio, mio, anyhow(build), ahash(build), zerocopy, clap_builder, serde \r"]
|
||||
[27.296492, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 166/270: crossbeam-utils(build), nom, zerovec, pin-project-internal, chrono, tokio, anyhow(build), ahash(build), zerocopy, clap_builder, serde, rustversion \r"]
|
||||
[27.314139, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m pin-project v1.1.10\r\n\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 167/270: crossbeam-utils(build), nom, pin-project, zerovec, chrono, tokio, anyhow(build), ahash(build), zerocopy, clap_builder, serde, rustversion \r"]
|
||||
[27.373114, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m signal-hook-mio v0.2.4\r\n"]
|
||||
[27.37324, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 168/270: crossbeam-utils(build), nom, pin-project, zerovec, chrono, tokio, signal-hook-mio, anyhow(build), ahash(build), clap_builder, serde, rustversion \r"]
|
||||
[27.415303, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tokio-util v0.7.14\r\n"]
|
||||
[27.415331, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 169/270: crossbeam-utils(build), nom, pin-project, zerovec, tokio-util, chrono, tokio, anyhow(build), ahash(build), clap_builder, serde, rustversion \r"]
|
||||
[27.536012, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tokio-native-tls v0.3.1\r\n"]
|
||||
[27.536162, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 170/270: crossbeam-utils(build), nom, tokio-native-tls, pin-project, zerovec, tokio-util, chrono, tokio, anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.597054, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures-executor v0.3.31\r\n"]
|
||||
[27.597106, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 171/270: crossbeam-utils(build), nom, pin-project, zerovec, tokio-util, chrono, tokio, futures-executor, anyhow(build), ahash(build), clap_builder, serde \r"]
|
||||
[27.709368, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==============> ] 172/270: crossbeam-utils(build), nom, pin-project, zerovec, anyhow, tokio-util, chrono, tokio, futures-executor, ahash(build), clap_builder, serde \r"]
|
||||
[27.716961, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 173/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, futures-executor, ahash(build), clap_builder, serde \r"]
|
||||
[27.738433, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m clap v4.5.34\r\n"]
|
||||
[27.738489, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 174/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), clap_builder, serde \r"]
|
||||
[27.748759, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fancy-regex v0.11.0\r\n"]
|
||||
[27.748798, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 175/270: crossbeam-utils(build), rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), fancy-regex, serde \r"]
|
||||
[27.928307, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 176/270: rustix, nom, pin-project, zerovec, anyhow, chrono, tokio, clap, ahash(build), fancy-regex, crossbeam-utils, serde \r"]
|
||||
[27.930598, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m fraction v0.13.1\r\n"]
|
||||
[27.930634, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 177/270: rustix, nom, pin-project, zerovec, chrono, tokio, clap, ahash(build), fraction, fancy-regex, crossbeam-utils, serde \r"]
|
||||
[28.09247, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m time v0.3.41\r\n"]
|
||||
[28.094192, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 178/270: rustix, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fraction, fancy-regex, crossbeam-utils, serde \r"]
|
||||
[28.171592, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m uuid v1.16.0\r\n"]
|
||||
[28.171762, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 179/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fancy-regex, crossbeam-utils, serde \r"]
|
||||
[28.231062, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m nix v0.27.1\r\n"]
|
||||
[28.23111, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 180/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, ahash(build), fancy-regex, nix, serde \r"]
|
||||
[28.244612, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m paste v1.0.15\r\n\u001b[1m\u001b[36m Building\u001b[0m "]
|
||||
[28.244867, "o", "[===============> ] 181/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, paste(build.rs), fancy-regex, nix, serde \r"]
|
||||
[28.286837, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bytecount v0.6.8\r\n"]
|
||||
[28.286995, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 182/270: rustix, uuid, nom, pin-project, zerovec, time, tokio, clap, paste(build.rs), bytecount, nix, serde \r"]
|
||||
[28.342152, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m heck v0.4.1\r\n"]
|
||||
[28.342211, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===============> ] 183/270: uuid, nom, pin-project, zerovec, time, tokio, clap, heck, paste(build.rs), bytecount, nix, serde \r"]
|
||||
[28.36806, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num-cmp v0.1.0\r\n"]
|
||||
[28.368385, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 184/270: nom, pin-project, zerovec, num-cmp, time, tokio, clap, heck, paste(build.rs), bytecount, nix, serde \r"]
|
||||
[28.381471, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m option-ext v0.2.0\r\n"]
|
||||
[28.381606, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 185/270: nom, pin-project, zerovec, num-cmp, time, option-ext, tokio, clap, heck, bytecount, nix, serde \r"]
|
||||
[28.383891, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hex v0.4.3\r\n"]
|
||||
[28.384002, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 186/270: nom, hex, pin-project, zerovec, num-cmp, time, option-ext, tokio, clap, heck, nix, serde \r"]
|
||||
[28.42974, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m home v0.5.11\r\n"]
|
||||
[28.430048, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 187/270: home, nom, hex, pin-project, zerovec, num-cmp, time, tokio, clap, heck, nix, serde \r"]
|
||||
[28.452346, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strum_macros v0.25.3\r\n"]
|
||||
[28.452511, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 188/270: home, nom, strum_macros, hex, pin-project, zerovec, num-cmp, time, tokio, clap, nix, serde \r"]
|
||||
[28.489991, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m which v4.4.2\r\n"]
|
||||
[28.490071, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 189/270: nom, strum_macros, hex, pin-project, zerovec, num-cmp, time, tokio, clap, which, nix, serde \r"]
|
||||
[28.511349, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m iso8601 v0.6.2\r\n\u001b[1m\u001b[36m Building\u001b[0m [================> ] 190/270: nom, strum_macros, iso8601, pin-project, zerovec, num-cmp, time, tokio, clap, which, nix, serde \r"]
|
||||
[28.515445, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m dirs-sys v0.4.1\r\n"]
|
||||
[28.515849, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 191/270: nom, strum_macros, iso8601, pin-project, zerovec, time, tokio, clap, dirs-sys, which, nix, serde \r"]
|
||||
[28.57553, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m indexmap v2.8.0\r\n"]
|
||||
[28.575846, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 192/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, dirs-sys, which, nix, serde \r"]
|
||||
[28.586559, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 193/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, serde_json, which, nix, serde \r"]
|
||||
[28.600211, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_urlencoded v0.7.1\r\n"]
|
||||
[28.600611, "o", "\u001b[1m\u001b[36m Building\u001b[0m [================> ] 194/270: strum_macros, iso8601, pin-project, zerovec, indexmap, time, tokio, clap, serde_json, which, serde_urlencoded, nix \r"]
|
||||
[28.639939, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 195/270: strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, tokio, clap, serde_json, serde_urlencoded, nix \r"]
|
||||
[28.71242, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_with v2.3.3\r\n"]
|
||||
[28.712715, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 196/270: strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, serde_with, tokio, clap, serde_json, serde_urlencoded \r"]
|
||||
[28.718502, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 197/270: paste(build), strum_macros, iso8601, pin-project, zerovec, indexmap, ahash, time, serde_with, tokio, clap, serde_json \r"]
|
||||
[28.766112, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-epoch v0.9.18\r\n"]
|
||||
[28.766143, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 198/270: paste(build), crossbeam-epoch, strum_macros, iso8601, pin-project, zerovec, indexmap, time, serde_with, tokio, clap, serde_json \r"]
|
||||
[28.804231, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m futures v0.3.31\r\n"]
|
||||
[28.804285, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 199/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, indexmap, time, serde_with, tokio, clap, futures, serde_json \r"]
|
||||
[28.848245, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m async-trait v0.1.88\r\n"]
|
||||
[28.848379, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 200/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, indexmap, time, serde_with, tokio, clap, serde_json, async-trait \r"]
|
||||
[28.883251, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tinystr v0.7.6\r\n"]
|
||||
[28.88339, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 201/270: paste(build), crossbeam-epoch, strum_macros, iso8601, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait \r"]
|
||||
[28.895154, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_collections v1.5.0\r\n"]
|
||||
[28.895327, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 202/270: paste(build), crossbeam-epoch, strum_macros, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait, icu_collections \r"]
|
||||
[28.951617, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_yaml v0.9.34+deprecated\r\n"]
|
||||
[28.951647, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 203/270: paste(build), serde_yaml, strum_macros, zerovec, time, serde_with, tokio, clap, serde_json, tinystr, async-trait, icu_collections \r"]
|
||||
[28.963731, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m h2 v0.3.26\r\n"]
|
||||
[28.963758, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 204/270: paste(build), serde_yaml, strum_macros, zerovec, time, serde_with, clap, serde_json, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[28.978714, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m serde_repr v0.1.20\r\n"]
|
||||
[28.978743, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=================> ] 205/270: paste(build), serde_yaml, strum_macros, serde_repr, time, serde_with, clap, serde_json, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[29.061025, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m xattr v1.5.0\r\n"]
|
||||
[29.061094, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 206/270: xattr, paste(build), serde_yaml, strum_macros, serde_repr, time, serde_with, serde_json, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[29.168367, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m filetime v0.2.25\r\n"]
|
||||
[29.16851, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 207/270: xattr, paste(build), serde_yaml, strum_macros, time, serde_with, serde_json, h2, filetime, tinystr, async-trait, icu_collections \r"]
|
||||
[29.200162, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rayon-core v1.12.1\r\n"]
|
||||
[29.200297, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 208/270: paste(build), serde_yaml, strum_macros, time, serde_with, rayon-core(build.rs), serde_json, h2, filetime, tinystr, async-trait, icu_collections \r"]
|
||||
[29.260113, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m tar v0.4.44\r\n"]
|
||||
[29.260221, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 209/270: paste(build), serde_yaml, strum_macros, time, serde_with, rayon-core(build.rs), tar, serde_json, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[29.277112, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 210/270: rayon-core(build), paste(build), serde_yaml, strum_macros, time, serde_with, tar, serde_json, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[29.279362, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossbeam-deque v0.8.6\r\n"]
|
||||
[29.279518, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 211/270: rayon-core(build), paste(build), serde_yaml, strum_macros, crossbeam-deque, time, serde_with, tar, h2, tinystr, async-trait, icu_collections \r"]
|
||||
[29.328543, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m dirs v5.0.1\r\n"]
|
||||
[29.328571, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 212/270: rayon-core(build), paste(build), serde_yaml, strum_macros, crossbeam-deque, time, serde_with, dirs, tar, h2, tinystr, icu_collections "]
|
||||
[29.328594, "o", "\r"]
|
||||
[29.359226, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossterm v0.27.0\r\n"]
|
||||
[29.359346, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 213/270: rayon-core(build), paste(build), serde_yaml, strum_macros, time, serde_with, dirs, crossterm, tar, h2, tinystr, icu_collections \r"]
|
||||
[29.38027, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m itertools v0.11.0\r\n"]
|
||||
[29.380463, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 214/270: rayon-core(build), paste(build), serde_yaml, strum_macros, itertools, time, serde_with, crossterm, tar, h2, tinystr, icu_collections \r"]
|
||||
[29.480834, "o", "\u001b[K"]
|
||||
[29.480864, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m models v0.4.0 (/Users/goku/projects/wrkflw/crates/models)\r\n"]
|
||||
[29.48099, "o", "\u001b[1m\u001b[36m Building\u001b[0m [==================> ] 215/270: rayon-core(build), paste(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, tinystr, icu_collections, models \r"]
|
||||
[29.749227, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m matrix v0.4.0 (/Users/goku/projects/wrkflw/crates/matrix)\r\n"]
|
||||
[29.7494, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 216/270: rayon-core(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, matrix, tinystr, icu_collections, models \r"]
|
||||
[29.784576, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m logging v0.4.0 (/Users/goku/projects/wrkflw/crates/logging)\r\n"]
|
||||
[29.784625, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 217/270: rayon-core(build), serde_yaml, strum_macros, itertools, time, crossterm, tar, h2, matrix, logging, tinystr, icu_collections \r"]
|
||||
[29.893603, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m utils v0.4.0 (/Users/goku/projects/wrkflw/crates/utils)\r\n"]
|
||||
[29.893669, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 218/270: rayon-core(build), serde_yaml, strum_macros, itertools, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections \r"]
|
||||
[29.974513, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bollard-stubs v1.42.0-rc.7\r\n"]
|
||||
[29.974559, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 219/270: rayon-core(build), bollard-stubs, strum_macros, itertools, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections \r"]
|
||||
[29.991577, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m runtime v0.4.0 (/Users/goku/projects/wrkflw/crates/runtime)\r\n"]
|
||||
[29.991626, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 220/270: rayon-core(build), bollard-stubs, strum_macros, utils, time, crossterm, h2, matrix, logging, tinystr, icu_collections, runtime \r"]
|
||||
[29.996852, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m validators v0.4.0 (/Users/goku/projects/wrkflw/crates/validators)\r\n\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 221/270: rayon-core(build), bollard-stubs, strum_macros, utils, time, h2, matrix, logging, tinystr, validators, icu_collections, runtime \r"]
|
||||
[30.028518, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 222/270: rayon-core(build), bollard-stubs, strum_macros, utils, paste, time, h2, matrix, tinystr, validators, icu_collections, runtime \r"]
|
||||
[30.082507, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_locid v1.5.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 223/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, h2, matrix, tinystr, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.108104, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hyper v0.14.32\r\n"]
|
||||
[30.108153, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 224/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, hyper, h2, matrix, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.141236, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m colored v2.2.0\r\n"]
|
||||
[30.141305, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 225/270: rayon-core(build), bollard-stubs, strum_macros, paste, time, hyper, h2, colored, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.145174, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m num_cpus v1.16.0\r\n"]
|
||||
[30.145218, "o", "\u001b[1m\u001b[36m Building\u001b[0m [===================> ] 226/270: bollard-stubs, strum_macros, num_cpus, paste, time, hyper, h2, colored, validators, icu_locid, icu_collections, runtime \r"]
|
||||
[30.20999, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m cassowary v0.3.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 227/270: bollard-stubs, strum_macros, paste, time, hyper, h2, colored, validators, icu_locid, cassowary, icu_collections, runtime \r"]
|
||||
[30.303977, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m unicode-segmentation v1.12.0\r\n"]
|
||||
[30.30402, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 228/270: unicode-segmentation, bollard-stubs, strum_macros, paste, time, hyper, h2, colored, icu_locid, cassowary, icu_collections, runtime \r"]
|
||||
[30.392998, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m indoc v2.0.6\r\n"]
|
||||
[30.393666, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 229/270: unicode-segmentation, bollard-stubs, strum_macros, paste, time, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.396679, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m strum v0.25.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 230/270: unicode-segmentation, bollard-stubs, strum, paste, time, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.399296, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m unicode-width v0.1.14\r\n"]
|
||||
[30.399332, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 231/270: unicode-segmentation, bollard-stubs, strum, time, unicode-width, hyper, h2, indoc, colored, icu_locid, cassowary, runtime \r"]
|
||||
[30.407848, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m evaluator v0.4.0 (/Users/goku/projects/wrkflw/crates/evaluator)\r\n"]
|
||||
[30.407878, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 232/270: unicode-segmentation, bollard-stubs, strum, time, unicode-width, hyper, evaluator, h2, indoc, icu_locid, cassowary, runtime \r"]
|
||||
[30.554265, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_provider v1.5.0\r\n"]
|
||||
[30.554908, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 233/270: unicode-segmentation, bollard-stubs, strum, time, hyper, evaluator, h2, indoc, icu_locid, cassowary, icu_provider, runtime \r"]
|
||||
[30.591042, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 234/270: bollard-stubs, strum, time, hyper, rayon-core, evaluator, h2, indoc, icu_locid, cassowary, icu_provider, runtime \r"]
|
||||
[30.604905, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m crossterm v0.26.1\r\n"]
|
||||
[30.605074, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 235/270: bollard-stubs, strum, time, hyper, rayon-core, crossterm, evaluator, h2, indoc, icu_locid, icu_provider, runtime \r"]
|
||||
[30.653721, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m same-file v1.0.6\r\n"]
|
||||
[30.653797, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 236/270: bollard-stubs, strum, time, hyper, rayon-core, crossterm, evaluator, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.691821, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m urlencoding v2.1.3\r\n"]
|
||||
[30.691883, "o", "\u001b[1m\u001b[36m Building\u001b[0m [====================> ] 237/270: urlencoding, bollard-stubs, strum, time, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.708108, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 238/270: urlencoding, bollard-stubs, strum, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.749513, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ratatui v0.23.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 239/270: urlencoding, bollard-stubs, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, same-file, icu_provider, runtime \r"]
|
||||
[30.756806, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m walkdir v2.5.0\r\n\u001b[1m\u001b[36m Building\u001b[0m "]
|
||||
[30.756853, "o", "[=====================> ] 240/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, icu_provider, runtime \r"]
|
||||
[30.764425, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 241/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, icu_locid, icu_provider \r"]
|
||||
[30.794001, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m rayon v1.10.0\r\n"]
|
||||
[30.794057, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 241/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_locid, icu_provider \r"]
|
||||
[30.83092, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 242/270: urlencoding, bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_provider \r"]
|
||||
[30.838227, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 243/270: bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, h2, rayon, icu_provider \r"]
|
||||
[30.846511, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 244/270: bollard-stubs, walkdir, ratatui, hyper, rayon-core, crossterm, rayon, icu_provider \r"]
|
||||
[30.914955, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 245/270: bollard-stubs, walkdir, ratatui, hyper, crossterm, rayon, icu_provider \r"]
|
||||
[30.917348, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 246/270: bollard-stubs, ratatui, hyper, crossterm, rayon, icu_provider \r"]
|
||||
[30.978944, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 247/270: bollard-stubs, ratatui, hyper, rayon, icu_provider \r"]
|
||||
[31.11234, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_locid_transform v1.5.0\r\n"]
|
||||
[31.112427, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 247/270: bollard-stubs, ratatui, hyper, icu_locid_transform, rayon, icu_provider \r"]
|
||||
[31.162661, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, ratatui, hyper, icu_locid_transform, rayon \r"]
|
||||
[31.401195, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_properties v1.5.1\r\n"]
|
||||
[31.401226, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.420216, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m hyper-tls v0.5.0\r\n"]
|
||||
[31.42035, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m hyperlocal v0.8.0\r\n\u001b[1m\u001b[36m Building\u001b[0m [=====================> ] 248/270: bollard-stubs, hyperlocal, hyper-tls, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.524709, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 249/270: bollard-stubs, hyperlocal, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.556444, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 250/270: bollard-stubs, ratatui, hyper, icu_locid_transform, icu_properties, rayon \r"]
|
||||
[31.566354, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 251/270: bollard-stubs, ratatui, hyper, icu_properties, rayon \r"]
|
||||
[31.685692, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 252/270: bollard-stubs, ratatui, icu_properties, rayon \r"]
|
||||
[31.74925, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 253/270: bollard-stubs, ratatui, icu_properties \r"]
|
||||
[31.927424, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 254/270: bollard-stubs, icu_properties \r"]
|
||||
[32.170935, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m icu_normalizer v1.5.0\r\n"]
|
||||
[32.170967, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 254/270: bollard-stubs, icu_normalizer, icu_properties \r"]
|
||||
[32.268484, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, icu_normalizer \r"]
|
||||
[32.367189, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m idna_adapter v1.2.0\r\n"]
|
||||
[32.367291, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, icu_normalizer, idna_adapter \r"]
|
||||
[32.408588, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m idna v1.0.3\r\n"]
|
||||
[32.408647, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 255/270: bollard-stubs, idna, icu_normalizer, idna_adapter \r"]
|
||||
[32.41806, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 256/270: bollard-stubs, idna, icu_normalizer \r"]
|
||||
[32.492269, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 257/270: bollard-stubs, idna \r"]
|
||||
[32.545839, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m url v2.5.4\r\n"]
|
||||
[32.545928, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 257/270: bollard-stubs, idna, url \r"]
|
||||
[32.629833, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 258/270: bollard-stubs, url \r"]
|
||||
[32.759557, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m reqwest v0.11.27\r\n"]
|
||||
[32.759587, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 258/270: bollard-stubs, reqwest, url \r"]
|
||||
[32.835456, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, reqwest \r"]
|
||||
[33.312497, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m bollard v0.14.0\r\n"]
|
||||
[33.312523, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, bollard, reqwest \r"]
|
||||
[33.331502, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m jsonschema v0.17.1\r\n"]
|
||||
[33.331527, "o", "\u001b[1m\u001b[32m Compiling\u001b[0m github v0.4.0 (/Users/goku/projects/wrkflw/crates/github)\r\n\u001b[1m\u001b[32m Compiling\u001b[0m gitlab v0.4.0 (/Users/goku/projects/wrkflw/crates/gitlab)\r\n"]
|
||||
[33.331737, "o", "\u001b[1m\u001b[36m Building\u001b[0m [======================> ] 259/270: bollard-stubs, github, bollard, gitlab, jsonschema, reqwest \r"]
|
||||
[33.393223, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 260/270: github, bollard, gitlab, jsonschema, reqwest \r"]
|
||||
[33.595709, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 261/270: github, bollard, jsonschema, reqwest \r"]
|
||||
[33.604039, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 262/270: bollard, jsonschema, reqwest \r"]
|
||||
[34.004668, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 263/270: bollard, jsonschema \r"]
|
||||
[34.015674, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m parser v0.4.0 (/Users/goku/projects/wrkflw/crates/parser)\r\n"]
|
||||
[34.015746, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 263/270: parser, bollard, jsonschema \r"]
|
||||
[34.346413, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 264/270: parser, bollard \r"]
|
||||
[34.500995, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 265/270: bollard \r"]
|
||||
[34.745632, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m executor v0.4.0 (/Users/goku/projects/wrkflw/crates/executor)\r\n"]
|
||||
[34.74581, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 265/270: bollard, executor \r"]
|
||||
[35.477556, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 266/270: executor \r"]
|
||||
[35.613178, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m ui v0.4.0 (/Users/goku/projects/wrkflw/crates/ui)\r\n"]
|
||||
[35.613272, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 266/270: ui, executor \r"]
|
||||
[35.999581, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 267/270: ui \r"]
|
||||
[36.013859, "o", "\u001b[K\u001b[1m\u001b[32m Compiling\u001b[0m wrkflw v0.4.0 (/Users/goku/projects/wrkflw/crates/wrkflw)\r\n"]
|
||||
[36.013926, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 267/270: ui, wrkflw \r"]
|
||||
[36.063415, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 268/270: ui \r"]
|
||||
[36.374123, "o", "\u001b[1m\u001b[36m Building\u001b[0m [=======================> ] 269/270: wrkflw(bin) \r"]
|
||||
[36.934682, "o", "\u001b[K\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 16.45s\r\n"]
     Running `target/debug/wrkflw validate test_gitlab_ci/minimal.gitlab-ci.yml`
Validating GitLab CI pipeline file: test_gitlab_ci/minimal.gitlab-ci.yml... ✅ Valid syntax

➜ wrkflw (main) ✗ cargo r -- run test_gitlab_ci/minimal.gitlab-ci.yml -e
    Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.19s
     Running `target/debug/wrkflw run test_gitlab_ci/minimal.gitlab-ci.yml -e`
✅ Workflow execution completed successfully!

Job summary:
  ✅ build (success)
    Steps:
      ✅ Run script line 1
  ✅ test (success)
    Steps:
      ✅ Run script line 1
  ✅ build (success)
    Steps:
      ✅ Run script line 1
  ✅ test (success)
    Steps:
      ✅ Run script line 1
➜ wrkflw (main) ✗ cat test_gitlab_ci/invalid.gitlab-ci.yml
# Invalid GitLab CI file with common mistakes

# Missing stages definition
# stages:
#   - build
#   - test

variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/.cargo  # Missing quotes around value with variables

# Invalid job definition (missing script)
build:
  stage: build  # Referring to undefined stage
  # Missing required script section
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week

# Invalid job with incorrect when value
test:
  stage: test
  script:
    - cargo test
  when: never  # Invalid value for when (should be always, manual, or delayed)
  dependencies:
    - non_existent_job  # Dependency on non-existent job

# Improperly structured job with invalid keys
deploy:
  stagee: deploy  # Typo in stage key
  scriptt:  # Typo in script key
    - echo "Deploying..."
  only:
    - main
  environment:
    production  # Incorrect format for environment
  retry: hello  # Incorrect type for retry (should be integer or object)

# Invalid rules section
lint:
  stage: test
  script:
    - cargo clippy
  rules:
    - equals: $CI_COMMIT_BRANCH == "main"  # Invalid rule (should be if, changes, exists, etc.)

# Job with invalid cache configuration
cache-test:
  stage: test
  script:
    - echo "Testing cache"
  cache:
    paths:
      - ${CARGO_HOME}
    key: [invalid, key, type]  # Invalid type for key (should be string)
    policy: invalid-policy  # Invalid policy value
➜ wrkflw (main) ✗ cargo r validate test_gitlab_ci/invalid.gitlab-ci.yml
    Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.09s
     Running `target/debug/wrkflw validate test_gitlab_ci/invalid.gitlab-ci.yml`
Validating GitLab CI pipeline file: test_gitlab_ci/invalid.gitlab-ci.yml... ❌ Invalid
Validation failed: Schema validation error: GitLab CI validation failed:
- {"key":["invalid","key","type"],"paths":["${CARGO_HOME}"],"policy":"invalid-policy"} is not valid under any of the schemas listed in the 'oneOf' keyword
- "hello" is not valid under any of the schemas listed in the 'oneOf' keyword
- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)
- {"equals":"$CI_COMMIT_BRANCH == \"main\""} is not valid under any of the schemas listed in the 'anyOf' keyword
|
||||
[99.637323, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[99.638217, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[99.638226, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[99.639979, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[99.642108, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;32m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[99.642189, "o", "\u001b[?1h\u001b="]
|
||||
[99.642244, "o", "\u001b[?2004h"]
|
||||
[101.389433, "o", "c"]
|
||||
[101.489821, "o", "\bca"]
|
||||
[101.781592, "o", "r"]
|
||||
[101.870935, "o", "g"]
|
||||
[101.913828, "o", "o"]
|
||||
[102.021608, "o", " "]
|
||||
[102.173967, "o", "r"]
|
||||
[102.282804, "o", " "]
|
||||
[103.113368, "o", "-"]
|
||||
[103.251079, "o", "-"]
|
||||
[103.3802, "o", " "]
|
||||
[103.637955, "o", "r"]
|
||||
[103.756731, "o", "u"]
|
||||
[104.035863, "o", " "]
|
||||
[104.396646, "o", "\b"]
|
||||
[104.88292, "o", "n"]
|
||||
[104.97564, "o", " "]
|
||||
[106.361505, "o", "t"]
|
||||
[106.453323, "o", "e"]
|
||||
[106.66181, "o", "s"]
|
||||
[106.761957, "o", "t"]
|
||||
[107.423959, "o", "_"]
|
||||
[107.591679, "o", "gitlab_ci\u001b[1m/\u001b[0m"]
|
||||
[109.594052, "o", "\b\u001b[0m/i"]
|
||||
[109.78732, "o", "n"]
|
||||
[110.089516, "o", "v"]
|
||||
[110.259654, "o", "alid.gitlab-ci.yml\u001b[1m \u001b[0m"]
|
||||
[112.918071, "o", "\b\u001b[0m -"]
|
||||
[113.487665, "o", "e"]
|
||||
[114.05841, "o", "\u001b[?1l\u001b>"]
|
||||
[114.05869, "o", "\u001b[?2004l\r\r\n"]
|
||||
[114.060284, "o", "\u001b]2;cargo r -- run test_gitlab_ci/invalid.gitlab-ci.yml -e\u0007\u001b]1;cargo\u0007"]
|
||||
[114.193654, "o", "\u001b[1m\u001b[32m Finished\u001b[0m `dev` profile [unoptimized + debuginfo] target(s) in 0.09s\r\n"]
|
||||
[114.200619, "o", "\u001b[1m\u001b[32m Running\u001b[0m `target/debug/wrkflw run test_gitlab_ci/invalid.gitlab-ci.yml -e`\r\n"]
|
||||
[114.727902, "o", "Error executing workflow: Parse error: Failed to parse GitLab pipeline: Schema validation error: GitLab CI validation failed:\r\n- {\"key\":[\"invalid\",\"key\",\"type\"],\"paths\":[\"${CARGO_HOME}\"],\"policy\":\"invalid-policy\"} is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- \"hello\" is not valid under any of the schemas listed in the 'oneOf' keyword\r\n- Additional properties are not allowed ('scriptt', 'stagee' were unexpected)\r\n- {\"equals\":\"$CI_COMMIT_BRANCH == \\\"main\\\"\"} is not valid under any of the schemas listed in the 'anyOf' keyword\r\n\r\n"]
|
||||
[114.728458, "o", "\u001b[1m\u001b[7m%\u001b[27m\u001b[1m\u001b[0m \r \r"]
|
||||
[114.72932, "o", "\u001b]2;goku@Gokuls-MacBook-Pro:~/projects/wrkflw\u0007"]
|
||||
[114.729328, "o", "\u001b]1;..ojects/wrkflw\u0007"]
|
||||
[114.731093, "o", "\u001b]7;file://Gokuls-MacBook-Pro.local/Users/goku/projects/wrkflw\u001b\\"]
|
||||
[114.732938, "o", "\r\u001b[0m\u001b[27m\u001b[24m\u001b[J\u001b[01;31m➜ \u001b[36mwrkflw\u001b[00m \u001b[01;34m(\u001b[31mmain\u001b[34m) \u001b[33m✗\u001b[00m \u001b[K"]
|
||||
[114.73302, "o", "\u001b[?1h\u001b="]
|
||||
[114.733045, "o", "\u001b[?2004h"]
|
||||
[118.210217, "o", "\u001b[?2004l\r\r\n"]
|
||||
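The fixture that produces these errors is not included in this compare view. Below is a minimal sketch of what test_gitlab_ci/invalid.gitlab-ci.yml could contain to trigger exactly these four messages; every key is an assumption inferred from the error output above, not the file's actual contents:

    # Hypothetical reconstruction of test_gitlab_ci/invalid.gitlab-ci.yml
    cache:
      key: [invalid, key, type]   # 'oneOf' failure: cache key must be a string or a files/prefix object
      paths:
        - ${CARGO_HOME}
      policy: invalid-policy      # 'oneOf' failure: policy must be pull, push, or pull-push

    broken_job: "hello"           # 'oneOf' failure: a job must be a mapping, not a bare string

    typo_job:
      stagee: test                # unexpected property ('stage' misspelled)
      scriptt:                    # unexpected property ('script' misspelled)
        - echo "hi"

    rules_job:
      script: echo "rules"
      rules:
        - equals: $CI_COMMIT_BRANCH == "main"   # 'anyOf' failure: not a valid rule key (an 'if' clause was likely intended)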
1711  schemas/github-workflow.json  (Normal file; diff suppressed because it is too large)
3012  schemas/gitlab-ci.json  (Normal file; diff suppressed because it is too large)
@@ -1,292 +0,0 @@
#[cfg(test)]
mod cleanup_tests {
    use crate::{
        cleanup_on_exit,
        executor::docker,
        runtime::emulation::{self, EmulationRuntime},
    };
    use bollard::Docker;
    use std::process::Command;

    #[tokio::test]
    async fn test_docker_container_cleanup() {
        // Skip if Docker is not available
        if !docker::is_available() {
            println!("Docker not available, skipping test");
            return;
        }

        // Connect to Docker
        let docker = match Docker::connect_with_local_defaults() {
            Ok(client) => client,
            Err(_) => {
                println!("Could not connect to Docker, skipping test");
                return;
            }
        };

        // Create a test container by tracking it
        let container_id = format!("test-container-{}", uuid::Uuid::new_v4());
        docker::track_container(&container_id);

        // Verify container is tracked
        let containers = docker::get_tracked_containers();
        let is_tracked = containers.contains(&container_id);

        assert!(is_tracked, "Container should be tracked for cleanup");

        // Run cleanup
        docker::cleanup_containers(&docker).await;

        // Verify container is no longer tracked
        let containers = docker::get_tracked_containers();
        let still_tracked = containers.contains(&container_id);

        assert!(
            !still_tracked,
            "Container should be removed from tracking after cleanup"
        );
    }

    #[tokio::test]
    async fn test_docker_network_cleanup() {
        // Skip if Docker is not available
        if !docker::is_available() {
            println!("Docker not available, skipping test");
            return;
        }

        // Connect to Docker
        let docker = match Docker::connect_with_local_defaults() {
            Ok(client) => client,
            Err(_) => {
                println!("Could not connect to Docker, skipping test");
                return;
            }
        };

        // Create a test network
        let network_id = match docker::create_job_network(&docker).await {
            Ok(id) => id,
            Err(_) => {
                println!("Could not create test network, skipping test");
                return;
            }
        };

        // Verify network is tracked
        let networks = docker::get_tracked_networks();
        let is_tracked = networks.contains(&network_id);

        assert!(is_tracked, "Network should be tracked for cleanup");

        // Run cleanup
        docker::cleanup_networks(&docker).await;

        // Verify network is no longer tracked
        let networks = docker::get_tracked_networks();
        let still_tracked = networks.contains(&network_id);

        assert!(
            !still_tracked,
            "Network should be removed from tracking after cleanup"
        );
    }

    #[tokio::test]
    async fn test_emulation_workspace_cleanup() {
        // Create an emulation runtime instance
        let _runtime = EmulationRuntime::new();

        // Get the workspace path
        let workspaces = emulation::get_tracked_workspaces();
        if workspaces.is_empty() {
            println!("No workspace was tracked, skipping test");
            return;
        }

        let workspace_path = &workspaces[0];

        // Verify workspace exists
        assert!(
            workspace_path.exists(),
            "Workspace should exist before cleanup"
        );

        // Run cleanup
        emulation::cleanup_resources().await;

        // Verify workspace is removed from tracking
        let workspaces = emulation::get_tracked_workspaces();
        let still_tracked = workspaces.iter().any(|w| w == workspace_path);

        assert!(
            !still_tracked,
            "Workspace should be removed from tracking after cleanup"
        );

        // Verify workspace directory is deleted
        assert!(
            !workspace_path.exists(),
            "Workspace directory should be deleted after cleanup"
        );
    }

    #[tokio::test]
    async fn test_emulation_process_cleanup() {
        // Skip tests on CI or environments where spawning processes might be restricted
        if std::env::var("CI").is_ok() {
            println!("Running in CI environment, skipping test");
            return;
        }

        // Create a process for testing
        let process_id = if cfg!(unix) {
            // Use sleep on Unix to create a long-running process
            let child = Command::new("sh")
                .arg("-c")
                .arg("sleep 30 &") // Run sleep for 30 seconds in background
                .spawn();

            match child {
                Ok(child) => {
                    // Get the PID and track it
                    let pid = child.id();
                    emulation::track_process(pid);
                    Some(pid)
                }
                Err(_) => None,
            }
        } else if cfg!(windows) {
            // Use timeout on Windows (equivalent to sleep)
            let child = Command::new("cmd")
                .arg("/C")
                .arg("start /b timeout /t 30") // Run timeout for 30 seconds
                .spawn();

            match child {
                Ok(child) => {
                    // Get the PID and track it
                    let pid = child.id();
                    emulation::track_process(pid);
                    Some(pid)
                }
                Err(_) => None,
            }
        } else {
            None
        };

        // Skip if we couldn't create a process
        let process_id = match process_id {
            Some(id) => id,
            None => {
                println!("Could not create test process, skipping test");
                return;
            }
        };

        // Verify process is tracked
        let processes = emulation::get_tracked_processes();
        let is_tracked = processes.contains(&process_id);

        assert!(is_tracked, "Process should be tracked for cleanup");

        // Run cleanup
        emulation::cleanup_resources().await;

        // Verify process is removed from tracking
        let processes = emulation::get_tracked_processes();
        let still_tracked = processes.contains(&process_id);

        assert!(
            !still_tracked,
            "Process should be removed from tracking after cleanup"
        );
    }

    #[tokio::test]
    async fn test_cleanup_on_exit_function() {
        // Skip test on CI where we may not have permission
        if std::env::var("CI").is_ok() {
            println!("Running in CI environment, skipping test");
            return;
        }

        // Create Docker resources if available
        let docker_client = match Docker::connect_with_local_defaults() {
            Ok(client) => {
                // Create a network
                let _ = docker::create_job_network(&client).await;
                Some(client)
            }
            Err(_) => None,
        };

        // Create an emulation runtime to track a workspace
        let _runtime = EmulationRuntime::new();

        // Create a process to track in emulation mode
        if cfg!(unix) {
            let child = Command::new("sh").arg("-c").arg("sleep 30 &").spawn();

            if let Ok(child) = child {
                emulation::track_process(child.id());
            }
        }

        // Count initial resource tracking
        let docker_resources = if docker_client.is_some() {
            let containers = docker::get_tracked_containers().len();
            let networks = docker::get_tracked_networks().len();
            containers + networks
        } else {
            0
        };

        let emulation_resources = {
            let processes = emulation::get_tracked_processes().len();
            let workspaces = emulation::get_tracked_workspaces().len();
            processes + workspaces
        };

        // Verify we have resources to clean up
        let total_resources = docker_resources + emulation_resources;
        if total_resources == 0 {
            println!("No resources were created for testing, skipping test");
            return;
        }

        // Run the main cleanup function
        cleanup_on_exit().await;

        // Add a small delay to ensure async cleanup operations complete
        tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;

        // Check if Docker resources were cleaned up
        let docker_resources_after = if docker_client.is_some() {
            let containers = docker::get_tracked_containers().len();
            let networks = docker::get_tracked_networks().len();
            containers + networks
        } else {
            0
        };

        // Check if emulation resources were cleaned up
        let emulation_resources_after = {
            let processes = emulation::get_tracked_processes().len();
            let workspaces = emulation::get_tracked_workspaces().len();
            processes + workspaces
        };

        // Verify all resources were cleaned up
        assert_eq!(
            docker_resources_after, 0,
            "All Docker resources should be cleaned up"
        );
        assert_eq!(
            emulation_resources_after, 0,
            "All emulation resources should be cleaned up"
        );
    }
}
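Because each test above skips itself when its environment is missing (no Docker daemon, CI restrictions on spawning processes), the module can be run directly; the filter is simply the module name shown above:

    cargo test cleanup_tests -- --nocapture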
@@ -1,439 +0,0 @@
use crate::logging;
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
use async_trait::async_trait;
use bollard::{
    container::{Config, CreateContainerOptions},
    models::HostConfig,
    network::CreateNetworkOptions,
    Docker,
};
use futures_util::StreamExt;
use once_cell::sync::Lazy;
use std::path::Path;
use std::sync::Mutex;

static RUNNING_CONTAINERS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));
static CREATED_NETWORKS: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));

pub struct DockerRuntime {
    docker: Docker,
}

impl DockerRuntime {
    pub fn new() -> Result<Self, ContainerError> {
        let docker = Docker::connect_with_local_defaults().map_err(|e| {
            ContainerError::ContainerStart(format!("Failed to connect to Docker: {}", e))
        })?;

        Ok(DockerRuntime { docker })
    }
}

pub fn is_available() -> bool {
    // Use the safe FD redirection utility from utils
    match crate::utils::fd::with_stderr_to_null(|| {
        match Docker::connect_with_local_defaults() {
            Ok(docker) => match futures::executor::block_on(async { docker.ping().await }) {
                Ok(_) => true,
                Err(_) => {
                    // Only log at debug level to avoid cluttering the console with technical errors
                    logging::debug("Docker daemon is running but ping failed. Docker may not be properly configured.");
                    false
                }
            },
            Err(_) => {
                // Only log at debug level to avoid confusing users
                logging::debug("Docker daemon is not running or not properly configured.");
                false
            }
        }
    }) {
        Ok(result) => result,
        Err(_) => {
            logging::debug("Failed to redirect stderr when checking Docker availability.");
            false
        }
    }
}

// Add container to tracking
pub fn track_container(id: &str) {
    if let Ok(mut containers) = RUNNING_CONTAINERS.lock() {
        containers.push(id.to_string());
    }
}

// Remove container from tracking
pub fn untrack_container(id: &str) {
    if let Ok(mut containers) = RUNNING_CONTAINERS.lock() {
        containers.retain(|c| c != id);
    }
}

// Add network to tracking
pub fn track_network(id: &str) {
    if let Ok(mut networks) = CREATED_NETWORKS.lock() {
        networks.push(id.to_string());
    }
}

// Remove network from tracking
pub fn untrack_network(id: &str) {
    if let Ok(mut networks) = CREATED_NETWORKS.lock() {
        networks.retain(|n| n != id);
    }
}

// Clean up all tracked resources
pub async fn cleanup_resources(docker: &Docker) {
    cleanup_containers(docker).await;
    cleanup_networks(docker).await;
}

// Clean up all tracked containers
pub async fn cleanup_containers(docker: &Docker) {
    let containers_to_cleanup = {
        if let Ok(containers) = RUNNING_CONTAINERS.lock() {
            containers.clone()
        } else {
            vec![]
        }
    };

    for container_id in containers_to_cleanup {
        logging::info(&format!("Cleaning up container: {}", container_id));
        let _ = docker.stop_container(&container_id, None).await;
        let _ = docker.remove_container(&container_id, None).await;
        untrack_container(&container_id);
    }
}

// Clean up all tracked networks
pub async fn cleanup_networks(docker: &Docker) {
    let networks_to_cleanup = {
        if let Ok(networks) = CREATED_NETWORKS.lock() {
            networks.clone()
        } else {
            vec![]
        }
    };

    for network_id in networks_to_cleanup {
        logging::info(&format!("Cleaning up network: {}", network_id));
        match docker.remove_network(&network_id).await {
            Ok(_) => logging::info(&format!("Successfully removed network: {}", network_id)),
            Err(e) => logging::error(&format!("Error removing network {}: {}", network_id, e)),
        }
        untrack_network(&network_id);
    }
}

// Create a new Docker network for a job
pub async fn create_job_network(docker: &Docker) -> Result<String, ContainerError> {
    let network_name = format!("wrkflw-network-{}", uuid::Uuid::new_v4());

    let options = CreateNetworkOptions {
        name: network_name.clone(),
        driver: "bridge".to_string(),
        ..Default::default()
    };

    let network = docker
        .create_network(options)
        .await
        .map_err(|e| ContainerError::NetworkCreation(e.to_string()))?;

    // network.id is Option<String>, unwrap it safely
    let network_id = network.id.ok_or_else(|| {
        ContainerError::NetworkOperation("Network created but no ID returned".to_string())
    })?;

    track_network(&network_id);
    logging::info(&format!("Created Docker network: {}", network_id));

    Ok(network_id)
}

#[async_trait]
impl ContainerRuntime for DockerRuntime {
    async fn run_container(
        &self,
        image: &str,
        cmd: &[&str],
        env_vars: &[(&str, &str)],
        working_dir: &Path,
        volumes: &[(&Path, &Path)],
    ) -> Result<ContainerOutput, ContainerError> {
        // Print detailed debugging info
        logging::info(&format!("Docker: Running container with image: {}", image));

        // Check if command contains background processes
        let has_background = cmd.iter().any(|c| c.contains(" &"));

        // Check if any command uses GITHUB_ variables and needs special handling
        let uses_github_vars = cmd.iter().any(|c| c.contains("GITHUB_"));

        // If there's a command using GitHub variables, we need to wrap it properly
        let cmd_vec: Vec<String> = if uses_github_vars {
            let mut shell_cmd = Vec::new();
            shell_cmd.push("sh".to_string());
            shell_cmd.push("-c".to_string());

            // Join the original command and fix GitHub variables reference
            let command_with_fixes =
                if cmd.len() > 2 && (cmd[0] == "sh" || cmd[0] == "bash") && cmd[1] == "-c" {
                    // For shell commands, we need to modify the command string to handle GitHub variables
                    let fixed_cmd = cmd[2]
                        .replace(">>$GITHUB_OUTPUT", ">>\"$GITHUB_OUTPUT\"")
                        .replace(">>$GITHUB_ENV", ">>\"$GITHUB_ENV\"")
                        .replace(">>$GITHUB_PATH", ">>\"$GITHUB_PATH\"")
                        .replace(">>$GITHUB_STEP_SUMMARY", ">>\"$GITHUB_STEP_SUMMARY\"");

                    format!("{} ; wait", fixed_cmd)
                } else {
                    // Otherwise join all parts and add wait
                    let cmd_str: Vec<String> = cmd.iter().map(|s| s.to_string()).collect();
                    format!("{} ; wait", cmd_str.join(" "))
                };

            shell_cmd.push(command_with_fixes);
            shell_cmd
        } else if has_background {
            // If the command contains a background process, wrap it in a shell script
            // that properly manages the background process and exits when the foreground completes
            let mut shell_cmd = Vec::new();
            shell_cmd.push("sh".to_string());
            shell_cmd.push("-c".to_string());

            // Join the original command and add a wait for any child processes
            let command_with_wait =
                if cmd.len() > 2 && (cmd[0] == "sh" || cmd[0] == "bash") && cmd[1] == "-c" {
                    // For shell commands, we just need to modify the command string
                    format!("{} ; wait", cmd[2])
                } else {
                    // Otherwise join all parts and add wait
                    let cmd_str: Vec<String> = cmd.iter().map(|s| s.to_string()).collect();
                    format!("{} ; wait", cmd_str.join(" "))
                };

            shell_cmd.push(command_with_wait);
            shell_cmd
        } else {
            // No background processes, use original command
            cmd.iter().map(|s| s.to_string()).collect()
        };

        // Always try to pull the image first
        match self.pull_image(image).await {
            Ok(_) => logging::info(&format!("🐳 Successfully pulled image: {}", image)),
            Err(e) => logging::error(&format!("🐳 Warning: Failed to pull image: {}. Continuing with existing image if available.", e)),
        }
        // Map env vars to format Docker expects
        let env: Vec<String> = env_vars
            .iter()
            .map(|(k, v)| format!("{}={}", k, v))
            .collect();

        // Setup volume bindings
        let mut binds = Vec::new();
        for (host, container) in volumes {
            binds.push(format!(
                "{}:{}",
                host.to_string_lossy(),
                container.to_string_lossy()
            ));
        }

        // Create container
        let options = Some(CreateContainerOptions {
            name: format!("wrkflw-{}", uuid::Uuid::new_v4()),
            platform: None,
        });

        let host_config = HostConfig {
            binds: Some(binds),
            ..Default::default()
        };

        let config = Config {
            image: Some(image.to_string()),
            cmd: Some(cmd_vec),
            env: Some(env),
            working_dir: Some(working_dir.to_string_lossy().to_string()),
            host_config: Some(host_config),
            ..Default::default()
        };

        let container = self
            .docker
            .create_container(options, config)
            .await
            .map_err(|e| ContainerError::ContainerStart(e.to_string()))?;

        // Start container
        self.docker
            .start_container::<String>(&container.id, None)
            .await
            .map_err(|e| ContainerError::ContainerExecution(e.to_string()))?;

        track_container(&container.id);

        // Wait for container to finish
        let wait_result = self
            .docker
            .wait_container::<String>(&container.id, None)
            .collect::<Vec<_>>()
            .await;

        let exit_code = match wait_result.first() {
            Some(Ok(exit)) => exit.status_code as i32,
            _ => -1,
        };

        // Get logs
        let logs = self
            .docker
            .logs::<String>(&container.id, None)
            .collect::<Vec<_>>()
            .await;

        let mut stdout = String::new();
        let mut stderr = String::new();

        for log in logs.into_iter().flatten() {
            match log {
                bollard::container::LogOutput::StdOut { message } => {
                    stdout.push_str(&String::from_utf8_lossy(&message));
                }
                bollard::container::LogOutput::StdErr { message } => {
                    stderr.push_str(&String::from_utf8_lossy(&message));
                }
                _ => {}
            }
        }

        // Clean up container
        let _ = self.docker.remove_container(&container.id, None).await;
        untrack_container(&container.id);

        Ok(ContainerOutput {
            stdout,
            stderr,
            exit_code,
        })
    }

    async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
        let options = bollard::image::CreateImageOptions {
            from_image: image,
            ..Default::default()
        };

        let mut stream = self.docker.create_image(Some(options), None, None);

        while let Some(result) = stream.next().await {
            if let Err(e) = result {
                return Err(ContainerError::ImagePull(e.to_string()));
            }
        }

        Ok(())
    }

    async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
        let _context_dir = dockerfile.parent().unwrap_or(Path::new("."));

        let tar_buffer = {
            let mut tar_builder = tar::Builder::new(Vec::new());

            // Add Dockerfile to tar
            if let Ok(file) = std::fs::File::open(dockerfile) {
                let mut header = tar::Header::new_gnu();
                let metadata = file.metadata().map_err(|e| {
                    ContainerError::ContainerExecution(format!(
                        "Failed to get file metadata: {}",
                        e
                    ))
                })?;
                let modified_time = metadata
                    .modified()
                    .map_err(|e| {
                        ContainerError::ContainerExecution(format!(
                            "Failed to get file modification time: {}",
                            e
                        ))
                    })?
                    .elapsed()
                    .map_err(|e| {
                        ContainerError::ContainerExecution(format!(
                            "Failed to get elapsed time since modification: {}",
                            e
                        ))
                    })?
                    .as_secs();
                header.set_size(metadata.len());
                header.set_mode(0o644);
                header.set_mtime(modified_time);
                header.set_cksum();

                tar_builder
                    .append_data(&mut header, "Dockerfile", file)
                    .map_err(|e| ContainerError::ImageBuild(e.to_string()))?;
            } else {
                return Err(ContainerError::ImageBuild(format!(
                    "Cannot open Dockerfile at {}",
                    dockerfile.display()
                )));
            }

            tar_builder
                .into_inner()
                .map_err(|e| ContainerError::ImageBuild(e.to_string()))?
        };

        let options = bollard::image::BuildImageOptions {
            dockerfile: "Dockerfile",
            t: tag,
            q: false,
            nocache: false,
            rm: true,
            ..Default::default()
        };

        let mut stream = self
            .docker
            .build_image(options, None, Some(tar_buffer.into()));

        while let Some(result) = stream.next().await {
            match result {
                Ok(_) => {
                    // For verbose output, we could log the build progress here
                }
                Err(e) => {
                    return Err(ContainerError::ImageBuild(e.to_string()));
                }
            }
        }

        Ok(())
    }
}

// Public accessor functions for testing
#[cfg(test)]
pub fn get_tracked_containers() -> Vec<String> {
    if let Ok(containers) = RUNNING_CONTAINERS.lock() {
        containers.clone()
    } else {
        vec![]
    }
}

#[cfg(test)]
pub fn get_tracked_networks() -> Vec<String> {
    if let Ok(networks) = CREATED_NETWORKS.lock() {
        networks.clone()
    } else {
        vec![]
    }
}
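For orientation, here is a minimal sketch of driving this backend through the `ContainerRuntime` trait. The call site is an assumption for illustration; wrkflw's real executor (not shown in this diff) wires the runtime up itself:

    // Hypothetical driver code, using only the API defined above.
    use std::path::Path;

    async fn run_step() -> Result<(), ContainerError> {
        let runtime = DockerRuntime::new()?; // connects via local Docker defaults

        let output = runtime
            .run_container(
                "ubuntu:22.04",                  // image, pulled first via pull_image
                &["sh", "-c", "echo hello"],     // " &" or GITHUB_ vars trigger shell wrapping
                &[("CI", "true")],               // environment variables
                Path::new("/github/workspace"),  // working directory inside the container
                &[(Path::new("."), Path::new("/github/workspace"))], // host:container binds
            )
            .await?;

        assert_eq!(output.exit_code, 0);
        println!("{}", output.stdout);
        Ok(())
    }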
(File diff suppressed because it is too large)

288  src/main.rs
@@ -1,288 +0,0 @@
mod cleanup_test;
mod evaluator;
mod executor;
mod github;
mod logging;
mod matrix;
mod matrix_test;
mod models;
mod parser;
mod runtime;
mod ui;
mod utils;
mod validators;

use bollard::Docker;
use clap::{Parser, Subcommand};
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(
    name = "wrkflw",
    about = "GitHub Workflow validator and executor",
    version
)]
struct Wrkflw {
    #[command(subcommand)]
    command: Option<Commands>,

    /// Run in verbose mode with detailed output
    #[arg(short, long, global = true)]
    verbose: bool,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Validate GitHub workflow files
    Validate {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,
    },

    /// Execute GitHub workflow files locally
    Run {
        /// Path to workflow file to execute
        path: PathBuf,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,
    },

    /// Open TUI interface to manage workflows
    Tui {
        /// Path to workflow file or directory (defaults to .github/workflows)
        path: Option<PathBuf>,

        /// Use emulation mode instead of Docker
        #[arg(short, long)]
        emulate: bool,
    },

    /// Trigger a GitHub workflow remotely
    Trigger {
        /// Name of the workflow file (without .yml extension)
        workflow: String,

        /// Branch to run the workflow on
        #[arg(short, long)]
        branch: Option<String>,

        /// Key-value inputs for the workflow in format key=value
        #[arg(short, long, value_parser = parse_key_val)]
        input: Option<Vec<(String, String)>>,
    },

    /// List available workflows
    List,
}

// Parser function for key-value pairs
fn parse_key_val(s: &str) -> Result<(String, String), String> {
    let pos = s
        .find('=')
        .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{}`", s))?;

    Ok((s[..pos].to_string(), s[pos + 1..].to_string()))
}

// Make this function public for testing
pub async fn cleanup_on_exit() {
    // Clean up Docker resources if available
    match Docker::connect_with_local_defaults() {
        Ok(docker) => {
            executor::cleanup_resources(&docker).await;
        }
        Err(_) => {
            // Docker not available
            logging::info("Docker not available, skipping Docker cleanup");
        }
    }

    // Always clean up emulation resources
    runtime::emulation::cleanup_resources().await;

    logging::info("Resource cleanup completed");
}

async fn handle_signals() {
    // Wait for Ctrl+C
    match tokio::signal::ctrl_c().await {
        Ok(_) => {
            println!("Received Ctrl+C, shutting down and cleaning up...");
        }
        Err(e) => {
            // Log the error but continue with cleanup
            eprintln!("Warning: Failed to properly listen for ctrl+c event: {}", e);
            println!("Shutting down and cleaning up...");
        }
    }

    // Clean up containers
    cleanup_on_exit().await;

    // Exit with success status
    std::process::exit(0);
}

#[tokio::main]
async fn main() {
    let cli = Wrkflw::parse();
    let verbose = cli.verbose;

    // Setup a Ctrl+C handler that runs in the background
    tokio::spawn(handle_signals());

    match &cli.command {
        Some(Commands::Validate { path }) => {
            // Determine the path to validate
            let validate_path = path
                .clone()
                .unwrap_or_else(|| PathBuf::from(".github/workflows"));

            // Run the validation
            ui::validate_workflow(&validate_path, verbose).unwrap_or_else(|e| {
                eprintln!("Error: {}", e);
                std::process::exit(1);
            });
        }

        Some(Commands::Run { path, emulate }) => {
            // Run the workflow execution
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                // Check if Docker is available, fall back to emulation if not
                if !executor::docker::is_available() {
                    println!("⚠️ Docker is not available. Using emulation mode instead.");
                    logging::warning("Docker is not available. Using emulation mode instead.");
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                }
            };

            // Run in CLI mode with the specific workflow
            match ui::execute_workflow_cli(path, runtime_type, verbose).await {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Tui { path, emulate }) => {
            // Open the TUI interface
            let runtime_type = if *emulate {
                executor::RuntimeType::Emulation
            } else {
                // Check if Docker is available, fall back to emulation if not
                if !executor::docker::is_available() {
                    println!("⚠️ Docker is not available. Using emulation mode instead.");
                    logging::warning("Docker is not available. Using emulation mode instead.");
                    executor::RuntimeType::Emulation
                } else {
                    executor::RuntimeType::Docker
                }
            };

            match ui::run_wrkflw_tui(path.as_ref(), runtime_type, verbose).await {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::Trigger {
            workflow,
            branch,
            input,
        }) => {
            let inputs = input.as_ref().map(|kv_pairs| {
                kv_pairs
                    .iter()
                    .cloned()
                    .collect::<HashMap<String, String>>()
            });

            match github::trigger_workflow(workflow, branch.as_deref(), inputs.clone()).await {
                Ok(_) => {
                    // Success is already reported in the github module with detailed info
                }
                Err(e) => {
                    eprintln!("Error triggering workflow: {}", e);
                    std::process::exit(1);
                }
            }
        }

        Some(Commands::List) => match github::get_repo_info() {
            Ok(repo_info) => match github::list_workflows(&repo_info).await {
                Ok(workflows) => {
                    if workflows.is_empty() {
                        println!("No workflows found in the .github/workflows directory");
                    } else {
                        println!("Available workflows:");
                        for workflow in workflows {
                            println!("  {}", workflow);
                        }
                        println!("\nTrigger a workflow with: wrkflw trigger <workflow> [options]");
                    }
                }
                Err(e) => {
                    eprintln!("Error listing workflows: {}", e);
                    std::process::exit(1);
                }
            },
            Err(e) => {
                eprintln!("Error getting repository info: {}", e);
                std::process::exit(1);
            }
        },

        None => {
            // Default to TUI interface if no subcommand
            // Check if Docker is available, fall back to emulation if not
            let runtime_type = if !executor::docker::is_available() {
                println!("⚠️ Docker is not available. Using emulation mode instead.");
                logging::warning("Docker is not available. Using emulation mode instead.");
                executor::RuntimeType::Emulation
            } else {
                executor::RuntimeType::Docker
            };

            match ui::run_wrkflw_tui(
                Some(&PathBuf::from(".github/workflows")),
                runtime_type,
                verbose,
            )
            .await
            {
                Ok(_) => {
                    // Clean up on successful exit
                    cleanup_on_exit().await;
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    cleanup_on_exit().await;
                    std::process::exit(1);
                }
            }
        }
    }

    // Final cleanup before program exit
    cleanup_on_exit().await;
}
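Given the clap definitions above, the CLI surface works out to roughly the following invocations (derived from the subcommand and flag declarations in this file, not from separate documentation):

    wrkflw validate                              # defaults to .github/workflows
    wrkflw validate path/to/workflow.yml
    wrkflw run path/to/workflow.yml --emulate    # -e also works (short flag)
    wrkflw tui
    wrkflw trigger deploy --branch main --input version=1.2.3
    wrkflw list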
@@ -1,129 +0,0 @@
#[cfg(test)]
mod tests {
    use crate::matrix::{self, MatrixCombination, MatrixConfig};
    use indexmap::IndexMap;
    use serde_yaml::Value;
    use std::collections::HashMap;

    fn create_test_matrix() -> MatrixConfig {
        let mut matrix = MatrixConfig::default();

        // Add basic parameters
        let mut params = IndexMap::new();

        // Add 'os' parameter with array values
        let os_array = vec![
            Value::String("ubuntu".to_string()),
            Value::String("windows".to_string()),
            Value::String("macos".to_string()),
        ];
        params.insert("os".to_string(), Value::Sequence(os_array));

        // Add 'node' parameter with array values
        let node_array = vec![
            Value::Number(serde_yaml::Number::from(14)),
            Value::Number(serde_yaml::Number::from(16)),
        ];
        params.insert("node".to_string(), Value::Sequence(node_array));

        matrix.parameters = params;

        // Add exclude pattern
        let mut exclude_item = HashMap::new();
        exclude_item.insert("os".to_string(), Value::String("windows".to_string()));
        exclude_item.insert(
            "node".to_string(),
            Value::Number(serde_yaml::Number::from(14)),
        );
        matrix.exclude = vec![exclude_item];

        // Add include pattern
        let mut include_item = HashMap::new();
        include_item.insert("os".to_string(), Value::String("ubuntu".to_string()));
        include_item.insert(
            "node".to_string(),
            Value::Number(serde_yaml::Number::from(18)),
        );
        include_item.insert("experimental".to_string(), Value::Bool(true));
        matrix.include = vec![include_item];

        // Set max-parallel
        matrix.max_parallel = Some(2);

        // Set fail-fast
        matrix.fail_fast = Some(true);

        matrix
    }

    #[test]
    fn test_matrix_expansion() {
        let matrix = create_test_matrix();

        // Expand the matrix
        let combinations = matrix::expand_matrix(&matrix).unwrap();

        // We should have 6 combinations:
        // 3 OS x 2 Node versions = 6 base combinations
        // - 1 excluded (windows + node 14)
        // + 1 included (ubuntu + node 18 + experimental)
        // = 6 total combinations
        assert_eq!(combinations.len(), 6);

        // Check that the excluded combination is not present
        let excluded =
            combinations
                .iter()
                .find(|c| match (c.values.get("os"), c.values.get("node")) {
                    (Some(Value::String(os)), Some(Value::Number(node))) => {
                        os == "windows" && node.as_u64() == Some(14)
                    }
                    _ => false,
                });
        assert!(
            excluded.is_none(),
            "Excluded combination should not be present"
        );

        // Check that the included combination is present
        let included = combinations.iter().find(|c| {
            match (
                c.values.get("os"),
                c.values.get("node"),
                c.values.get("experimental"),
            ) {
                (Some(Value::String(os)), Some(Value::Number(node)), Some(Value::Bool(exp))) => {
                    os == "ubuntu" && node.as_u64() == Some(18) && *exp
                }
                _ => false,
            }
        });
        assert!(included.is_some(), "Included combination should be present");
        assert!(
            included.unwrap().is_included,
            "Combination should be marked as included"
        );
    }

    #[test]
    fn test_format_combination_name() {
        let mut values = HashMap::new();
        values.insert("os".to_string(), Value::String("ubuntu".to_string()));
        values.insert(
            "node".to_string(),
            Value::Number(serde_yaml::Number::from(14)),
        );

        let combination = MatrixCombination {
            values,
            is_included: false,
        };

        let formatted = matrix::format_combination_name("test-job", &combination);

        // Should format as "test-job (os: ubuntu, node: 14)" or similar
        assert!(formatted.contains("test-job"));
        assert!(formatted.contains("os: ubuntu"));
        assert!(formatted.contains("node: 14"));
    }
}
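The fixture built by create_test_matrix corresponds to workflow YAML along these lines (a sketch of the equivalent GitHub Actions strategy block, not a file from this diff):

    strategy:
      fail-fast: true
      max-parallel: 2
      matrix:
        os: [ubuntu, windows, macos]
        node: [14, 16]
        exclude:
          - os: windows
            node: 14
        include:
          - os: ubuntu
            node: 18
            experimental: true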
@@ -1,18 +0,0 @@
pub struct ValidationResult {
    pub is_valid: bool,
    pub issues: Vec<String>,
}

impl ValidationResult {
    pub fn new() -> Self {
        ValidationResult {
            is_valid: true,
            issues: Vec::new(),
        }
    }

    pub fn add_issue(&mut self, issue: String) {
        self.is_valid = false;
        self.issues.push(issue);
    }
}
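A result starts out valid and flips permanently once the first issue is recorded; a minimal usage sketch (the issue text is a made-up example):

    let mut result = ValidationResult::new();
    assert!(result.is_valid);

    result.add_issue("jobs.build: missing runs-on".to_string()); // hypothetical message
    assert!(!result.is_valid);
    assert_eq!(result.issues.len(), 1);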
@@ -1 +0,0 @@
pub mod workflow;
@@ -1,562 +0,0 @@
|
||||
use crate::logging;
|
||||
use crate::runtime::container::{ContainerError, ContainerOutput, ContainerRuntime};
|
||||
use async_trait::async_trait;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::sync::Mutex;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Global collection of resources to clean up
|
||||
static EMULATION_WORKSPACES: Lazy<Mutex<Vec<PathBuf>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
static EMULATION_PROCESSES: Lazy<Mutex<Vec<u32>>> = Lazy::new(|| Mutex::new(Vec::new()));
|
||||
|
||||
pub struct EmulationRuntime {
|
||||
workspace: TempDir,
|
||||
}
|
||||
|
||||
impl EmulationRuntime {
|
||||
pub fn new() -> Self {
|
||||
// Create a temporary workspace to simulate container isolation
|
||||
let workspace =
|
||||
tempfile::tempdir().expect("Failed to create temporary workspace for emulation");
|
||||
|
||||
// Track this workspace for cleanup
|
||||
if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
|
||||
workspaces.push(workspace.path().to_path_buf());
|
||||
}
|
||||
|
||||
EmulationRuntime { workspace }
|
||||
}
|
||||
|
||||
fn prepare_workspace(&self, _working_dir: &Path, volumes: &[(&Path, &Path)]) -> PathBuf {
|
||||
// Get the container root - this is the emulation workspace directory
|
||||
let container_root = self.workspace.path().to_path_buf();
|
||||
|
||||
// Make sure we have a github/workspace subdirectory which is where
|
||||
// commands will be executed
|
||||
let github_workspace = container_root.join("github").join("workspace");
|
||||
fs::create_dir_all(&github_workspace)
|
||||
.expect("Failed to create github/workspace directory structure");
|
||||
|
||||
// Map all volumes
|
||||
for (host_path, container_path) in volumes {
|
||||
// Determine target path - if it starts with /github/workspace, it goes to our workspace dir
|
||||
let target_path = if container_path.starts_with("/github/workspace") {
|
||||
// Map /github/workspace to our github_workspace directory
|
||||
let rel_path = container_path
|
||||
.strip_prefix("/github/workspace")
|
||||
.unwrap_or(Path::new(""));
|
||||
github_workspace.join(rel_path)
|
||||
} else if container_path.starts_with("/") {
|
||||
// Other absolute paths go under container_root
|
||||
container_root.join(container_path.strip_prefix("/").unwrap_or(container_path))
|
||||
} else {
|
||||
// Relative paths go directly under container_root
|
||||
container_root.join(container_path)
|
||||
};
|
||||
|
||||
// Create parent directories
|
||||
if let Some(parent) = target_path.parent() {
|
||||
fs::create_dir_all(parent).expect("Failed to create directory structure");
|
||||
}
|
||||
|
||||
// For directories, copy content recursively
|
||||
if host_path.is_dir() {
|
||||
// If the host path is the project root and container path is the workspace,
|
||||
// we want to copy all project files to the github/workspace directory
|
||||
if *container_path == Path::new("/github/workspace") {
|
||||
// Use a recursive copy function to copy all files and directories
|
||||
copy_directory_contents(host_path, &github_workspace)
|
||||
.expect("Failed to copy project files to workspace");
|
||||
} else {
|
||||
// Create the target directory
|
||||
fs::create_dir_all(&target_path).expect("Failed to create target directory");
|
||||
|
||||
// Copy files in this directory (not recursive for simplicity)
|
||||
for entry in fs::read_dir(host_path)
|
||||
.expect("Failed to read source directory")
|
||||
.flatten()
|
||||
{
|
||||
let source = entry.path();
|
||||
let file_name = match source.file_name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
eprintln!(
|
||||
"Warning: Could not get file name from path: {:?}",
|
||||
source
|
||||
);
|
||||
continue; // Skip this file
|
||||
}
|
||||
};
|
||||
let dest = target_path.join(file_name);
|
||||
|
||||
if source.is_file() {
|
||||
if let Err(e) = fs::copy(&source, &dest) {
|
||||
eprintln!(
|
||||
"Warning: Failed to copy file from {:?} to {:?}: {}",
|
||||
&source, &dest, e
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// We could make this recursive if needed
|
||||
fs::create_dir_all(&dest).expect("Failed to create subdirectory");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if host_path.is_file() {
|
||||
// Copy individual file
|
||||
let file_name = match host_path.file_name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
eprintln!(
|
||||
"Warning: Could not get file name from path: {:?}",
|
||||
host_path
|
||||
);
|
||||
continue; // Skip this file
|
||||
}
|
||||
};
|
||||
let dest = target_path.join(file_name);
|
||||
if let Err(e) = fs::copy(host_path, &dest) {
|
||||
eprintln!(
|
||||
"Warning: Failed to copy file from {:?} to {:?}: {}",
|
||||
host_path, &dest, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Return the github/workspace directory for command execution
|
||||
github_workspace
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ContainerRuntime for EmulationRuntime {
|
||||
async fn run_container(
|
||||
&self,
|
||||
image: &str,
|
||||
cmd: &[&str],
|
||||
env_vars: &[(&str, &str)],
|
||||
working_dir: &Path,
|
||||
volumes: &[(&Path, &Path)],
|
||||
) -> Result<ContainerOutput, ContainerError> {
|
||||
// Print emulation info
|
||||
logging::info(&format!("Emulating container: {}", image));
|
||||
// Prepare the workspace
|
||||
let container_working_dir = self.prepare_workspace(working_dir, volumes);
|
||||
|
||||
// Detect if this is a long-running command that should be spawned as a detached process
|
||||
let is_long_running = cmd.iter().any(|&c| {
|
||||
c.contains("server")
|
||||
|| c.contains("daemon")
|
||||
|| c.contains("listen")
|
||||
|| c.contains("watch")
|
||||
|| c.contains("-d")
|
||||
|| c.contains("--detach")
|
||||
});
|
||||
|
||||
if is_long_running {
|
||||
logging::info("Detected long-running command, will run detached");
|
||||
|
||||
let mut command = Command::new(cmd[0]);
|
||||
command.current_dir(&container_working_dir);
|
||||
|
||||
// Add all arguments
|
||||
for arg in &cmd[1..] {
|
||||
command.arg(arg);
|
||||
}
|
||||
|
||||
// Set environment variables
|
||||
for (key, value) in env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
// Run detached
|
||||
match command.spawn() {
|
||||
Ok(child) => {
|
||||
let pid = child.id();
|
||||
track_process(pid);
|
||||
logging::info(&format!("Started detached process with PID: {}", pid));
|
||||
|
||||
return Ok(ContainerOutput {
|
||||
stdout: format!("Started long-running process with PID: {}", pid),
|
||||
stderr: String::new(),
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(ContainerError::ContainerExecution(format!(
|
||||
"Failed to start detached process: {}",
|
||||
e
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For Nix-specific commands, ensure Nix is installed
|
||||
let contains_nix_command = cmd.iter().any(|&arg| arg.contains("nix "));
|
||||
|
||||
if contains_nix_command {
|
||||
let nix_installed = Command::new("which")
|
||||
.arg("nix")
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !nix_installed {
|
||||
logging::info("⚠️ Nix commands detected but Nix is not installed!");
|
||||
logging::info(
|
||||
"🔄 To use this workflow, please install Nix: https://nixos.org/download.html",
|
||||
);
|
||||
|
||||
return Ok(ContainerOutput {
|
||||
stdout: String::new(),
|
||||
stderr: "Nix is required for this workflow but not installed on your system.\nPlease install Nix first: https://nixos.org/download.html".to_string(),
|
||||
exit_code: 1,
|
||||
});
|
||||
} else {
|
||||
logging::info("✅ Nix is installed, proceeding with command");
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure we have a command
|
||||
if cmd.is_empty() {
|
||||
return Err(ContainerError::ContainerExecution(
|
||||
"No command specified".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let has_background = cmd.iter().any(|c| c.contains(" &"));
|
||||
|
||||
// For bash/sh with -c, handle specially
|
||||
if (cmd[0] == "bash" || cmd[0] == "sh")
|
||||
&& cmd.len() >= 2
|
||||
&& (cmd[1] == "-c" || cmd[1] == "-e" || cmd[1] == "-ec")
|
||||
{
|
||||
let shell = cmd[0];
|
||||
|
||||
// Find the index of -c flag (could be -e -c or just -c)
|
||||
let c_flag_index = cmd.iter().position(|&arg| arg == "-c");
|
||||
|
||||
if let Some(idx) = c_flag_index {
|
||||
// Ensure there's an argument after -c
|
||||
if idx + 1 < cmd.len() {
|
||||
// Get the actual command
|
||||
let command_str = cmd[idx + 1];
|
||||
|
||||
// Handle GitHub variables properly
|
||||
let fixed_cmd = command_str
|
||||
.replace(">>$GITHUB_OUTPUT", ">>\"$GITHUB_OUTPUT\"")
|
||||
.replace(">>$GITHUB_ENV", ">>\"$GITHUB_ENV\"")
|
||||
.replace(">>$GITHUB_PATH", ">>\"$GITHUB_PATH\"")
|
||||
.replace(">>$GITHUB_STEP_SUMMARY", ">>\"$GITHUB_STEP_SUMMARY\"");
|
||||
|
||||
// If we have background processes, add a wait command
|
||||
let final_cmd = if has_background && !fixed_cmd.contains(" wait") {
|
||||
format!("{{ {}; }} && wait", fixed_cmd)
|
||||
} else {
|
||||
fixed_cmd
|
||||
};
|
||||
|
||||
// Create command
|
||||
let mut command = Command::new(shell);
|
||||
command.current_dir(&container_working_dir);
|
||||
|
||||
// Add flags
|
||||
for arg in cmd.iter().skip(1).take(idx) {
|
||||
command.arg(arg);
|
||||
}
|
||||
|
||||
// Add the command
|
||||
command.arg(final_cmd);
|
||||
|
||||
// Set environment variables
|
||||
for (key, value) in env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
// Execute
|
||||
let output = command
|
||||
.output()
|
||||
.map_err(|e| ContainerError::ContainerExecution(e.to_string()))?;
|
||||
|
||||
return Ok(ContainerOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
|
||||
exit_code: output.status.code().unwrap_or(-1),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if has_background {
|
||||
// For commands with background processes, use shell wrapper
|
||||
let mut shell_command = Command::new("sh");
|
||||
shell_command.current_dir(&container_working_dir);
|
||||
shell_command.arg("-c");
|
||||
|
||||
// Join the original command and add trap for cleanup
|
||||
let command_str = format!("{{ {}; }} && wait", cmd.join(" "));
|
||||
shell_command.arg(command_str);
|
||||
|
||||
// Set environment variables
|
||||
for (key, value) in env_vars {
|
||||
shell_command.env(key, value);
|
||||
}
|
||||
|
||||
// Log that we're running a background process
|
||||
logging::info("Emulation: Running command with background processes");
|
||||
|
||||
// For commands with background processes, we could potentially track PIDs
|
||||
// However, since they're in a shell wrapper, we'd need to parse them from output
|
||||
|
||||
let output = shell_command
|
||||
.output()
|
||||
.map_err(|e| ContainerError::ContainerExecution(e.to_string()))?;
|
||||
|
||||
return Ok(ContainerOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
|
||||
exit_code: output.status.code().unwrap_or(-1),
|
||||
});
|
||||
}
|
||||
|
||||
// For all other commands
|
||||
let mut command = Command::new(cmd[0]);
|
||||
command.current_dir(&container_working_dir);
|
||||
|
||||
// Add all arguments
|
||||
for arg in &cmd[1..] {
|
||||
command.arg(arg);
|
||||
}
|
||||
|
||||
// Set environment variables
|
||||
for (key, value) in env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
// Execute
|
||||
let output = command
|
||||
.output()
|
||||
.map_err(|e| ContainerError::ContainerExecution(e.to_string()))?;
|
||||
|
||||
Ok(ContainerOutput {
|
||||
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
|
||||
exit_code: output.status.code().unwrap_or(-1),
|
||||
})
|
||||
}
|
||||
|
||||
async fn pull_image(&self, image: &str) -> Result<(), ContainerError> {
|
||||
logging::info(&format!("🔄 Emulation: Pretending to pull image {}", image));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError> {
|
||||
logging::info(&format!(
|
||||
"🔄 Emulation: Pretending to build image {} from {}",
|
||||
tag,
|
||||
dockerfile.display()
|
||||
));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function for recursive directory copying
|
||||
fn copy_directory_contents(source: &Path, dest: &Path) -> std::io::Result<()> {
|
||||
// Create the destination directory if it doesn't exist
|
||||
fs::create_dir_all(dest)?;
|
||||
|
||||
// Iterate through all entries in the source directory
|
||||
for entry in fs::read_dir(source)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
let file_name = match path.file_name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
eprintln!("Warning: Could not get file name from path: {:?}", path);
|
||||
continue; // Skip this file
|
||||
}
|
||||
};
|
||||
let dest_path = dest.join(file_name);
|
||||
|
||||
// Skip hidden files (except .gitignore and .github might be useful)
|
||||
let file_name_str = file_name.to_string_lossy();
|
||||
if file_name_str.starts_with(".")
|
||||
&& file_name_str != ".gitignore"
|
||||
&& file_name_str != ".github"
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip target directory for Rust projects
|
||||
if file_name_str == "target" {
|
||||
continue;
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
// Recursively copy subdirectories
|
||||
copy_directory_contents(&path, &dest_path)?;
|
||||
} else {
|
||||
// Copy files
|
||||
fs::copy(&path, &dest_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn handle_special_action(action: &str) -> Result<(), ContainerError> {
|
||||
if action.starts_with("cachix/install-nix-action") {
|
||||
logging::info("🔄 Emulating cachix/install-nix-action");
|
||||
|
||||
// In emulation mode, check if nix is installed
|
||||
let nix_installed = Command::new("which")
|
||||
.arg("nix")
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false);
|
||||
|
||||
if !nix_installed {
|
||||
logging::info("🔄 Emulation: Nix is required but not installed.");
|
||||
logging::info(
|
||||
"🔄 To use this workflow, please install Nix: https://nixos.org/download.html",
|
||||
);
|
||||
logging::info("🔄 Continuing emulation, but nix commands will fail.");
|
||||
} else {
|
||||
logging::info("🔄 Emulation: Using system-installed Nix");
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
// Ignore other actions in emulation mode
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Function to clean up emulation resources
|
||||
pub async fn cleanup_resources() {
|
||||
cleanup_processes().await;
|
||||
cleanup_workspaces().await;
|
||||
}

// Clean up any tracked processes
async fn cleanup_processes() {
    let processes_to_cleanup = {
        if let Ok(processes) = EMULATION_PROCESSES.lock() {
            processes.clone()
        } else {
            vec![]
        }
    };

    for pid in processes_to_cleanup {
        logging::info(&format!("Cleaning up emulated process: {}", pid));

        #[cfg(unix)]
        {
            // On Unix-like systems, use the kill command
            let _ = Command::new("kill")
                .arg("-TERM")
                .arg(pid.to_string())
                .output();
        }

        #[cfg(windows)]
        {
            // On Windows, use taskkill
            let _ = Command::new("taskkill")
                .arg("/F")
                .arg("/PID")
                .arg(pid.to_string())
                .output();
        }

        // Remove from tracking
        if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
            processes.retain(|p| *p != pid);
        }
    }
}

// Clean up any tracked workspaces
async fn cleanup_workspaces() {
    let workspaces_to_cleanup = {
        if let Ok(workspaces) = EMULATION_WORKSPACES.lock() {
            workspaces.clone()
        } else {
            vec![]
        }
    };

    for workspace_path in workspaces_to_cleanup {
        logging::info(&format!(
            "Cleaning up emulation workspace: {}",
            workspace_path.display()
        ));

        // Only attempt to remove it if it exists
        if workspace_path.exists() {
            match fs::remove_dir_all(&workspace_path) {
                Ok(_) => logging::info("Successfully removed workspace directory"),
                Err(e) => logging::error(&format!("Error removing workspace: {}", e)),
            }
        }

        // Remove from tracking
        if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
            workspaces.retain(|w| *w != workspace_path);
        }
    }
}

// Add process to tracking
pub fn track_process(pid: u32) {
    if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
        processes.push(pid);
    }
}

// Remove process from tracking
#[allow(dead_code)]
pub fn untrack_process(pid: u32) {
    if let Ok(mut processes) = EMULATION_PROCESSES.lock() {
        processes.retain(|p| *p != pid);
    }
}

// Track additional workspace paths if needed
#[allow(dead_code)]
pub fn track_workspace(path: &Path) {
    if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.push(path.to_path_buf());
    }
}

// Remove workspace from tracking
#[allow(dead_code)]
pub fn untrack_workspace(path: &Path) {
    if let Ok(mut workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.retain(|w| *w != path);
    }
}

// Public accessor functions for testing
#[cfg(test)]
pub fn get_tracked_workspaces() -> Vec<PathBuf> {
    if let Ok(workspaces) = EMULATION_WORKSPACES.lock() {
        workspaces.clone()
    } else {
        vec![]
    }
}

#[cfg(test)]
pub fn get_tracked_processes() -> Vec<u32> {
    if let Ok(processes) = EMULATION_PROCESSES.lock() {
        processes.clone()
    } else {
        vec![]
    }
}
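
A sketch of how the test-only accessors pair with the tracking functions above. The PID is arbitrary, the module name is made up, and the tracking lists are shared global state, so this is illustrative rather than a hermetic test:

    #[cfg(test)]
    mod tracking_sketch {
        use super::*;

        #[test]
        fn process_tracking_round_trip() {
            track_process(424242); // arbitrary PID for illustration
            assert!(get_tracked_processes().contains(&424242));
            untrack_process(424242);
            assert!(!get_tracked_processes().contains(&424242));
        }
    }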
21
test-workflows/1-basic-workflow.yml
Normal file
@@ -0,0 +1,21 @@
name: Basic Workflow

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '16'
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        run: npm test
20
test-workflows/2-reusable-workflow-caller.yml
Normal file
@@ -0,0 +1,20 @@
name: Reusable Workflow Caller

on:
  push:
    branches: [main]

jobs:
  call-workflow-1:
    uses: octo-org/example-repo/.github/workflows/workflow-A.yml@v1

  call-workflow-2:
    uses: ./local-workflows/build.yml
    with:
      config-path: ./config/test.yml
    secrets:
      token: ${{ secrets.GITHUB_TOKEN }}

  call-workflow-3:
    uses: octo-org/example-repo/.github/workflows/workflow-B.yml@main
    needs: [call-workflow-1]
32
test-workflows/3-reusable-workflow-definition.yml
Normal file
@@ -0,0 +1,32 @@
name: Reusable Workflow Definition

on:
  workflow_call:
    inputs:
      config-path:
        required: true
        type: string
        description: "Path to the configuration file"
      environment:
        required: false
        type: string
        default: "production"
        description: "Environment to run in"
    secrets:
      token:
        required: true
        description: "GitHub token for authentication"

jobs:
  reusable-job:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Load configuration
        run: echo "Loading configuration from ${{ inputs.config-path }}"
      - name: Run in environment
        run: echo "Running in ${{ inputs.environment }} environment"
      - name: Use secret
        run: echo "Using secret with length ${#TOKEN}"
        env:
          TOKEN: ${{ secrets.token }}
25
test-workflows/4-mixed-jobs.yml
Normal file
@@ -0,0 +1,25 @@
name: Mixed Regular and Reusable Jobs

on:
  push:
    branches: [main]

jobs:
  regular-job:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Run regular task
        run: echo "This is a regular job"

  reusable-job:
    uses: octo-org/example-repo/.github/workflows/reusable.yml@main
    with:
      parameter: "value"

  dependent-job:
    runs-on: ubuntu-latest
    needs: [regular-job, reusable-job]
    steps:
      - name: Run dependent task
        run: echo "This job depends on both a regular and reusable job"
12
test-workflows/5-no-name-reusable-caller.yml
Normal file
@@ -0,0 +1,12 @@
on:
  push:
    branches: [main]

jobs:
  call-workflow-1:
    uses: octo-org/example-repo/.github/workflows/workflow-A.yml@v1

  call-workflow-2:
    uses: ./local-workflows/build.yml
    with:
      config-path: ./config/test.yml
17
test-workflows/6-invalid-reusable-format.yml
Normal file
@@ -0,0 +1,17 @@
name: Invalid Reusable Format

on:
  push:
    branches: [main]

jobs:
  valid-job:
    runs-on: ubuntu-latest
    steps:
      - name: Test step
        run: echo "This is a valid job"

  invalid-reusable-job:
    uses: invalid-format
    with:
      param: "value"
19
test-workflows/7-invalid-regular-job.yml
Normal file
@@ -0,0 +1,19 @@
name: Invalid Regular Job

on:
  push:
    branches: [main]

jobs:
  job-missing-runs-on:
    # Missing runs-on field
    steps:
      - name: Test step
        run: echo "This job is missing runs-on field"

  job-missing-steps:
    runs-on: ubuntu-latest
    # Missing steps section

  valid-reusable-job:
    uses: octo-org/example-repo/.github/workflows/reusable.yml@main
31
test-workflows/8-cyclic-dependencies.yml
Normal file
@@ -0,0 +1,31 @@
name: Cyclic Dependencies

on:
  push:
    branches: [main]

jobs:
  job-a:
    runs-on: ubuntu-latest
    needs: [job-c]
    steps:
      - name: Job A
        run: echo "Job A"

  job-b:
    runs-on: ubuntu-latest
    needs: [job-a]
    steps:
      - name: Job B
        run: echo "Job B"

  job-c:
    runs-on: ubuntu-latest
    needs: [job-b]
    steps:
      - name: Job C
        run: echo "Job C"

  reusable-job:
    uses: octo-org/example-repo/.github/workflows/reusable.yml@main
    needs: [job-a]
38
test-workflows/cpp-test.yml
Normal file
@@ -0,0 +1,38 @@
name: C++ Test

on:
  push:
    branches: [ main ]
  pull_request:

jobs:
  test:
    name: Test C++
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup GCC
        uses: egor-tensin/setup-gcc@v1
        with:
          version: 11

      - name: Check GCC version
        run: g++ --version

      - name: Create simple program
        run: |
          echo '#include <iostream>' > hello.cpp
          echo 'int main() {' >> hello.cpp
          echo '  std::cout << "Hello from C++!" << std::endl;' >> hello.cpp
          echo '  std::cout << "Running on GCC" << std::endl;' >> hello.cpp
          echo '  return 0;' >> hello.cpp
          echo '}' >> hello.cpp

      - name: Build C++ program
        run: g++ hello.cpp -o hello

      - name: Run C++ program
        run: ./hello
26
test-workflows/example.yml
Normal file
@@ -0,0 +1,26 @@
name: Basic Workflow Example

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

env:
  GLOBAL_VAR: "global value"

jobs:
  test-job:
    runs-on: ubuntu-latest
    steps:
      - name: Echo Hello
        run: echo "Hello World"

      - name: Show Environment
        run: echo "Using global var: $GLOBAL_VAR"

      - name: Run Multiple Commands
        run: |
          echo "This is a multi-line command"
          echo "Current directory: $PWD"
          ls -la
44
test-workflows/matrix-example.yml
Normal file
@@ -0,0 +1,44 @@
name: Matrix Example

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]

env:
  GLOBAL_VAR: "This applies to all jobs"

jobs:
  test:
    name: "Test"
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        node-version: [14, 16, 18]
        include:
          - os: ubuntu-latest
            node-version: 20
            experimental: true
        exclude:
          - os: windows-latest
            node-version: 14
      fail-fast: false
      max-parallel: 2

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}

      - name: Show configuration
        run: |
          echo "Running on: ${{ matrix.os }}"
          echo "Node version: ${{ matrix.node-version }}"
          if [ "${{ matrix.experimental }}" = "true" ]; then
            echo "This is an experimental configuration"
          fi
31
test-workflows/node-test.yml
Normal file
@@ -0,0 +1,31 @@
name: Node.js Test

on:
  push:
    branches: [ main ]
  pull_request:

jobs:
  test:
    name: Test Node.js
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '16.x'

      - name: Check Node.js version
        run: node --version

      - name: Create simple script
        run: |
          echo 'console.log("Hello from Node.js!");' > test.js
          echo 'console.log(`Node.js version: ${process.version}`);' >> test.js

      - name: Run Node.js script
        run: node test.js
31
test-workflows/python-test.yml
Normal file
@@ -0,0 +1,31 @@
name: Python Test

on:
  push:
    branches: [ main ]
  pull_request:

jobs:
  test:
    name: Test Python
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Check Python version
        run: python3 --version

      - name: Create simple script
        run: |
          echo 'print("Hello from Python!")' > test.py
          echo 'import sys; print(f"Python version: {sys.version}")' >> test.py

      - name: Run Python script
        run: python3 test.py
38
test-workflows/rust-test.yml
Normal file
@@ -0,0 +1,38 @@
name: Rust Test

on:
  push:
    branches: [ main ]
  pull_request:

jobs:
  test:
    name: Test Rust
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true

      - name: Check Rust version
        run: rustc --version

      - name: Create simple program
        run: |
          echo 'fn main() {' > hello.rs
          echo '    println!("Hello from Rust!");' >> hello.rs
          echo '    println!("Running on Rust");' >> hello.rs
          echo '}' >> hello.rs

      - name: Build Rust program
        run: rustc hello.rs -o hello

      - name: Run Rust program
        run: ./hello
79
test-workflows/trigger_gitlab.sh
Executable file
@@ -0,0 +1,79 @@
#!/bin/bash
# Example script to trigger GitLab pipelines using wrkflw

# Check if GITLAB_TOKEN is set
if [ -z "${GITLAB_TOKEN}" ]; then
    echo "Error: GITLAB_TOKEN environment variable is not set."
    echo "Please set it with: export GITLAB_TOKEN=your_token_here"
    exit 1
fi

# Ensure we're in a Git repository
if ! git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
    echo "Error: Not in a Git repository."
    echo "Please run this script from within a Git repository with a GitLab remote."
    exit 1
fi

# Check for .gitlab-ci.yml file
if [ ! -f .gitlab-ci.yml ]; then
    echo "Warning: No .gitlab-ci.yml file found in the current directory."
    echo "The pipeline trigger might fail if there is no pipeline configuration."
fi

# Function to display help
show_help() {
    echo "GitLab Pipeline Trigger Examples"
    echo "--------------------------------"
    echo "Usage: $0 [example-number]"
    echo ""
    echo "Available examples:"
    echo "  1: Trigger default pipeline on the current branch"
    echo "  2: Trigger pipeline on main branch"
    echo "  3: Trigger release build"
    echo "  4: Trigger documentation build"
    echo "  5: Trigger pipeline with multiple variables"
    echo ""
    echo "For custom commands, modify this script or run wrkflw directly:"
    echo "  wrkflw trigger-gitlab [options]"
}

# No arguments, show help
if [ $# -eq 0 ]; then
    show_help
    exit 0
fi

# Handle examples
case "$1" in
    "1")
        echo "Triggering default pipeline on the current branch..."
        wrkflw trigger-gitlab
        ;;

    "2")
        echo "Triggering pipeline on main branch..."
        wrkflw trigger-gitlab --branch main
        ;;

    "3")
        echo "Triggering release build..."
        wrkflw trigger-gitlab --variable BUILD_RELEASE=true
        ;;

    "4")
        echo "Triggering documentation build..."
        wrkflw trigger-gitlab --variable BUILD_DOCS=true
        ;;

    "5")
        echo "Triggering pipeline with multiple variables..."
        wrkflw trigger-gitlab --variable BUILD_RELEASE=true --variable BUILD_DOCS=true
        ;;

    *)
        echo "Unknown example: $1"
        show_help
        exit 1
        ;;
esac
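
A quick usage sketch, assuming the script is saved at test-workflows/trigger_gitlab.sh and wrkflw is on PATH; the token value is a placeholder:

    # Hypothetical invocation from the repository root:
    export GITLAB_TOKEN=your_token_here
    ./test-workflows/trigger_gitlab.sh 3   # example 3: trigger a release build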
Some files were not shown because too many files have changed in this diff.