Mirror of https://github.com/bahdotsh/wrkflw.git (synced 2025-12-15 19:27:44 +01:00)
feat(gitlab): add comprehensive GitLab CI/CD pipeline support
This commit adds full support for GitLab CI/CD pipelines:

- Add GitLab CI pipeline models with complete spec support (jobs, stages, artifacts, cache, etc.)
- Implement GitLab CI/CD pipeline parsing and validation
- Add schema validation against the GitLab CI JSON schema
- Support automatic pipeline type detection based on filename and content
- Add GitLab-specific CLI commands and flags
- Implement pipeline conversion for executor compatibility
- Add validation for common GitLab CI configuration issues
- Update CLI help text to reflect GitLab CI/CD support
- Support listing both GitHub and GitLab pipeline files

This expands wrkflw to be a multi-CI tool that can validate and execute both GitHub Actions workflows and GitLab CI/CD pipelines locally.
@@ -2,38 +2,30 @@
# This pipeline will build and test the Rust project

stages:
  - lint
  - build
  - test
  - release
  - deploy

variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/.cargo
  RUST_VERSION: stable
  RUST_VERSION: "1.70.0"
  CARGO_TERM_COLOR: always

# Cache dependencies between jobs
# Cache settings
cache:
  key: "$CI_COMMIT_REF_SLUG"
  paths:
    - .cargo/
    - target/
  script:
    - echo "This is a placeholder - the cache directive doesn't need a script"

# Lint job - runs rustfmt and clippy
lint:
  stage: lint
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add rustfmt clippy
    - cargo fmt -- --check
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: never
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always
  allow_failure: true

# Build job - builds the application
build:
@@ -43,17 +35,8 @@ build:
    - cargo build --verbose
  artifacts:
    paths:
      - target/debug/wrkflw
      - target/debug
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always

# Test job - runs unit and integration tests
test:
@@ -61,21 +44,12 @@ test:
  image: rust:${RUST_VERSION}
  script:
    - cargo test --verbose
  needs:
  dependencies:
    - build
  rules:
    - if: $CI_PIPELINE_SOURCE == "web"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      when: always

# Release job - creates a release build
release:
  stage: release
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo build --release --verbose
@@ -92,16 +66,35 @@ release:

# Custom job for documentation
docs:
  stage: release
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - target/doc/
  rules:
    - if: $CI_PIPELINE_SOURCE == "web" && $BUILD_DOCS == "true"
      when: always
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: always
    - when: never
      - public
  only:
    - main

format:
  stage: test
  image: rust:${RUST_VERSION}
  script:
    - rustup component add rustfmt
    - cargo fmt --check
  allow_failure: true

pages:
  stage: deploy
  image: rust:${RUST_VERSION}
  script:
    - cargo doc --no-deps
    - mkdir -p public
    - cp -r target/doc/* public/
  artifacts:
    paths:
      - public
  only:
    - main
Cargo.lock (generated, 31 lines changed)
@@ -1506,6 +1506,8 @@ dependencies = [
 "serde",
 "serde_json",
 "serde_yaml",
 "tempfile",
 "thiserror",
]

[[package]]
@@ -1768,6 +1770,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"

[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
 "winapi-util",
]

[[package]]
name = "schannel"
version = "0.1.27"
@@ -2323,6 +2334,16 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"

[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
 "same-file",
 "winapi-util",
]

[[package]]
name = "want"
version = "0.3.1"
@@ -2444,6 +2465,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"

[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
 "windows-sys 0.59.0",
]

[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
@@ -2688,6 +2718,7 @@ dependencies = [
 "utils",
 "uuid",
 "validators",
 "walkdir",
]

[[package]]
@@ -13,6 +13,8 @@ use crate::docker;
use crate::environment;
use logging;
use matrix::MatrixCombination;
use models::gitlab::Pipeline;
use parser::gitlab::{self, parse_pipeline};
use parser::workflow::{self, parse_workflow, ActionInfo, Job, WorkflowDefinition};
use runtime::container::ContainerRuntime;
use runtime::emulation;
@@ -27,6 +29,51 @@ pub async fn execute_workflow(
    logging::info(&format!("Executing workflow: {}", workflow_path.display()));
    logging::info(&format!("Runtime: {:?}", runtime_type));

    // Determine if this is a GitLab CI/CD pipeline or GitHub Actions workflow
    let is_gitlab = is_gitlab_pipeline(workflow_path);

    if is_gitlab {
        execute_gitlab_pipeline(workflow_path, runtime_type, verbose).await
    } else {
        execute_github_workflow(workflow_path, runtime_type, verbose).await
    }
}

/// Determine if a file is a GitLab CI/CD pipeline
fn is_gitlab_pipeline(path: &Path) -> bool {
    // Check the file name
    if let Some(file_name) = path.file_name() {
        if let Some(file_name_str) = file_name.to_str() {
            return file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml");
        }
    }

    // If file name check fails, try to read and determine by content
    if let Ok(content) = fs::read_to_string(path) {
        // GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
        if content.contains("stages:")
            || content.contains("before_script:")
            || content.contains("after_script:")
        {
            // Check for GitHub Actions specific keys that would indicate it's not GitLab
            if !content.contains("on:")
                && !content.contains("runs-on:")
                && !content.contains("uses:")
            {
                return true;
            }
        }
    }

    false
}
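For reference, a standalone sketch of the same name-then-content heuristic (a simplified illustration, not the crate's actual function; the file names and snippets below are hypothetical):

use std::path::Path;

// Simplified sketch: check the filename first, then look for top-level keys.
fn looks_like_gitlab_ci(path: &Path, content: &str) -> bool {
    let by_name = path
        .file_name()
        .and_then(|n| n.to_str())
        .map(|n| n == ".gitlab-ci.yml" || n.ends_with("gitlab-ci.yml"))
        .unwrap_or(false);
    let has_gitlab_keys = ["stages:", "before_script:", "after_script:"]
        .iter()
        .any(|k| content.contains(*k));
    let has_github_keys = ["on:", "runs-on:", "uses:"]
        .iter()
        .any(|k| content.contains(*k));
    by_name || (has_gitlab_keys && !has_github_keys)
}

fn main() {
    // Hypothetical inputs for illustration only.
    assert!(looks_like_gitlab_ci(Path::new(".gitlab-ci.yml"), "stages:\n  - build\n"));
    assert!(!looks_like_gitlab_ci(
        Path::new("ci.yml"),
        "on: push\njobs:\n  build:\n    runs-on: ubuntu-latest\n"
    ));
}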

/// Execute a GitHub Actions workflow file locally
async fn execute_github_workflow(
    workflow_path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
) -> Result<ExecutionResult, ExecutionError> {
    // 1. Parse workflow file
    let workflow = parse_workflow(workflow_path)?;

@@ -113,6 +160,192 @@ pub async fn execute_workflow(
    })
}

/// Execute a GitLab CI/CD pipeline locally
async fn execute_gitlab_pipeline(
    pipeline_path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
) -> Result<ExecutionResult, ExecutionError> {
    logging::info("Executing GitLab CI/CD pipeline");

    // 1. Parse the GitLab pipeline file
    let pipeline = parse_pipeline(pipeline_path)
        .map_err(|e| ExecutionError::Parse(format!("Failed to parse GitLab pipeline: {}", e)))?;

    // 2. Convert the GitLab pipeline to a format compatible with the workflow executor
    let workflow = gitlab::convert_to_workflow_format(&pipeline);

    // 3. Resolve job dependencies based on stages
    let execution_plan = resolve_gitlab_dependencies(&pipeline, &workflow)?;

    // 4. Initialize appropriate runtime
    let runtime = initialize_runtime(runtime_type.clone())?;

    // Create a temporary workspace directory
    let workspace_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create workspace: {}", e)))?;

    // 5. Set up GitLab-like environment
    let mut env_context = create_gitlab_context(&pipeline, workspace_dir.path());

    // Add runtime mode to environment
    env_context.insert(
        "WRKFLW_RUNTIME_MODE".to_string(),
        if runtime_type == RuntimeType::Emulation {
            "emulation".to_string()
        } else {
            "docker".to_string()
        },
    );

    // Setup environment files
    environment::setup_github_environment_files(workspace_dir.path()).map_err(|e| {
        ExecutionError::Execution(format!("Failed to setup environment files: {}", e))
    })?;

    // 6. Execute jobs according to the plan
    let mut results = Vec::new();
    let mut has_failures = false;
    let mut failure_details = String::new();

    for job_batch in execution_plan {
        // Execute jobs in parallel if they don't depend on each other
        let job_results = execute_job_batch(
            &job_batch,
            &workflow,
            runtime.as_ref(),
            &env_context,
            verbose,
        )
        .await?;

        // Check for job failures and collect details
        for job_result in &job_results {
            if job_result.status == JobStatus::Failure {
                has_failures = true;
                failure_details.push_str(&format!("\n❌ Job failed: {}\n", job_result.name));

                // Add step details for failed jobs
                for step in &job_result.steps {
                    if step.status == StepStatus::Failure {
                        failure_details.push_str(&format!("  ❌ {}: {}\n", step.name, step.output));
                    }
                }
            }
        }

        results.extend(job_results);
    }

    // If there were failures, add detailed failure information to the result
    if has_failures {
        logging::error(&format!("Pipeline execution failed:{}", failure_details));
    }

    Ok(ExecutionResult {
        jobs: results,
        failure_details: if has_failures {
            Some(failure_details)
        } else {
            None
        },
    })
}

/// Create an environment context for GitLab CI/CD pipeline execution
fn create_gitlab_context(pipeline: &Pipeline, workspace_dir: &Path) -> HashMap<String, String> {
    let mut env_context = HashMap::new();

    // Add GitLab CI/CD environment variables
    env_context.insert("CI".to_string(), "true".to_string());
    env_context.insert("GITLAB_CI".to_string(), "true".to_string());

    // Add custom environment variable to indicate use in wrkflw
    env_context.insert("WRKFLW_CI".to_string(), "true".to_string());

    // Add workspace directory
    env_context.insert(
        "CI_PROJECT_DIR".to_string(),
        workspace_dir.to_string_lossy().to_string(),
    );

    // Add global variables from the pipeline
    if let Some(variables) = &pipeline.variables {
        for (key, value) in variables {
            env_context.insert(key.clone(), value.clone());
        }
    }

    env_context
}

/// Resolve GitLab CI/CD pipeline dependencies
fn resolve_gitlab_dependencies(
    pipeline: &Pipeline,
    workflow: &WorkflowDefinition,
) -> Result<Vec<Vec<String>>, ExecutionError> {
    // For GitLab CI/CD pipelines, jobs within the same stage can run in parallel,
    // but jobs in different stages run sequentially

    // Get stages from the pipeline or create a default one
    let stages = match &pipeline.stages {
        Some(defined_stages) => defined_stages.clone(),
        None => vec![
            "build".to_string(),
            "test".to_string(),
            "deploy".to_string(),
        ],
    };

    // Create an execution plan based on stages
    let mut execution_plan = Vec::new();

    // For each stage, collect the jobs that belong to it
    for stage in stages {
        let mut stage_jobs = Vec::new();

        for (job_name, job) in &pipeline.jobs {
            // Skip template jobs
            if let Some(true) = job.template {
                continue;
            }

            // Get the job's stage, or assume "test" if not specified
            let default_stage = "test".to_string();
            let job_stage = job.stage.as_ref().unwrap_or(&default_stage);

            // If the job belongs to the current stage, add it to the batch
            if job_stage == &stage {
                stage_jobs.push(job_name.clone());
            }
        }

        if !stage_jobs.is_empty() {
            execution_plan.push(stage_jobs);
        }
    }

    // Also create a batch for jobs without a stage
    let mut stageless_jobs = Vec::new();

    for (job_name, job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        if job.stage.is_none() {
            stageless_jobs.push(job_name.clone());
        }
    }

    if !stageless_jobs.is_empty() {
        execution_plan.push(stageless_jobs);
    }

    Ok(execution_plan)
}
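To illustrate the plan shape this produces, a self-contained sketch with a hypothetical stage order and job set (stage batches run sequentially; jobs inside a batch may run in parallel):

fn main() {
    // Hypothetical pipeline: declared stage order and (job, stage) pairs.
    let stages = ["build", "test"];
    let jobs = [("compile", "build"), ("lint", "test"), ("unit", "test")];

    // One batch per stage, in stage order; this mirrors the Vec<Vec<String>> plan above.
    let plan: Vec<Vec<&str>> = stages
        .iter()
        .map(|stage| {
            jobs.iter()
                .filter(|(_, s)| s == stage)
                .map(|(job, _)| *job)
                .collect()
        })
        .collect();

    assert_eq!(plan, [vec!["compile"], vec!["lint", "unit"]]);
}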

// Determine if Docker is available or fall back to emulation
fn initialize_runtime(
    runtime_type: RuntimeType,
@@ -1425,7 +1658,12 @@ async fn execute_step(ctx: StepExecutionContext<'_>) -> Result<StepResult, Execu
            } else {
                StepStatus::Failure
            },
            output: output_text,
            output: format!(
                "Exit code: {}
{}
{}",
                output.exit_code, output.stdout, output.stderr
            ),
        }
    } else {
        StepResult {
@@ -22,3 +22,317 @@ impl ValidationResult {
        self.issues.push(issue);
    }
}

// GitLab pipeline models
pub mod gitlab {
    use serde::{Deserialize, Serialize};
    use std::collections::HashMap;

    /// Represents a GitLab CI/CD pipeline configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Pipeline {
        /// Default image for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Global variables available to all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Pipeline stages in execution order
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stages: Option<Vec<String>>,

        /// Default before_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Default after_script for all jobs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// Job definitions (name => job)
        #[serde(flatten)]
        pub jobs: HashMap<String, Job>,

        /// Workflow rules for the pipeline
        #[serde(skip_serializing_if = "Option::is_none")]
        pub workflow: Option<Workflow>,

        /// Includes for pipeline configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub include: Option<Vec<Include>>,
    }

    /// A job in a GitLab CI/CD pipeline
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Job {
        /// The stage this job belongs to
        #[serde(skip_serializing_if = "Option::is_none")]
        pub stage: Option<String>,

        /// Docker image to use for this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub image: Option<Image>,

        /// Script commands to run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub script: Option<Vec<String>>,

        /// Commands to run before the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub before_script: Option<Vec<String>>,

        /// Commands to run after the main script
        #[serde(skip_serializing_if = "Option::is_none")]
        pub after_script: Option<Vec<String>>,

        /// When to run the job (on_success, on_failure, always, manual)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,

        /// Allow job failure
        #[serde(skip_serializing_if = "Option::is_none")]
        pub allow_failure: Option<bool>,

        /// Services to run alongside the job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub services: Option<Vec<Service>>,

        /// Tags to define which runners can execute this job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub tags: Option<Vec<String>>,

        /// Job-specific variables
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,

        /// Job dependencies
        #[serde(skip_serializing_if = "Option::is_none")]
        pub dependencies: Option<Vec<String>>,

        /// Artifacts to store after job execution
        #[serde(skip_serializing_if = "Option::is_none")]
        pub artifacts: Option<Artifacts>,

        /// Cache configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub cache: Option<Cache>,

        /// Rules for when this job should run
        #[serde(skip_serializing_if = "Option::is_none")]
        pub rules: Option<Vec<Rule>>,

        /// Only run on specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub only: Option<Only>,

        /// Exclude specified refs
        #[serde(skip_serializing_if = "Option::is_none")]
        pub except: Option<Except>,

        /// Retry configuration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub retry: Option<Retry>,

        /// Timeout for the job in seconds
        #[serde(skip_serializing_if = "Option::is_none")]
        pub timeout: Option<String>,

        /// Mark job as parallel and specify instance count
        #[serde(skip_serializing_if = "Option::is_none")]
        pub parallel: Option<usize>,

        /// Flag to indicate this is a template job
        #[serde(skip_serializing_if = "Option::is_none")]
        pub template: Option<bool>,

        /// List of jobs this job extends from
        #[serde(skip_serializing_if = "Option::is_none")]
        pub extends: Option<Vec<String>>,
    }

    /// Docker image configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Image {
        /// Simple image name as string
        Simple(String),
        /// Detailed image configuration
        Detailed {
            /// Image name
            name: String,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Service container to run alongside a job
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Service {
        /// Simple service name as string
        Simple(String),
        /// Detailed service configuration
        Detailed {
            /// Service name/image
            name: String,
            /// Command to run in the service container
            #[serde(skip_serializing_if = "Option::is_none")]
            command: Option<Vec<String>>,
            /// Entrypoint to override in the image
            #[serde(skip_serializing_if = "Option::is_none")]
            entrypoint: Option<Vec<String>>,
        },
    }

    /// Artifacts configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Artifacts {
        /// Paths to include as artifacts
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// Artifact expiration duration
        #[serde(skip_serializing_if = "Option::is_none")]
        pub expire_in: Option<String>,
        /// When to upload artifacts (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
    }

    /// Cache configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Cache {
        /// Cache key
        #[serde(skip_serializing_if = "Option::is_none")]
        pub key: Option<String>,
        /// Paths to cache
        #[serde(skip_serializing_if = "Option::is_none")]
        pub paths: Option<Vec<String>>,
        /// When to save cache (on_success, on_failure, always)
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Cache policy
        #[serde(skip_serializing_if = "Option::is_none")]
        pub policy: Option<String>,
    }

    /// Rule for conditional job execution
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Rule {
        /// If condition expression
        #[serde(skip_serializing_if = "Option::is_none")]
        pub if_: Option<String>,
        /// When to run if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub when: Option<String>,
        /// Variables to set if condition is true
        #[serde(skip_serializing_if = "Option::is_none")]
        pub variables: Option<HashMap<String, String>>,
    }

    /// Only/except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Only {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to include
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to include
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to include
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to include
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Except configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Except {
        /// Simple list of refs
        Refs(Vec<String>),
        /// Detailed configuration
        Complex {
            /// Refs to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            refs: Option<Vec<String>>,
            /// Branch patterns to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            branches: Option<Vec<String>>,
            /// Tags to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            tags: Option<Vec<String>>,
            /// Pipeline types to exclude
            #[serde(skip_serializing_if = "Option::is_none")]
            variables: Option<Vec<String>>,
            /// Changes to files that don't trigger the job
            #[serde(skip_serializing_if = "Option::is_none")]
            changes: Option<Vec<String>>,
        },
    }

    /// Workflow configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    pub struct Workflow {
        /// Rules for when to run the pipeline
        pub rules: Vec<Rule>,
    }

    /// Retry configuration
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Retry {
        /// Simple max attempts
        MaxAttempts(u32),
        /// Detailed retry configuration
        Detailed {
            /// Maximum retry attempts
            max: u32,
            /// When to retry
            #[serde(skip_serializing_if = "Option::is_none")]
            when: Option<Vec<String>>,
        },
    }

    /// Include configuration for external pipeline files
    #[derive(Debug, Serialize, Deserialize, Clone)]
    #[serde(untagged)]
    pub enum Include {
        /// Simple string include
        Local(String),
        /// Detailed include configuration
        Detailed {
            /// Local file path
            #[serde(skip_serializing_if = "Option::is_none")]
            local: Option<String>,
            /// Remote file URL
            #[serde(skip_serializing_if = "Option::is_none")]
            remote: Option<String>,
            /// Include from project
            #[serde(skip_serializing_if = "Option::is_none")]
            project: Option<String>,
            /// Include specific file from project
            #[serde(skip_serializing_if = "Option::is_none")]
            file: Option<String>,
            /// Include template
            #[serde(skip_serializing_if = "Option::is_none")]
            template: Option<String>,
            /// Ref to use when including from project
            #[serde(skip_serializing_if = "Option::is_none")]
            ref_: Option<String>,
        },
    }
}
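A minimal deserialization sketch, assuming the models crate and serde_yaml are available as dependencies; the pipeline snippet is hypothetical. Named fields consume the top-level keys, while job entries are picked up by the #[serde(flatten)] jobs map:

use models::gitlab::{Image, Pipeline};

fn main() {
    // Hypothetical pipeline text for illustration only.
    let yaml = r#"
stages:
  - build
image: rust:1.70
build_job:
  stage: build
  script:
    - cargo build
"#;
    let pipeline: Pipeline = serde_yaml::from_str(yaml).expect("valid pipeline");
    assert_eq!(pipeline.stages, Some(vec!["build".to_string()]));
    assert!(matches!(pipeline.image, Some(Image::Simple(_))));
    assert!(pipeline.jobs.contains_key("build_job"));
}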

@@ -15,3 +15,7 @@ jsonschema.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
thiserror.workspace = true

[dev-dependencies]
tempfile = "3.7"

crates/parser/src/gitlab.rs (new file, 273 lines)
@@ -0,0 +1,273 @@
use crate::schema::{SchemaType, SchemaValidator};
use crate::workflow;
use models::gitlab::Pipeline;
use models::ValidationResult;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum GitlabParserError {
    #[error("I/O error: {0}")]
    IoError(#[from] std::io::Error),

    #[error("YAML parsing error: {0}")]
    YamlError(#[from] serde_yaml::Error),

    #[error("Invalid pipeline structure: {0}")]
    InvalidStructure(String),

    #[error("Schema validation error: {0}")]
    SchemaValidationError(String),
}

/// Parse a GitLab CI/CD pipeline file
pub fn parse_pipeline(pipeline_path: &Path) -> Result<Pipeline, GitlabParserError> {
    // Read the pipeline file
    let pipeline_content = fs::read_to_string(pipeline_path)?;

    // Validate against schema
    let validator =
        SchemaValidator::new().map_err(GitlabParserError::SchemaValidationError)?;

    validator
        .validate_with_specific_schema(&pipeline_content, SchemaType::GitLab)
        .map_err(GitlabParserError::SchemaValidationError)?;

    // Parse the pipeline YAML
    let pipeline: Pipeline = serde_yaml::from_str(&pipeline_content)?;

    // Return the parsed pipeline
    Ok(pipeline)
}
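A minimal caller sketch for this parser entry point (the path is hypothetical; errors surface through GitlabParserError's Display impl):

use parser::gitlab::parse_pipeline;
use std::path::Path;

fn main() {
    // parse_pipeline schema-checks the file against the GitLab CI schema, then deserializes it.
    match parse_pipeline(Path::new(".gitlab-ci.yml")) {
        Ok(pipeline) => println!("parsed {} job(s)", pipeline.jobs.len()),
        Err(e) => eprintln!("parse failed: {}", e),
    }
}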

/// Validate the basic structure of a GitLab CI/CD pipeline
pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Check for at least one job
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Check for script in jobs
    for (job_name, job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }
    }

    // Check that referenced stages are defined
    if let Some(stages) = &pipeline.stages {
        for (job_name, job) in &pipeline.jobs {
            if let Some(stage) = &job.stage {
                if !stages.contains(stage) {
                    result.add_issue(format!(
                        "Job '{}' references undefined stage '{}'",
                        job_name, stage
                    ));
                }
            }
        }
    }

    // Check that job dependencies exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !pipeline.jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                }
            }
        }
    }

    // Check that job extensions exist
    for (job_name, job) in &pipeline.jobs {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if !pipeline.jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                }
            }
        }
    }

    result
}

/// Convert a GitLab CI/CD pipeline to a format compatible with the workflow executor
pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition {
    // Create a new workflow with required fields
    let mut workflow = workflow::WorkflowDefinition {
        name: "Converted GitLab CI Pipeline".to_string(),
        on: vec!["push".to_string()], // Default trigger
        on_raw: serde_yaml::Value::String("push".to_string()),
        jobs: HashMap::new(),
    };

    // Convert each GitLab job to a GitHub Actions job
    for (job_name, gitlab_job) in &pipeline.jobs {
        // Skip template jobs
        if let Some(true) = gitlab_job.template {
            continue;
        }

        // Create a new job
        let mut job = workflow::Job {
            runs_on: "ubuntu-latest".to_string(), // Default runner
            needs: None,
            steps: Vec::new(),
            env: HashMap::new(),
            matrix: None,
            services: HashMap::new(),
        };

        // Add job-specific environment variables
        if let Some(variables) = &gitlab_job.variables {
            job.env.extend(variables.clone());
        }

        // Add global variables if they exist
        if let Some(variables) = &pipeline.variables {
            // Only add if not already defined at job level
            for (key, value) in variables {
                job.env.entry(key.clone()).or_insert_with(|| value.clone());
            }
        }

        // Convert before_script to steps if it exists
        if let Some(before_script) = &gitlab_job.before_script {
            for (i, cmd) in before_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Before script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert main script to steps
        if let Some(script) = &gitlab_job.script {
            for (i, cmd) in script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("Run script line {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: None,
                };
                job.steps.push(step);
            }
        }

        // Convert after_script to steps if it exists
        if let Some(after_script) = &gitlab_job.after_script {
            for (i, cmd) in after_script.iter().enumerate() {
                let step = workflow::Step {
                    name: Some(format!("After script {}", i + 1)),
                    uses: None,
                    run: Some(cmd.clone()),
                    with: None,
                    env: HashMap::new(),
                    continue_on_error: Some(true), // After script should continue even if previous steps fail
                };
                job.steps.push(step);
            }
        }

        // Add services if they exist
        if let Some(services) = &gitlab_job.services {
            for (i, service) in services.iter().enumerate() {
                let service_name = format!("service-{}", i);
                let service_image = match service {
                    models::gitlab::Service::Simple(name) => name.clone(),
                    models::gitlab::Service::Detailed { name, .. } => name.clone(),
                };

                let service = workflow::Service {
                    image: service_image,
                    ports: None,
                    env: HashMap::new(),
                    volumes: None,
                    options: None,
                };

                job.services.insert(service_name, service);
            }
        }

        // Add the job to the workflow
        workflow.jobs.insert(job_name.clone(), job);
    }

    workflow
}
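A usage sketch of the converter, assuming the parser crate's workflow job fields are publicly readable (as their construction above suggests); the path is hypothetical:

use parser::gitlab::{convert_to_workflow_format, parse_pipeline};
use std::path::Path;

fn main() {
    // Each before_script/script/after_script line becomes one `run` step in the converted job.
    let pipeline = parse_pipeline(Path::new(".gitlab-ci.yml")).expect("parse");
    let workflow = convert_to_workflow_format(&pipeline);
    for (name, job) in &workflow.jobs {
        println!("{}: {} step(s), runs on {}", name, job.steps.len(), job.runs_on);
    }
}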

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;
    use tempfile::NamedTempFile;

    #[test]
    fn test_parse_simple_pipeline() {
        // Create a temporary file with a simple GitLab CI/CD pipeline
        let mut file = NamedTempFile::new().unwrap();
        let content = r#"
stages:
  - build
  - test

build_job:
  stage: build
  script:
    - echo "Building..."
    - make build

test_job:
  stage: test
  script:
    - echo "Testing..."
    - make test
"#;
        fs::write(&file, content).unwrap();

        // Parse the pipeline
        let pipeline = parse_pipeline(&file.path()).unwrap();

        // Validate basic structure
        assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2);
        assert_eq!(pipeline.jobs.len(), 2);

        // Check job contents
        let build_job = pipeline.jobs.get("build_job").unwrap();
        assert_eq!(build_job.stage.as_ref().unwrap(), "build");
        assert_eq!(build_job.script.as_ref().unwrap().len(), 2);

        let test_job = pipeline.jobs.get("test_job").unwrap();
        assert_eq!(test_job.stage.as_ref().unwrap(), "test");
        assert_eq!(test_job.script.as_ref().unwrap().len(), 2);
    }
}
@@ -1,4 +1,5 @@
// parser crate

pub mod gitlab;
pub mod schema;
pub mod workflow;
@@ -4,23 +4,50 @@ use std::fs;
use std::path::Path;

const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../../schemas/github-workflow.json");
const GITLAB_CI_SCHEMA: &str = include_str!("../../../schemas/gitlab-ci.json");

#[derive(Debug, Clone, Copy)]
pub enum SchemaType {
    GitHub,
    GitLab,
}

pub struct SchemaValidator {
    schema: JSONSchema,
    github_schema: JSONSchema,
    gitlab_schema: JSONSchema,
}

impl SchemaValidator {
    pub fn new() -> Result<Self, String> {
        let schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
        let github_schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA)
            .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?;

        let schema = JSONSchema::compile(&schema_json)
            .map_err(|e| format!("Failed to compile JSON schema: {}", e))?;
        let gitlab_schema_json: Value = serde_json::from_str(GITLAB_CI_SCHEMA)
            .map_err(|e| format!("Failed to parse GitLab CI schema: {}", e))?;

        Ok(Self { schema })
        let github_schema = JSONSchema::compile(&github_schema_json)
            .map_err(|e| format!("Failed to compile GitHub JSON schema: {}", e))?;

        let gitlab_schema = JSONSchema::compile(&gitlab_schema_json)
            .map_err(|e| format!("Failed to compile GitLab JSON schema: {}", e))?;

        Ok(Self {
            github_schema,
            gitlab_schema,
        })
    }

    pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> {
        // Determine the schema type based on the filename
        let schema_type = if workflow_path.file_name().is_some_and(|name| {
            let name_str = name.to_string_lossy();
            name_str.ends_with(".gitlab-ci.yml") || name_str.ends_with(".gitlab-ci.yaml")
        }) {
            SchemaType::GitLab
        } else {
            SchemaType::GitHub
        };

        // Read the workflow file
        let content = fs::read_to_string(workflow_path)
            .map_err(|e| format!("Failed to read workflow file: {}", e))?;
@@ -29,9 +56,50 @@ impl SchemaValidator {
        let workflow_json: Value = serde_yaml::from_str(&content)
            .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?;

        // Validate against schema
        if let Err(errors) = self.schema.validate(&workflow_json) {
            let mut error_msg = String::from("Workflow validation failed:\n");
        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
            return Err(error_msg);
        }

        Ok(())
    }

    pub fn validate_with_specific_schema(
        &self,
        content: &str,
        schema_type: SchemaType,
    ) -> Result<(), String> {
        // Parse YAML to JSON Value
        let workflow_json: Value =
            serde_yaml::from_str(content).map_err(|e| format!("Failed to parse YAML: {}", e))?;

        // Validate against the appropriate schema
        let validation_result = match schema_type {
            SchemaType::GitHub => self.github_schema.validate(&workflow_json),
            SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json),
        };

        // Handle validation errors
        if let Err(errors) = validation_result {
            let schema_name = match schema_type {
                SchemaType::GitHub => "GitHub workflow",
                SchemaType::GitLab => "GitLab CI",
            };
            let mut error_msg = format!("{} validation failed:\n", schema_name);
            for error in errors {
                error_msg.push_str(&format!("- {}\n", error));
            }
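A sketch of driving the validator directly with an in-memory snippet (hypothetical YAML; both schema files are embedded at build time via include_str!):

use parser::schema::{SchemaType, SchemaValidator};

fn main() {
    let validator = SchemaValidator::new().expect("schemas compile");
    let snippet = "stages:\n  - build\nbuild:\n  stage: build\n  script:\n    - cargo build\n";
    match validator.validate_with_specific_schema(snippet, SchemaType::GitLab) {
        Ok(()) => println!("schema-valid"),
        Err(msg) => eprintln!("{}", msg),
    }
}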

@@ -10,8 +10,7 @@ use std::sync::mpsc;
use std::thread;

// Validate a workflow or directory containing workflows
#[allow(clippy::ptr_arg)]
pub fn validate_workflow(path: &PathBuf, verbose: bool) -> io::Result<()> {
pub fn validate_workflow(path: &Path, verbose: bool) -> io::Result<()> {
    let mut workflows = Vec::new();

    if path.is_dir() {
@@ -26,7 +25,7 @@ pub fn validate_workflow(path: &PathBuf, verbose: bool) -> io::Result<()> {
            }
        }
    } else if path.is_file() {
        workflows.push(path.clone());
        workflows.push(PathBuf::from(path));
    } else {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
@@ -69,9 +68,8 @@ pub fn validate_workflow(path: &PathBuf, verbose: bool) -> io::Result<()> {
}

// Execute a workflow through the CLI
#[allow(clippy::ptr_arg)]
pub async fn execute_workflow_cli(
    path: &PathBuf,
    path: &Path,
    runtime_type: RuntimeType,
    verbose: bool,
) -> io::Result<()> {
crates/validators/src/gitlab.rs (new file, 234 lines)
@@ -0,0 +1,234 @@
use models::gitlab::{Job, Pipeline};
use models::ValidationResult;
use std::collections::HashMap;

/// Validate a GitLab CI/CD pipeline
pub fn validate_gitlab_pipeline(pipeline: &Pipeline) -> ValidationResult {
    let mut result = ValidationResult::new();

    // Basic structure validation
    if pipeline.jobs.is_empty() {
        result.add_issue("Pipeline must contain at least one job".to_string());
    }

    // Validate jobs
    validate_jobs(&pipeline.jobs, &mut result);

    // Validate stages if defined
    if let Some(stages) = &pipeline.stages {
        validate_stages(stages, &pipeline.jobs, &mut result);
    }

    // Validate dependencies
    validate_dependencies(&pipeline.jobs, &mut result);

    // Validate extends
    validate_extends(&pipeline.jobs, &mut result);

    // Validate artifacts
    validate_artifacts(&pipeline.jobs, &mut result);

    result
}

/// Validate GitLab CI/CD jobs
fn validate_jobs(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        // Skip template jobs
        if let Some(true) = job.template {
            continue;
        }

        // Check for script or extends
        if job.script.is_none() && job.extends.is_none() {
            result.add_issue(format!(
                "Job '{}' must have a script section or extend another job",
                job_name
            ));
        }

        // Check when value if present
        if let Some(when) = &job.when {
            match when.as_str() {
                "on_success" | "on_failure" | "always" | "manual" | "never" => {
                    // Valid when value
                }
                _ => {
                    result.add_issue(format!(
                        "Job '{}' has invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always, manual, never",
                        job_name, when
                    ));
                }
            }
        }

        // Check retry configuration
        if let Some(retry) = &job.retry {
            match retry {
                models::gitlab::Retry::MaxAttempts(attempts) => {
                    if *attempts > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, attempts
                        ));
                    }
                }
                models::gitlab::Retry::Detailed { max, when: _ } => {
                    if *max > 10 {
                        result.add_issue(format!(
                            "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste",
                            job_name, max
                        ));
                    }
                }
            }
        }
    }
}

/// Validate GitLab CI/CD stages
fn validate_stages(stages: &[String], jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check that all jobs reference existing stages
    for (job_name, job) in jobs {
        if let Some(stage) = &job.stage {
            if !stages.contains(stage) {
                result.add_issue(format!(
                    "Job '{}' references undefined stage '{}'. Available stages are: {}",
                    job_name,
                    stage,
                    stages.join(", ")
                ));
            }
        }
    }

    // Check for unused stages
    for stage in stages {
        let used = jobs.values().any(|job| {
            if let Some(job_stage) = &job.stage {
                job_stage == stage
            } else {
                false
            }
        });

        if !used {
            result.add_issue(format!(
                "Stage '{}' is defined but not used by any job",
                stage
            ));
        }
    }
}

/// Validate GitLab CI/CD job dependencies
fn validate_dependencies(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(dependencies) = &job.dependencies {
            for dependency in dependencies {
                if !jobs.contains_key(dependency) {
                    result.add_issue(format!(
                        "Job '{}' depends on undefined job '{}'",
                        job_name, dependency
                    ));
                } else if job_name == dependency {
                    result.add_issue(format!("Job '{}' cannot depend on itself", job_name));
                }
            }
        }
    }
}

/// Validate GitLab CI/CD job extends
fn validate_extends(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    // Check for circular extends
    for (job_name, job) in jobs {
        if let Some(extends) = &job.extends {
            // Check that all extended jobs exist
            for extend in extends {
                if !jobs.contains_key(extend) {
                    result.add_issue(format!(
                        "Job '{}' extends undefined job '{}'",
                        job_name, extend
                    ));
                    continue;
                }

                // Check for circular extends
                let mut visited = vec![job_name.clone()];
                check_circular_extends(extend, jobs, &mut visited, result);
            }
        }
    }
}

/// Helper function to detect circular extends
fn check_circular_extends(
    job_name: &str,
    jobs: &HashMap<String, Job>,
    visited: &mut Vec<String>,
    result: &mut ValidationResult,
) {
    visited.push(job_name.to_string());

    if let Some(job) = jobs.get(job_name) {
        if let Some(extends) = &job.extends {
            for extend in extends {
                if visited.contains(&extend.to_string()) {
                    // Circular dependency detected
                    let cycle = visited
                        .iter()
                        .skip(visited.iter().position(|x| x == extend).unwrap())
                        .chain(std::iter::once(extend))
                        .cloned()
                        .collect::<Vec<_>>()
                        .join(" -> ");

                    result.add_issue(format!("Circular extends detected: {}", cycle));
                    return;
                }

                check_circular_extends(extend, jobs, visited, result);
            }
        }
    }

    visited.pop();
}

/// Validate GitLab CI/CD job artifacts
fn validate_artifacts(jobs: &HashMap<String, Job>, result: &mut ValidationResult) {
    for (job_name, job) in jobs {
        if let Some(artifacts) = &job.artifacts {
            // Check that paths are specified
            if let Some(paths) = &artifacts.paths {
                if paths.is_empty() {
                    result.add_issue(format!(
                        "Job '{}' has artifacts section with empty paths",
                        job_name
                    ));
                }
            } else {
                result.add_issue(format!(
                    "Job '{}' has artifacts section without specifying paths",
                    job_name
                ));
            }

            // Check for valid 'when' value if present
            if let Some(when) = &artifacts.when {
                match when.as_str() {
                    "on_success" | "on_failure" | "always" => {
                        // Valid when value
                    }
                    _ => {
                        result.add_issue(format!(
                            "Job '{}' has artifacts with invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always",
                            job_name, when
                        ));
                    }
                }
            }
        }
    }
}
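A hedged usage sketch of the new validator entry point, assuming the models, validators, and serde_yaml crates are available as dependencies; the helper and snippet below are hypothetical:

use models::gitlab::Pipeline;
use validators::validate_gitlab_pipeline;

// Hypothetical helper: deserialize a pipeline snippet and run the lint pass over it.
fn lint_pipeline(yaml: &str) -> Result<(), String> {
    let pipeline: Pipeline =
        serde_yaml::from_str(yaml).map_err(|e| format!("not a pipeline: {}", e))?;
    // The returned ValidationResult carries issues such as undefined stages,
    // self-dependencies, circular extends, or artifact sections without paths.
    let _report = validate_gitlab_pipeline(&pipeline);
    Ok(())
}

fn main() {
    // A job pointing at an undefined stage would be flagged by the validator.
    let _ = lint_pipeline("stages:\n  - build\nunit:\n  stage: test\n  script:\n    - cargo test\n");
}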
@@ -1,12 +1,14 @@
// validators crate

mod actions;
mod gitlab;
mod jobs;
mod matrix;
mod steps;
mod triggers;

pub use actions::validate_action_reference;
pub use gitlab::validate_gitlab_pipeline;
pub use jobs::validate_jobs;
pub use matrix::validate_matrix;
pub use steps::validate_steps;

@@ -54,6 +54,7 @@ itertools.workspace = true
once_cell.workspace = true
crossterm.workspace = true
ratatui.workspace = true
walkdir = "2.4"

[lib]
name = "wrkflw_lib"
@@ -2,13 +2,14 @@ use bollard::Docker;
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
name = "wrkflw",
|
||||
about = "GitHub Workflow validator and executor",
|
||||
about = "GitHub & GitLab CI/CD validator and executor",
|
||||
version,
|
||||
long_about = "A GitHub Workflow validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker"
|
||||
long_about = "A CI/CD validator and executor that runs workflows locally.\n\nExamples:\n wrkflw validate # Validate all workflows in .github/workflows\n wrkflw run .github/workflows/build.yml # Run a specific workflow\n wrkflw run .gitlab-ci.yml # Run a GitLab CI pipeline\n wrkflw --verbose run .github/workflows/build.yml # Run with more output\n wrkflw --debug run .github/workflows/build.yml # Run with detailed debug information\n wrkflw run --emulate .github/workflows/build.yml # Use emulation mode instead of Docker"
|
||||
)]
|
||||
struct Wrkflw {
|
||||
#[command(subcommand)]
|
||||
@@ -25,15 +26,19 @@ struct Wrkflw {
|
||||
|
||||
#[derive(Debug, Subcommand)]
|
||||
enum Commands {
|
||||
/// Validate GitHub workflow files
|
||||
/// Validate workflow or pipeline files
|
||||
Validate {
|
||||
/// Path to workflow file or directory (defaults to .github/workflows)
|
||||
/// Path to workflow/pipeline file or directory (defaults to .github/workflows)
|
||||
path: Option<PathBuf>,
|
||||
|
||||
/// Explicitly validate as GitLab CI/CD pipeline
|
||||
#[arg(long)]
|
||||
gitlab: bool,
|
||||
},
|
||||
|
||||
/// Execute GitHub workflow files locally
|
||||
/// Execute workflow or pipeline files locally
|
||||
Run {
|
||||
/// Path to workflow file to execute
|
||||
/// Path to workflow/pipeline file to execute
|
||||
path: PathBuf,
|
||||
|
||||
/// Use emulation mode instead of Docker
|
||||
@@ -43,6 +48,10 @@ enum Commands {
|
||||
/// Show 'Would execute GitHub action' messages in emulation mode
|
||||
#[arg(long, default_value_t = false)]
|
||||
show_action_messages: bool,
|
||||
|
||||
/// Explicitly run as GitLab CI/CD pipeline
|
||||
#[arg(long)]
|
||||
gitlab: bool,
|
||||
},
|
||||
|
||||
/// Open TUI interface to manage workflows
|
||||
@@ -84,7 +93,7 @@ enum Commands {
|
||||
variable: Option<Vec<(String, String)>>,
|
||||
},
|
||||
|
||||
/// List available workflows
|
||||
/// List available workflows and pipelines
|
||||
List,
|
||||
}
|
||||
|
||||
@@ -173,6 +182,51 @@ async fn handle_signals() {
|
||||
std::process::exit(0);
|
||||
}
|
||||
|
||||
/// Determines if a file is a GitLab CI/CD pipeline based on its name and content
|
||||
fn is_gitlab_pipeline(path: &Path) -> bool {
|
||||
// First check the file name
|
||||
if let Some(file_name) = path.file_name() {
|
||||
if let Some(file_name_str) = file_name.to_str() {
|
||||
if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if file is in .gitlab/ci directory
|
||||
if let Some(parent) = path.parent() {
|
||||
if let Some(parent_str) = parent.to_str() {
|
||||
if parent_str.ends_with(".gitlab/ci")
|
||||
&& path
|
||||
.extension().is_some_and(|ext| ext == "yml" || ext == "yaml")
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If file exists, check the content
|
||||
if path.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(path) {
|
||||
// GitLab CI/CD pipelines typically have stages, before_script, after_script at the top level
|
||||
if content.contains("stages:")
|
||||
|| content.contains("before_script:")
|
||||
|| content.contains("after_script:")
|
||||
{
|
||||
// Check for GitHub Actions specific keys that would indicate it's not GitLab
|
||||
if !content.contains("on:")
|
||||
&& !content.contains("runs-on:")
|
||||
&& !content.contains("uses:")
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let cli = Wrkflw::parse();
|
||||
@@ -194,285 +248,303 @@ async fn main() {
|
||||
tokio::spawn(handle_signals());
|
||||
|
||||
match &cli.command {
|
||||
Some(Commands::Validate { path }) => {
|
||||
Some(Commands::Validate { path, gitlab }) => {
|
||||
// Determine the path to validate
|
||||
let validate_path = path
|
||||
.clone()
|
||||
.unwrap_or_else(|| PathBuf::from(".github/workflows"));
|
||||
|
||||
// Run the validation using ui crate
|
||||
ui::validate_workflow(&validate_path, verbose).unwrap_or_else(|e| {
|
||||
eprintln!("Error: {}", e);
|
||||
// Check if the path exists
|
||||
if !validate_path.exists() {
|
||||
eprintln!("Error: Path does not exist: {}", validate_path.display());
|
||||
std::process::exit(1);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Determine if we're validating a GitLab pipeline based on the --gitlab flag or file detection
|
||||
let force_gitlab = *gitlab;
|
||||
|
||||
if validate_path.is_dir() {
|
||||
// Validate all workflow files in the directory
|
||||
let entries = std::fs::read_dir(&validate_path)
|
||||
.expect("Failed to read directory")
|
||||
.filter_map(|entry| entry.ok())
|
||||
.filter(|entry| {
|
||||
entry.path().is_file()
|
||||
&& entry
|
||||
.path()
|
||||
.extension().is_some_and(|ext| ext == "yml" || ext == "yaml")
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
println!("Validating {} workflow file(s)...", entries.len());
|
||||
|
||||
for entry in entries {
|
||||
let path = entry.path();
|
||||
let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);
|
||||
|
||||
if is_gitlab {
|
||||
validate_gitlab_pipeline(&path, verbose);
|
||||
} else {
|
||||
validate_github_workflow(&path, verbose);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Validate a single workflow file
|
||||
let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);
|
||||
|
||||
if is_gitlab {
|
||||
validate_gitlab_pipeline(&validate_path, verbose);
|
||||
} else {
|
||||
validate_github_workflow(&validate_path, verbose);
|
||||
}
|
||||
}
|
||||
}
|
||||
    Some(Commands::Run {
        path,
        emulate,
        show_action_messages: _, // Assuming this flag is handled within executor/runtime
        gitlab,
    }) => {
        // Determine the runtime type
        let runtime_type = if *emulate {
            executor::RuntimeType::Emulation
        } else {
            executor::RuntimeType::Docker
        };

        // First validate the workflow file using the parser crate
        match parser::workflow::parse_workflow(path) {
            Ok(_) => logging::info("Validating workflow..."),
            Err(e) => {
                logging::error(&format!("Workflow validation failed: {}", e));
                std::process::exit(1);
            }
        }

        // Check if we're explicitly or implicitly running a GitLab pipeline
        let is_gitlab = *gitlab || is_gitlab_pipeline(path);
        let workflow_type = if is_gitlab {
            "GitLab CI pipeline"
        } else {
            "GitHub workflow"
        };

        logging::info(&format!("Running {} at: {}", workflow_type, path.display()));

        // Execute the workflow using the executor crate
        let result = executor::execute_workflow(path, runtime_type, verbose || debug)
            .await
            .unwrap_or_else(|e| {
                eprintln!("Error executing workflow: {}", e);
                std::process::exit(1);
            });

        // Print a summary of executed jobs
        println!("\nJob summary:");
        for job in &result.jobs {
            println!(
                "\n{} Job {} ({})",
                match job.status {
                    executor::JobStatus::Success => "✅",
                    executor::JobStatus::Failure => "❌",
                    executor::JobStatus::Skipped => "⏭️",
                },
                job.name,
                match job.status {
                    executor::JobStatus::Success => "success",
                    executor::JobStatus::Failure => "failure",
                    executor::JobStatus::Skipped => "skipped",
                }
            );

            // Print step results
            for step in &job.steps {
                println!(
                    "  {} {}",
                    match step.status {
                        executor::StepStatus::Success => "✅",
                        executor::StepStatus::Failure => "❌",
                        executor::StepStatus::Skipped => "⏭️",
                    },
                    step.name
                );

                if !step.output.trim().is_empty() {
                    // If the output is very long, trim it
                    let output_lines = step.output.lines().collect::<Vec<&str>>();

                    println!("    Output:");

                    if verbose || debug {
                        // In verbose mode, show complete output
                        for line in &output_lines {
                            println!("      {}", line);
                        }
                    } else {
                        // Show only the first few lines
                        let max_lines = 5;
                        for line in output_lines.iter().take(max_lines) {
                            println!("      {}", line);
                        }

                        if output_lines.len() > max_lines {
                            println!(
                                "      ... ({} more lines, use --verbose to see full output)",
                                output_lines.len() - max_lines
                            );
                        }
                    }
                }
            }
        }

        // Print detailed failure information if available
        if let Some(failure_details) = &result.failure_details {
            println!("\n❌ Workflow execution failed!");
            println!("{}", failure_details);
            println!("\nTo fix these issues:");
            println!("1. Check the formatting issues with: cargo fmt");
            println!("2. Fix clippy warnings with: cargo clippy -- -D warnings");
            println!("3. Run tests to ensure everything passes: cargo test");
            std::process::exit(1);
        } else {
            println!("\n✅ Workflow completed successfully!");
        }

        // Cleanup is handled automatically via the signal handler
    }
    Some(Commands::Tui {
        path,
        emulate,
        show_action_messages,
    }) => {
        // Set runtime type based on the emulate flag
        let runtime_type = if *emulate {
            executor::RuntimeType::Emulation
        } else if !executor::docker::is_available() {
            // Check if Docker is available, fall back to emulation if not
            println!("⚠️ Docker is not available. Using emulation mode instead.");
            logging::warning("Docker is not available. Using emulation mode instead.");
            executor::RuntimeType::Emulation
        } else {
            executor::RuntimeType::Docker
        };

        // Control hiding action messages based on the flag
        if !*show_action_messages {
            std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");
        } else {
            std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "false");
        }

        // Open the TUI interface using the ui crate
        match ui::run_wrkflw_tui(path.as_ref(), runtime_type, verbose).await {
            Ok(_) => {
                // Clean up on successful exit
                cleanup_on_exit().await;
            }
            Err(e) => {
                eprintln!("Error: {}", e);
                cleanup_on_exit().await; // Ensure cleanup even on error
                std::process::exit(1);
            }
        }
    }

    Some(Commands::Trigger {
        workflow,
        branch,
        input,
    }) => {
        logging::info(&format!("Triggering workflow {} on GitHub", workflow));

        // Convert optional Vec<(String, String)> to Option<HashMap<String, String>>
        let inputs = input
            .as_ref()
            .map(|i| i.iter().cloned().collect::<HashMap<String, String>>());

        // Trigger the workflow using the github crate
        match github::trigger_workflow(workflow, branch.as_deref(), inputs).await {
            Ok(_) => logging::info("Workflow triggered successfully"),
            Err(e) => {
                eprintln!("Error triggering GitHub workflow: {}", e);
                std::process::exit(1);
            }
        }
    }

    Some(Commands::TriggerGitlab { branch, variable }) => {
        logging::info("Triggering pipeline on GitLab");

        // Convert variables to HashMap
        let variable_map = variable.as_ref().map(|v| {
            v.iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect::<HashMap<String, String>>()
        });

        // Use gitlab crate
        match gitlab::trigger_pipeline(branch.as_deref(), variable_map).await {
            Ok(_) => logging::info("GitLab pipeline triggered successfully"),
            Err(e) => {
                eprintln!("Error triggering GitLab pipeline: {}", e);
                std::process::exit(1);
            }
        }
    }

    Some(Commands::List) => {
        logging::info("Listing available workflows");

        // Attempt to get GitHub repo info using the github crate
        if let Ok(repo_info) = github::get_repo_info() {
            match github::list_workflows(&repo_info).await {
                Ok(workflows) => {
                    if workflows.is_empty() {
                        println!("No GitHub workflows found in repository");
                    } else {
                        println!("GitHub workflows:");
                        for workflow in workflows {
                            println!("  {}", workflow);
                        }
                    }
                }
                Err(e) => {
                    eprintln!("Error listing GitHub workflows: {}", e);
                }
            }
        } else {
            println!("Not a GitHub repository or unable to get repository information");
        }

        // Attempt to get GitLab repo info using the gitlab crate
        if let Ok(repo_info) = gitlab::get_repo_info() {
            match gitlab::list_pipelines(&repo_info).await {
                Ok(pipelines) => {
                    if pipelines.is_empty() {
                        println!("No GitLab pipelines found in repository");
                    } else {
                        println!("GitLab pipelines:");
                        for pipeline in pipelines {
                            println!("  {}", pipeline);
                        }
                    }
                }
                Err(e) => {
                    eprintln!("Error listing GitLab pipelines: {}", e);
                }
            }
        } else {
            println!("Not a GitLab repository or unable to get repository information");
        }

        // Also list workflow and pipeline files found in the local repository
        list_workflows_and_pipelines(verbose);
    }

    None => {
        // Default to the TUI interface if no subcommand is given
        // Check if Docker is available, fall back to emulation if not
        let runtime_type = if !executor::docker::is_available() {
            println!("⚠️ Docker is not available. Using emulation mode instead.");
            logging::warning("Docker is not available. Using emulation mode instead.");
            executor::RuntimeType::Emulation
        } else {
            executor::RuntimeType::Docker
        };

        // Set environment variable to hide action messages by default
        std::env::set_var("WRKFLW_HIDE_ACTION_MESSAGES", "true");

        match ui::run_wrkflw_tui(
            Some(&PathBuf::from(".github/workflows")),
            runtime_type,
            verbose,
        )
        .await
        {
            Ok(_) => {
                // Clean up on successful exit
                cleanup_on_exit().await;
            }
            Err(e) => {
                eprintln!("Error: {}", e);
                cleanup_on_exit().await; // Ensure cleanup even on error
                std::process::exit(1);
            }
        }
    }
}
}

/// Validate a GitHub workflow file
fn validate_github_workflow(path: &Path, verbose: bool) {
    print!("Validating GitHub workflow file: {}... ", path.display());

    // Use the ui crate's validate_workflow function
    match ui::validate_workflow(path, verbose) {
        Ok(_) => {
            // The detailed validation output is already printed by the function
        }
        Err(e) => {
            eprintln!("Error validating workflow: {}", e);
        }
    }
}

/// Validate a GitLab CI/CD pipeline file
fn validate_gitlab_pipeline(path: &Path, verbose: bool) {
    print!("Validating GitLab CI pipeline file: {}... ", path.display());

    // Parse and validate the pipeline file
    match parser::gitlab::parse_pipeline(path) {
        Ok(pipeline) => {
            println!("✅ Valid syntax");

            // Additional structural validation
            let validation_result = validators::validate_gitlab_pipeline(&pipeline);

            if !validation_result.is_valid {
                println!("⚠️ Validation issues:");
                for issue in validation_result.issues {
                    println!("  - {}", issue);
                }
            } else if verbose {
                println!("✅ All validation checks passed");
            }
        }
        Err(e) => {
            println!("❌ Invalid");
            eprintln!("Validation failed: {}", e);
        }
    }
}

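Note: the ValidationResult returned by validators::validate_gitlab_pipeline is not defined in this excerpt; the code above only relies on its is_valid flag and its issues list. A minimal sketch of the shape being assumed (hypothetical, for illustration only; the real type lives in the validators crate and may differ):

// Hypothetical sketch inferred from the usage above; the actual type may carry
// more detail (severity, offending job name, etc.).
pub struct ValidationResult {
    pub is_valid: bool,      // false when any issue was found
    pub issues: Vec<String>, // human-readable descriptions of each problem
}

impl ValidationResult {
    // Convenience constructor: a pipeline with no issues is considered valid.
    pub fn from_issues(issues: Vec<String>) -> Self {
        ValidationResult {
            is_valid: issues.is_empty(),
            issues,
        }
    }
}
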
/// List available workflows and pipelines in the repository
fn list_workflows_and_pipelines(verbose: bool) {
    // Check for GitHub workflows
    let github_path = PathBuf::from(".github/workflows");
    if github_path.exists() && github_path.is_dir() {
        println!("GitHub Workflows:");

        let entries = std::fs::read_dir(&github_path)
            .expect("Failed to read directory")
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .path()
                        .extension()
                        .is_some_and(|ext| ext == "yml" || ext == "yaml")
            })
            .collect::<Vec<_>>();

        if entries.is_empty() {
            println!("  No workflow files found in .github/workflows");
        } else {
            for entry in entries {
                println!("  - {}", entry.path().display());
            }
        }
    } else {
        println!("GitHub Workflows: No .github/workflows directory found");
    }

    // Check for GitLab CI pipeline
    let gitlab_path = PathBuf::from(".gitlab-ci.yml");
    if gitlab_path.exists() && gitlab_path.is_file() {
        println!("GitLab CI Pipeline:");
        println!("  - {}", gitlab_path.display());
    } else {
        println!("GitLab CI Pipeline: No .gitlab-ci.yml file found");
    }

    // Check for other GitLab CI pipeline files
    if verbose {
        println!("Searching for other GitLab CI pipeline files...");

        let entries = walkdir::WalkDir::new(".")
            .follow_links(true)
            .into_iter()
            .filter_map(|entry| entry.ok())
            .filter(|entry| {
                entry.path().is_file()
                    && entry
                        .file_name()
                        .to_string_lossy()
                        .ends_with("gitlab-ci.yml")
                    && entry.path() != gitlab_path
            })
            .collect::<Vec<_>>();

        if !entries.is_empty() {
            println!("Additional GitLab CI Pipeline files:");
            for entry in entries {
                println!("  - {}", entry.path().display());
            }
        }
    }

    // Final cleanup before program exit (redundant if called on success/error/signal?)
    // Consider if this final call is necessary given the calls in Ok/Err/signal handlers.
    // It might be okay as a safety net, but ensure cleanup_on_exit is idempotent.
    // cleanup_on_exit().await; // Keep or remove based on idempotency review
}

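Note: is_gitlab_pipeline, which the match arms above use for automatic pipeline type detection, is defined elsewhere in this commit and is not shown in this excerpt. A rough sketch of a purely name-based check, assuming detection by file name only (the commit message also mentions content-based detection, which is omitted here):

use std::path::Path;

// Rough, name-only sketch; the real helper in wrkflw may also inspect file contents.
fn is_gitlab_pipeline(path: &Path) -> bool {
    path.file_name()
        .map(|name| name.to_string_lossy().ends_with("gitlab-ci.yml"))
        .unwrap_or(false)
}
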
schemas/gitlab-ci.json (new file, 3012 lines)
File diff suppressed because it is too large
test_gitlab_ci/.gitlab/ci/build.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
.build-template:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Normal build job
build:
  extends: .build-template
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Debug build with additional flags
debug-build:
  extends: .build-template
  script:
    - cargo build --features debug
  variables:
    RUSTFLAGS: "-Z debug-info=2"
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $DEBUG_BUILD == "true"
      when: manual
test_gitlab_ci/.gitlab/ci/test.yml (new file, 63 lines)
@@ -0,0 +1,63 @@
.test-template:
  stage: test
  dependencies:
    - build
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Unit tests
unit-tests:
  extends: .test-template
  script:
    - cargo test --lib
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Integration tests
integration-tests:
  extends: .test-template
  script:
    - cargo test --test '*'
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Lint with clippy
lint:
  extends: .test-template
  dependencies: [] # No dependencies needed for linting
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Check formatting
format:
  extends: .test-template
  dependencies: [] # No dependencies needed for formatting
  script:
    - rustup component add rustfmt
    - cargo fmt -- --check
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"

# Deployment template
.deploy-template:
  stage: deploy
  script:
    - echo "Deploying to ${ENVIRONMENT} environment"
    - cp target/release/wrkflw /tmp/wrkflw-${ENVIRONMENT}
  artifacts:
    paths:
      - /tmp/wrkflw-${ENVIRONMENT}
  dependencies:
    - build
test_gitlab_ci/advanced.gitlab-ci.yml (new file, 197 lines)
@@ -0,0 +1,197 @@
stages:
  - setup
  - build
  - test
  - package
  - deploy

variables:
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"
  RUST_BACKTRACE: "1"

workflow:
  rules:
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/
    - if: $CI_COMMIT_BRANCH =~ /^feature\/.*/
    - if: $CI_COMMIT_BRANCH == "staging"

# Default image and settings for all jobs
default:
  image: rust:1.76
  interruptible: true
  retry:
    max: 2
    when:
      - runner_system_failure
      - stuck_or_timeout_failure

# Cache configuration
.cargo-cache:
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/
    policy: pull-push

# Job to initialize the environment
setup:
  stage: setup
  extends: .cargo-cache
  cache:
    policy: push
  script:
    - cargo --version
    - rustc --version
    - cargo fetch
  artifacts:
    paths:
      - Cargo.lock

# Matrix build for multiple platforms
.build-matrix:
  stage: build
  extends: .cargo-cache
  needs:
    - setup
  parallel:
    matrix:
      - TARGET:
          - x86_64-unknown-linux-gnu
          - x86_64-apple-darwin
          - aarch64-apple-darwin
          - x86_64-pc-windows-msvc
        RUST_VERSION:
          - "1.75"
          - "1.76"
  script:
    - rustup target add $TARGET
    - cargo build --release --target $TARGET
  artifacts:
    paths:
      - target/$TARGET/release/
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_BRANCH == "main" || $CI_COMMIT_TAG
      when: always
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual
      allow_failure: true

# Regular build job for most cases
build:
  stage: build
  extends: .cargo-cache
  needs:
    - setup
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_BRANCH != "main" && !$CI_COMMIT_TAG
      when: always

# Test with different feature combinations
.test-template:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  artifacts:
    reports:
      junit: test-results.xml
    when: always

test-default:
  extends: .test-template
  script:
    - cargo test -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

test-all-features:
  extends: .test-template
  script:
    - cargo test --all-features -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

test-no-features:
  extends: .test-template
  script:
    - cargo test --no-default-features -- -Z unstable-options --format json | tee test-output.json
    - cat test-output.json | jq -r '.[]' > test-results.xml

# Security scanning
security:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  script:
    - cargo install cargo-audit || true
    - cargo audit
  allow_failure: true

# Linting
lint:
  stage: test
  extends: .cargo-cache
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

# Package for different targets
package:
  stage: package
  extends: .cargo-cache
  needs:
    - job: build
      artifacts: true
    - test-default
    - test-all-features
  script:
    - mkdir -p packages
    - tar -czf packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz -C target/release wrkflw
  artifacts:
    paths:
      - packages/
  only:
    - main
    - tags

# Deploy to staging
deploy-staging:
  stage: deploy
  image: alpine
  needs:
    - package
  environment:
    name: staging
  script:
    - apk add --no-cache curl
    - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${STAGING_DEPLOY_URL}
  only:
    - staging

# Deploy to production
deploy-production:
  stage: deploy
  image: alpine
  needs:
    - package
  environment:
    name: production
  script:
    - apk add --no-cache curl
    - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${PROD_DEPLOY_URL}
  only:
    - tags
  when: manual
test_gitlab_ci/basic.gitlab-ci.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
stages:
  - build
  - test
  - deploy

variables:
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default image for all jobs
image: rust:1.76

build:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week

test:
  stage: test
  script:
    - cargo test
  dependencies:
    - build

lint:
  stage: test
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
    - cargo fmt -- --check

deploy:
  stage: deploy
  script:
    - echo "Deploying application..."
    - cp target/release/wrkflw /usr/local/bin/
  only:
    - main
  environment:
    name: production
  dependencies:
    - build
test_gitlab_ci/docker.gitlab-ci.yml (new file, 97 lines)
@@ -0,0 +1,97 @@
stages:
  - build
  - test
  - deploy

variables:
  DOCKER_DRIVER: overlay2
  DOCKER_TLS_CERTDIR: "/certs"
  CONTAINER_IMAGE: ${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG}
  CONTAINER_IMAGE_LATEST: ${CI_REGISTRY_IMAGE}:latest

# Use Docker-in-Docker for building and testing
.docker:
  image: docker:20.10
  services:
    - docker:20.10-dind
  variables:
    DOCKER_HOST: tcp://docker:2376
    DOCKER_TLS_VERIFY: 1
    DOCKER_CERT_PATH: $DOCKER_TLS_CERTDIR/client
  before_script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY

# Build the Docker image
build-docker:
  extends: .docker
  stage: build
  script:
    - docker build --pull -t $CONTAINER_IMAGE -t $CONTAINER_IMAGE_LATEST .
    - docker push $CONTAINER_IMAGE
    - docker push $CONTAINER_IMAGE_LATEST
  only:
    - main
    - tags

# Run tests inside Docker
test-docker:
  extends: .docker
  stage: test
  script:
    - docker pull $CONTAINER_IMAGE
    - docker run --rm $CONTAINER_IMAGE cargo test
  dependencies:
    - build-docker

# Security scan the Docker image
security-scan:
  extends: .docker
  stage: test
  image: aquasec/trivy:latest
  script:
    - trivy image --no-progress --exit-code 1 --severity HIGH,CRITICAL $CONTAINER_IMAGE
  allow_failure: true

# Run a Docker container with our app in the staging environment
deploy-staging:
  extends: .docker
  stage: deploy
  environment:
    name: staging
    url: https://staging.example.com
  script:
    - docker pull $CONTAINER_IMAGE
    - docker tag $CONTAINER_IMAGE wrkflw-staging
    - |
      cat > deploy.sh << 'EOF'
      docker stop wrkflw-staging || true
      docker rm wrkflw-staging || true
      docker run -d --name wrkflw-staging -p 8080:8080 wrkflw-staging
      EOF
    - chmod +x deploy.sh
    - ssh $STAGING_USER@$STAGING_HOST 'bash -s' < deploy.sh
  only:
    - main
  when: manual

# Run a Docker container with our app in the production environment
deploy-production:
  extends: .docker
  stage: deploy
  environment:
    name: production
    url: https://wrkflw.example.com
  script:
    - docker pull $CONTAINER_IMAGE
    - docker tag $CONTAINER_IMAGE wrkflw-production
    - |
      cat > deploy.sh << 'EOF'
      docker stop wrkflw-production || true
      docker rm wrkflw-production || true
      docker run -d --name wrkflw-production -p 80:8080 wrkflw-production
      EOF
    - chmod +x deploy.sh
    - ssh $PRODUCTION_USER@$PRODUCTION_HOST 'bash -s' < deploy.sh
  only:
    - tags
  when: manual
test_gitlab_ci/includes.gitlab-ci.yml (new file, 40 lines)
@@ -0,0 +1,40 @@
stages:
  - build
  - test
  - deploy

# Including external files
include:
  - local: '.gitlab/ci/build.yml' # Will be created in a moment
  - local: '.gitlab/ci/test.yml' # Will be created in a moment
  - template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml' # Built-in template

variables:
  RUST_VERSION: "1.76"
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default settings for all jobs
default:
  image: rust:${RUST_VERSION}
  before_script:
    - rustc --version
    - cargo --version

# Main pipeline jobs that use the included templates
production_deploy:
  stage: deploy
  extends: .deploy-template # This template is defined in one of the included files
  variables:
    ENVIRONMENT: production
  only:
    - main
  when: manual

staging_deploy:
  stage: deploy
  extends: .deploy-template
  variables:
    ENVIRONMENT: staging
  only:
    - staging
  when: manual
test_gitlab_ci/invalid.gitlab-ci.yml (new file, 57 lines)
@@ -0,0 +1,57 @@
# Invalid GitLab CI file with common mistakes

# Missing stages definition
# stages:
#   - build
#   - test

variables:
  CARGO_HOME: ${CI_PROJECT_DIR}/.cargo # Missing quotes around value with variables

# Invalid job definition (missing script)
build:
  stage: build # Referring to undefined stage
  # Missing required script section
  artifacts:
    paths:
      - target/release/
    expire_in: 1 week

# Invalid job with incorrect when value
test:
  stage: test
  script:
    - cargo test
  when: never # Invalid value for when (should be always, manual, or delayed)
  dependencies:
    - non_existent_job # Dependency on non-existent job

# Improperly structured job with invalid keys
deploy:
  stagee: deploy # Typo in stage key
  scriptt: # Typo in script key
    - echo "Deploying..."
  only:
    - main
  environment:
    production # Incorrect format for environment
  retry: hello # Incorrect type for retry (should be integer or object)

# Invalid rules section
lint:
  stage: test
  script:
    - cargo clippy
  rules:
    - equals: $CI_COMMIT_BRANCH == "main" # Invalid rule (should be if, changes, exists, etc.)

# Job with invalid cache configuration
cache-test:
  stage: test
  script:
    - echo "Testing cache"
  cache:
    paths:
      - ${CARGO_HOME}
    key: [invalid, key, type] # Invalid type for key (should be string)
    policy: invalid-policy # Invalid policy value
test_gitlab_ci/minimal.gitlab-ci.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
# Minimal GitLab CI configuration

image: rust:latest

build:
  script:
    - cargo build

test:
  script:
    - cargo test
test_gitlab_ci/services.gitlab-ci.yml (new file, 167 lines)
@@ -0,0 +1,167 @@
stages:
  - build
  - test
  - deploy

variables:
  POSTGRES_DB: test_db
  POSTGRES_USER: postgres
  POSTGRES_PASSWORD: postgres
  POSTGRES_HOST: postgres
  REDIS_HOST: redis
  MONGO_HOST: mongo
  RUST_BACKTRACE: 1

# Default settings
default:
  image: rust:1.76

# Build the application
build:
  stage: build
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/

# Run unit tests (no services needed)
unit-tests:
  stage: test
  needs:
    - build
  script:
    - cargo test --lib
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with a PostgreSQL service
postgres-tests:
  stage: test
  needs:
    - build
  services:
    - name: postgres:14-alpine
      alias: postgres
  variables:
    # Service-specific variables
    POSTGRES_DB: test_db
    POSTGRES_USER: postgres
    POSTGRES_PASSWORD: postgres
    DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db
  script:
    - apt-get update && apt-get install -y postgresql-client
    - cd target/release && ./wrkflw test-postgres
    - psql -h postgres -U postgres -d test_db -c "SELECT 1;"
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with Redis service
redis-tests:
  stage: test
  needs:
    - build
  services:
    - name: redis:alpine
      alias: redis
  variables:
    REDIS_URL: redis://redis:6379
  script:
    - apt-get update && apt-get install -y redis-tools
    - cd target/release && ./wrkflw test-redis
    - redis-cli -h redis PING
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run integration tests with MongoDB service
mongo-tests:
  stage: test
  needs:
    - build
  services:
    - name: mongo:5
      alias: mongo
  variables:
    MONGO_URL: mongodb://mongo:27017
  script:
    - apt-get update && apt-get install -y mongodb-clients
    - cd target/release && ./wrkflw test-mongo
    - mongosh --host mongo --eval "db.version()"
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Run multi-service integration tests
all-services-test:
  stage: test
  needs:
    - build
  services:
    - name: postgres:14-alpine
      alias: postgres
    - name: redis:alpine
      alias: redis
    - name: mongo:5
      alias: mongo
    - name: rabbitmq:3-management
      alias: rabbitmq
  variables:
    DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db
    REDIS_URL: redis://redis:6379
    MONGO_URL: mongodb://mongo:27017
    RABBITMQ_URL: amqp://guest:guest@rabbitmq:5672
  script:
    - apt-get update && apt-get install -y postgresql-client redis-tools mongodb-clients
    - cd target/release && ./wrkflw test-all-services
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CI_PROJECT_DIR}/.cargo
      - target/
    policy: pull

# Deploy to production
deploy:
  stage: deploy
  needs:
    - unit-tests
    - postgres-tests
    - redis-tests
    - mongo-tests
  script:
    - echo "Deploying application..."
    - cp target/release/wrkflw /tmp/
  only:
    - main
test_gitlab_ci/workflow.gitlab-ci.yml (new file, 186 lines)
@@ -0,0 +1,186 @@
stages:
  - prepare
  - build
  - test
  - deploy

# Global workflow rules to control when pipelines run
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
    - if: $CI_COMMIT_BRANCH == "main"
      when: always
    - if: $CI_COMMIT_TAG
      when: always
    - if: $CI_COMMIT_BRANCH == "develop"
      when: always
    - if: $CI_COMMIT_BRANCH =~ /^release\/.*/
      when: always
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/
      when: always
    - when: never # Skip all other branches

variables:
  RUST_VERSION: "1.76"
  CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"

# Default settings
default:
  image: "rust:${RUST_VERSION}"
  interruptible: true

# Cache definition to be used by other jobs
.cargo-cache:
  cache:
    key:
      files:
        - Cargo.lock
    paths:
      - ${CARGO_HOME}
      - target/

# Prepare the dependencies (runs on all branches)
prepare:
  stage: prepare
  extends: .cargo-cache
  script:
    - cargo fetch --locked
  artifacts:
    paths:
      - Cargo.lock

# Build only on main branch and MRs
build:
  stage: build
  extends: .cargo-cache
  needs:
    - prepare
  script:
    - cargo build --release
  artifacts:
    paths:
      - target/release/
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_TAG

# Build with debug symbols on develop branch
debug-build:
  stage: build
  extends: .cargo-cache
  needs:
    - prepare
  script:
    - cargo build
  artifacts:
    paths:
      - target/debug/
  rules:
    - if: $CI_COMMIT_BRANCH == "develop"

# Test job - run on all branches except release and hotfix
test:
  stage: test
  extends: .cargo-cache
  needs:
    - job: build
      optional: true
    - job: debug-build
      optional: true
  script:
    - |
      if [ -d "target/release" ]; then
        cargo test --release
      else
        cargo test
      fi
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_BRANCH == "develop"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /^feature\/.*/

# Only lint on MRs and develop
lint:
  stage: test
  extends: .cargo-cache
  script:
    - rustup component add clippy
    - cargo clippy -- -D warnings
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "develop"

# Run benchmarks only on main branch
benchmark:
  stage: test
  extends: .cargo-cache
  needs:
    - build
  script:
    - cargo bench
  rules:
    - if: $CI_COMMIT_BRANCH == "main"
    - if: $CI_COMMIT_TAG

# Deploy to staging on develop branch pushes
deploy-staging:
  stage: deploy
  needs:
    - test
  environment:
    name: staging
    url: https://staging.example.com
  script:
    - echo "Deploying to staging..."
    - cp target/release/wrkflw /tmp/wrkflw-staging
  rules:
    - if: $CI_COMMIT_BRANCH == "develop"
      when: on_success
    - if: $CI_COMMIT_BRANCH =~ /^release\/.*/
      when: manual

# Deploy to production on main branch and tags
deploy-prod:
  stage: deploy
  needs:
    - test
    - benchmark
  environment:
    name: production
    url: https://example.com
  script:
    - echo "Deploying to production..."
    - cp target/release/wrkflw /tmp/wrkflw-prod
  rules:
    - if: $CI_COMMIT_BRANCH == "main"
      when: manual
    - if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/
      when: manual
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/
      when: manual

# Notify slack only when deploy succeeded or failed
notify:
  stage: .post
  image: curlimages/curl:latest
  needs:
    - job: deploy-staging
      optional: true
    - job: deploy-prod
      optional: true
  script:
    - |
      if [ "$CI_JOB_STATUS" == "success" ]; then
        curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment succeeded! :tada:"}' $SLACK_WEBHOOK_URL
      else
        curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment failed! :boom:"}' $SLACK_WEBHOOK_URL
      fi
  rules:
    - if: $CI_COMMIT_BRANCH == "main" && $CI_PIPELINE_SOURCE != "merge_request_event"
    - if: $CI_COMMIT_BRANCH == "develop" && $CI_PIPELINE_SOURCE != "merge_request_event"
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/