2025-05-02 15:45:51 +05:30
|
|
|
#[allow(unused_imports)]
|
2025-04-21 16:42:16 +05:30
|
|
|
use bollard::Docker;
|
|
|
|
|
use futures::future;
|
2025-04-24 16:49:07 +05:30
|
|
|
use regex;
|
2025-04-21 16:42:16 +05:30
|
|
|
use serde_yaml::Value;
|
2025-03-29 12:47:20 +05:30
|
|
|
use std::collections::HashMap;
|
2025-04-04 15:08:29 +05:30
|
|
|
use std::fs;
|
2025-03-29 12:47:20 +05:30
|
|
|
use std::path::Path;
|
2025-04-24 16:49:07 +05:30
|
|
|
use std::process::Command;
|
2025-03-29 12:47:20 +05:30
|
|
|
use thiserror::Error;
|
|
|
|
|
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
use crate::dependency;
|
|
|
|
|
use crate::docker;
|
|
|
|
|
use crate::environment;
|
2025-08-09 15:06:17 +05:30
|
|
|
use crate::podman;
|
2025-08-09 17:03:03 +05:30
|
|
|
use wrkflw_logging;
|
|
|
|
|
use wrkflw_matrix::MatrixCombination;
|
|
|
|
|
use wrkflw_models::gitlab::Pipeline;
|
|
|
|
|
use wrkflw_parser::gitlab::{self, parse_pipeline};
|
|
|
|
|
use wrkflw_parser::workflow::{self, parse_workflow, ActionInfo, Job, WorkflowDefinition};
|
|
|
|
|
use wrkflw_runtime::container::ContainerRuntime;
|
|
|
|
|
use wrkflw_runtime::emulation;
|
2025-08-14 23:26:30 +05:30
|
|
|
use wrkflw_secrets::{SecretConfig, SecretManager, SecretMasker, SecretSubstitution};
|
2025-04-21 16:42:16 +05:30
|
|
|
|
|
|
|
|
#[allow(unused_variables, unused_assignments)]
|
2025-03-29 12:47:20 +05:30
|
|
|
/// Execute a GitHub Actions workflow file locally
|
|
|
|
|
pub async fn execute_workflow(
|
|
|
|
|
workflow_path: &Path,
|
2025-08-09 13:18:08 +05:30
|
|
|
config: ExecutionConfig,
|
2025-03-29 12:47:20 +05:30
|
|
|
) -> Result<ExecutionResult, ExecutionError> {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!("Executing workflow: {}", workflow_path.display()));
|
|
|
|
|
wrkflw_logging::info(&format!("Runtime: {:?}", config.runtime_type));
|
2025-04-06 21:00:40 +05:30
|
|
|
|
2025-05-02 15:08:59 +05:30
|
|
|
// Determine if this is a GitLab CI/CD pipeline or GitHub Actions workflow
|
|
|
|
|
let is_gitlab = is_gitlab_pipeline(workflow_path);
|
|
|
|
|
|
|
|
|
|
if is_gitlab {
|
2025-08-09 13:18:08 +05:30
|
|
|
execute_gitlab_pipeline(workflow_path, config.clone()).await
|
2025-05-02 15:08:59 +05:30
|
|
|
} else {
|
2025-08-09 13:18:08 +05:30
|
|
|
execute_github_workflow(workflow_path, config.clone()).await
|
2025-05-02 15:08:59 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Determine if a file is a GitLab CI/CD pipeline.
///
/// Detection order:
/// 1. File name: `.gitlab-ci.yml` (or any name ending in `gitlab-ci.yml`)
///    is treated as GitLab immediately.
/// 2. Content: if the name is not conclusive, the file is read and treated as
///    GitLab when it contains GitLab top-level keys (`stages:`,
///    `before_script:`, `after_script:`) and none of the GitHub Actions
///    markers (`on:`, `runs-on:`, `uses:`).
///
/// Returns `false` for unreadable files that also fail the name check.
fn is_gitlab_pipeline(path: &Path) -> bool {
    // Check the file name first; a positive match is authoritative.
    if let Some(file_name_str) = path.file_name().and_then(|n| n.to_str()) {
        if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
            return true;
        }
        // NOTE: a negative name match must NOT return early — previously an
        // unconditional `return` here made the content-based detection below
        // unreachable for every path with a valid UTF-8 filename.
    }

    // If the file name check fails, try to read and determine by content.
    if let Ok(content) = fs::read_to_string(path) {
        // GitLab CI/CD pipelines typically have stages, before_script,
        // after_script at the top level.
        let has_gitlab_keys = content.contains("stages:")
            || content.contains("before_script:")
            || content.contains("after_script:");

        // GitHub Actions specific keys indicate it's not a GitLab pipeline.
        let has_github_keys = content.contains("on:")
            || content.contains("runs-on:")
            || content.contains("uses:");

        if has_gitlab_keys && !has_github_keys {
            return true;
        }
    }

    false
}
|
|
|
|
|
|
|
|
|
|
/// Execute a GitHub Actions workflow file locally
///
/// High-level flow: parse the workflow, resolve job dependencies into an
/// execution plan (a sequence of parallelizable batches), pick a container
/// runtime, build a GitHub-like environment inside a temporary workspace,
/// then run each batch in order while collecting per-job/per-step results
/// and a human-readable failure summary.
async fn execute_github_workflow(
    workflow_path: &Path,
    config: ExecutionConfig,
) -> Result<ExecutionResult, ExecutionError> {
    // 1. Parse workflow file
    let workflow = parse_workflow(workflow_path)?;

    // 2. Resolve job dependencies and create execution plan
    // (each element of the plan is a batch of jobs safe to run in parallel)
    let execution_plan = dependency::resolve_dependencies(&workflow)?;

    // 3. Initialize appropriate runtime (may fall back to emulation if the
    // requested container engine is unavailable)
    let runtime = initialize_runtime(
        config.runtime_type.clone(),
        config.preserve_containers_on_failure,
    )?;

    // Create a temporary workspace directory (cleaned up when `workspace_dir`
    // is dropped at the end of this function)
    let workspace_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create workspace: {}", e)))?;

    // 4. Set up GitHub-like environment (GITHUB_* variables etc.)
    let mut env_context = environment::create_github_context(&workflow, workspace_dir.path());

    // Add runtime mode to environment
    env_context.insert(
        "WRKFLW_RUNTIME_MODE".to_string(),
        match config.runtime_type {
            RuntimeType::Emulation => "emulation".to_string(),
            RuntimeType::SecureEmulation => "secure_emulation".to_string(),
            RuntimeType::Docker => "docker".to_string(),
            RuntimeType::Podman => "podman".to_string(),
        },
    );

    // Add flag to hide GitHub action messages when in emulation mode
    env_context.insert(
        "WRKFLW_HIDE_ACTION_MESSAGES".to_string(),
        "true".to_string(),
    );

    // Setup GitHub environment files (GITHUB_ENV, GITHUB_OUTPUT, ...)
    environment::setup_github_environment_files(workspace_dir.path()).map_err(|e| {
        ExecutionError::Execution(format!("Failed to setup GitHub env files: {}", e))
    })?;

    // 5. Initialize secrets management.
    // NOTE(review): both branches produce Some(...); the Option wrapper exists
    // only to match execute_job_batch's `Option<&SecretManager>` parameter.
    let secret_manager = if let Some(secrets_config) = &config.secrets_config {
        Some(
            SecretManager::new(secrets_config.clone())
                .await
                .map_err(|e| {
                    ExecutionError::Execution(format!("Failed to initialize secret manager: {}", e))
                })?,
        )
    } else {
        Some(SecretManager::default().await.map_err(|e| {
            ExecutionError::Execution(format!(
                "Failed to initialize default secret manager: {}",
                e
            ))
        })?)
    };

    // Used to redact secret values from logs/output.
    let secret_masker = SecretMasker::new();

    // 6. Execute jobs according to the plan
    let mut results = Vec::new();
    let mut has_failures = false;
    let mut failure_details = String::new();

    for job_batch in execution_plan {
        // Execute jobs in parallel if they don't depend on each other
        let job_results = execute_job_batch(
            &job_batch,
            &workflow,
            runtime.as_ref(),
            &env_context,
            config.verbose,
            secret_manager.as_ref(),
            Some(&secret_masker),
        )
        .await?;

        // Check for job failures and collect details.
        // Execution continues with later batches even after a failure.
        for job_result in &job_results {
            if job_result.status == JobStatus::Failure {
                has_failures = true;
                failure_details.push_str(&format!("\n❌ Job failed: {}\n", job_result.name));

                // Add step details for failed jobs
                for step in &job_result.steps {
                    if step.status == StepStatus::Failure {
                        failure_details.push_str(&format!(" ❌ {}: {}\n", step.name, step.output));
                    }
                }
            }
        }

        results.extend(job_results);
    }

    // If there were failures, add detailed failure information to the result
    if has_failures {
        wrkflw_logging::error(&format!("Workflow execution failed:{}", failure_details));
    }

    Ok(ExecutionResult {
        jobs: results,
        failure_details: if has_failures {
            Some(failure_details)
        } else {
            None
        },
    })
}
|
|
|
|
|
|
2025-05-02 15:08:59 +05:30
|
|
|
/// Execute a GitLab CI/CD pipeline locally
///
/// Mirrors [`execute_github_workflow`]: parse the pipeline, convert it to the
/// internal workflow format, derive a stage-based execution plan, then run
/// each stage's jobs as a batch, collecting results and a failure summary.
async fn execute_gitlab_pipeline(
    pipeline_path: &Path,
    config: ExecutionConfig,
) -> Result<ExecutionResult, ExecutionError> {
    wrkflw_logging::info("Executing GitLab CI/CD pipeline");

    // 1. Parse the GitLab pipeline file
    let pipeline = parse_pipeline(pipeline_path)
        .map_err(|e| ExecutionError::Parse(format!("Failed to parse GitLab pipeline: {}", e)))?;

    // 2. Convert the GitLab pipeline to a format compatible with the workflow executor
    let workflow = gitlab::convert_to_workflow_format(&pipeline);

    // 3. Resolve job dependencies based on stages
    // (jobs in the same stage run in parallel, stages run sequentially)
    let execution_plan = resolve_gitlab_dependencies(&pipeline, &workflow)?;

    // 4. Initialize appropriate runtime (may fall back to emulation)
    let runtime = initialize_runtime(
        config.runtime_type.clone(),
        config.preserve_containers_on_failure,
    )?;

    // Create a temporary workspace directory (removed when dropped)
    let workspace_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create workspace: {}", e)))?;

    // 5. Set up GitLab-like environment (CI, GITLAB_CI, CI_PROJECT_DIR, ...)
    let mut env_context = create_gitlab_context(&pipeline, workspace_dir.path());

    // Add runtime mode to environment
    env_context.insert(
        "WRKFLW_RUNTIME_MODE".to_string(),
        match config.runtime_type {
            RuntimeType::Emulation => "emulation".to_string(),
            RuntimeType::SecureEmulation => "secure_emulation".to_string(),
            RuntimeType::Docker => "docker".to_string(),
            RuntimeType::Podman => "podman".to_string(),
        },
    );

    // Setup environment files (reuses the GitHub env-file layout, which the
    // shared executor expects regardless of pipeline flavor)
    environment::setup_github_environment_files(workspace_dir.path()).map_err(|e| {
        ExecutionError::Execution(format!("Failed to setup environment files: {}", e))
    })?;

    // 6. Initialize secrets management.
    // NOTE(review): both branches produce Some(...); the Option wrapper exists
    // only to match execute_job_batch's `Option<&SecretManager>` parameter.
    let secret_manager = if let Some(secrets_config) = &config.secrets_config {
        Some(
            SecretManager::new(secrets_config.clone())
                .await
                .map_err(|e| {
                    ExecutionError::Execution(format!("Failed to initialize secret manager: {}", e))
                })?,
        )
    } else {
        Some(SecretManager::default().await.map_err(|e| {
            ExecutionError::Execution(format!(
                "Failed to initialize default secret manager: {}",
                e
            ))
        })?)
    };

    // Used to redact secret values from logs/output.
    let secret_masker = SecretMasker::new();

    // 7. Execute jobs according to the plan
    let mut results = Vec::new();
    let mut has_failures = false;
    let mut failure_details = String::new();

    for job_batch in execution_plan {
        // Execute jobs in parallel if they don't depend on each other
        let job_results = execute_job_batch(
            &job_batch,
            &workflow,
            runtime.as_ref(),
            &env_context,
            config.verbose,
            secret_manager.as_ref(),
            Some(&secret_masker),
        )
        .await?;

        // Check for job failures and collect details.
        // Later stages still run after a failure.
        for job_result in &job_results {
            if job_result.status == JobStatus::Failure {
                has_failures = true;
                failure_details.push_str(&format!("\n❌ Job failed: {}\n", job_result.name));

                // Add step details for failed jobs
                for step in &job_result.steps {
                    if step.status == StepStatus::Failure {
                        failure_details.push_str(&format!(" ❌ {}: {}\n", step.name, step.output));
                    }
                }
            }
        }

        results.extend(job_results);
    }

    // If there were failures, add detailed failure information to the result
    if has_failures {
        wrkflw_logging::error(&format!("Pipeline execution failed:{}", failure_details));
    }

    Ok(ExecutionResult {
        jobs: results,
        failure_details: if has_failures {
            Some(failure_details)
        } else {
            None
        },
    })
}
|
|
|
|
|
|
|
|
|
|
/// Create an environment context for GitLab CI/CD pipeline execution
|
|
|
|
|
fn create_gitlab_context(pipeline: &Pipeline, workspace_dir: &Path) -> HashMap<String, String> {
|
|
|
|
|
let mut env_context = HashMap::new();
|
|
|
|
|
|
|
|
|
|
// Add GitLab CI/CD environment variables
|
|
|
|
|
env_context.insert("CI".to_string(), "true".to_string());
|
|
|
|
|
env_context.insert("GITLAB_CI".to_string(), "true".to_string());
|
|
|
|
|
|
|
|
|
|
// Add custom environment variable to indicate use in wrkflw
|
|
|
|
|
env_context.insert("WRKFLW_CI".to_string(), "true".to_string());
|
|
|
|
|
|
|
|
|
|
// Add workspace directory
|
|
|
|
|
env_context.insert(
|
|
|
|
|
"CI_PROJECT_DIR".to_string(),
|
|
|
|
|
workspace_dir.to_string_lossy().to_string(),
|
|
|
|
|
);
|
|
|
|
|
|
2025-05-02 15:45:51 +05:30
|
|
|
// Also add the workspace as the GitHub workspace for compatibility with emulation runtime
|
|
|
|
|
env_context.insert(
|
|
|
|
|
"GITHUB_WORKSPACE".to_string(),
|
|
|
|
|
workspace_dir.to_string_lossy().to_string(),
|
|
|
|
|
);
|
|
|
|
|
|
2025-05-02 15:08:59 +05:30
|
|
|
// Add global variables from the pipeline
|
|
|
|
|
if let Some(variables) = &pipeline.variables {
|
|
|
|
|
for (key, value) in variables {
|
|
|
|
|
env_context.insert(key.clone(), value.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
env_context
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Resolve GitLab CI/CD pipeline dependencies
|
|
|
|
|
fn resolve_gitlab_dependencies(
|
|
|
|
|
pipeline: &Pipeline,
|
|
|
|
|
workflow: &WorkflowDefinition,
|
|
|
|
|
) -> Result<Vec<Vec<String>>, ExecutionError> {
|
|
|
|
|
// For GitLab CI/CD pipelines, jobs within the same stage can run in parallel,
|
|
|
|
|
// but jobs in different stages run sequentially
|
|
|
|
|
|
|
|
|
|
// Get stages from the pipeline or create a default one
|
|
|
|
|
let stages = match &pipeline.stages {
|
|
|
|
|
Some(defined_stages) => defined_stages.clone(),
|
|
|
|
|
None => vec![
|
|
|
|
|
"build".to_string(),
|
|
|
|
|
"test".to_string(),
|
|
|
|
|
"deploy".to_string(),
|
|
|
|
|
],
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Create an execution plan based on stages
|
|
|
|
|
let mut execution_plan = Vec::new();
|
|
|
|
|
|
|
|
|
|
// For each stage, collect the jobs that belong to it
|
|
|
|
|
for stage in stages {
|
|
|
|
|
let mut stage_jobs = Vec::new();
|
|
|
|
|
|
|
|
|
|
for (job_name, job) in &pipeline.jobs {
|
|
|
|
|
// Skip template jobs
|
|
|
|
|
if let Some(true) = job.template {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get the job's stage, or assume "test" if not specified
|
|
|
|
|
let default_stage = "test".to_string();
|
|
|
|
|
let job_stage = job.stage.as_ref().unwrap_or(&default_stage);
|
|
|
|
|
|
|
|
|
|
// If the job belongs to the current stage, add it to the batch
|
|
|
|
|
if job_stage == &stage {
|
|
|
|
|
stage_jobs.push(job_name.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !stage_jobs.is_empty() {
|
|
|
|
|
execution_plan.push(stage_jobs);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Also create a batch for jobs without a stage
|
|
|
|
|
let mut stageless_jobs = Vec::new();
|
|
|
|
|
|
|
|
|
|
for (job_name, job) in &pipeline.jobs {
|
|
|
|
|
// Skip template jobs
|
|
|
|
|
if let Some(true) = job.template {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if job.stage.is_none() {
|
|
|
|
|
stageless_jobs.push(job_name.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if !stageless_jobs.is_empty() {
|
|
|
|
|
execution_plan.push(stageless_jobs);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(execution_plan)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-09 15:06:17 +05:30
|
|
|
// Determine if Docker/Podman is available or fall back to emulation
|
2025-03-29 12:47:20 +05:30
|
|
|
fn initialize_runtime(
|
|
|
|
|
runtime_type: RuntimeType,
|
2025-08-09 13:18:08 +05:30
|
|
|
preserve_containers_on_failure: bool,
|
2025-03-29 12:47:20 +05:30
|
|
|
) -> Result<Box<dyn ContainerRuntime>, ExecutionError> {
|
|
|
|
|
match runtime_type {
|
|
|
|
|
RuntimeType::Docker => {
|
|
|
|
|
if docker::is_available() {
|
2025-04-21 16:42:16 +05:30
|
|
|
// Handle the Result returned by DockerRuntime::new()
|
2025-08-09 13:18:08 +05:30
|
|
|
match docker::DockerRuntime::new_with_config(preserve_containers_on_failure) {
|
2025-04-21 16:42:16 +05:30
|
|
|
Ok(docker_runtime) => Ok(Box::new(docker_runtime)),
|
|
|
|
|
Err(e) => {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error(&format!(
|
2025-04-21 16:42:16 +05:30
|
|
|
"Failed to initialize Docker runtime: {}, falling back to emulation mode",
|
|
|
|
|
e
|
|
|
|
|
));
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
Ok(Box::new(emulation::EmulationRuntime::new()))
|
2025-04-21 16:42:16 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
} else {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error("Docker not available, falling back to emulation mode");
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
Ok(Box::new(emulation::EmulationRuntime::new()))
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-08-09 15:06:17 +05:30
|
|
|
RuntimeType::Podman => {
|
|
|
|
|
if podman::is_available() {
|
|
|
|
|
// Handle the Result returned by PodmanRuntime::new()
|
|
|
|
|
match podman::PodmanRuntime::new_with_config(preserve_containers_on_failure) {
|
|
|
|
|
Ok(podman_runtime) => Ok(Box::new(podman_runtime)),
|
|
|
|
|
Err(e) => {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error(&format!(
|
2025-08-09 15:06:17 +05:30
|
|
|
"Failed to initialize Podman runtime: {}, falling back to emulation mode",
|
|
|
|
|
e
|
|
|
|
|
));
|
|
|
|
|
Ok(Box::new(emulation::EmulationRuntime::new()))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error("Podman not available, falling back to emulation mode");
|
2025-08-09 15:06:17 +05:30
|
|
|
Ok(Box::new(emulation::EmulationRuntime::new()))
|
|
|
|
|
}
|
|
|
|
|
}
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
RuntimeType::Emulation => Ok(Box::new(emulation::EmulationRuntime::new())),
|
feat: Add comprehensive sandboxing for secure emulation mode
Security Features:
- Implement secure emulation runtime with command sandboxing
- Add command validation, filtering, and dangerous pattern detection
- Block harmful commands like 'rm -rf /', 'sudo', 'dd', etc.
- Add resource limits (CPU, memory, execution time, process count)
- Implement filesystem isolation and access controls
- Add environment variable sanitization
- Support shell operators (&&, ||, |, ;) with proper parsing
New Runtime Mode:
- Add 'secure-emulation' runtime option to CLI
- Update UI to support new runtime mode with green security indicator
- Mark legacy 'emulation' mode as unsafe in help text
- Default to secure mode for local development safety
Documentation:
- Create comprehensive security documentation (README_SECURITY.md)
- Update main README with security mode information
- Add example workflows demonstrating safe vs dangerous commands
- Include migration guide and best practices
Testing:
- Add comprehensive test suite for sandbox functionality
- Include security demo workflows for testing
- Test dangerous command blocking and safe command execution
- Verify resource limits and timeout functionality
Code Quality:
- Fix all clippy warnings with proper struct initialization
- Add proper error handling and user-friendly security messages
- Implement comprehensive logging for security events
- Follow Rust best practices throughout
This addresses security concerns by preventing accidental harmful
commands while maintaining full compatibility with legitimate CI/CD
workflows. Users can now safely run untrusted workflows locally
without risk to their host system.
2025-08-13 14:30:51 +05:30
|
|
|
RuntimeType::SecureEmulation => Ok(Box::new(
|
|
|
|
|
wrkflw_runtime::secure_emulation::SecureEmulationRuntime::new(),
|
|
|
|
|
)),
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Which execution backend runs workflow jobs.
#[derive(Debug, Clone, PartialEq)]
pub enum RuntimeType {
    /// Run jobs in Docker containers.
    Docker,
    /// Run jobs in Podman containers.
    Podman,
    /// Legacy emulation mode (runs commands without container isolation).
    Emulation,
    /// Sandboxed emulation mode with command filtering and resource limits.
    SecureEmulation,
}
|
|
|
|
|
|
2025-08-09 13:18:08 +05:30
|
|
|
/// Options controlling how a workflow/pipeline is executed.
#[derive(Debug, Clone)]
pub struct ExecutionConfig {
    /// Which backend (Docker/Podman/emulation) runs the jobs.
    pub runtime_type: RuntimeType,
    /// Emit more detailed output while executing jobs.
    pub verbose: bool,
    /// Keep containers around after a failed job (useful for debugging).
    pub preserve_containers_on_failure: bool,
    /// Optional secrets configuration; when `None`, a default
    /// `SecretManager` is constructed instead.
    pub secrets_config: Option<SecretConfig>,
}
|
|
|
|
|
|
2025-03-29 12:47:20 +05:30
|
|
|
/// Aggregate result of executing an entire workflow/pipeline.
pub struct ExecutionResult {
    /// One result per executed job, in execution-plan (batch) order.
    pub jobs: Vec<JobResult>,
    /// Human-readable summary of failed jobs/steps; `None` when everything
    /// succeeded.
    pub failure_details: Option<String>,
}
|
|
|
|
|
|
|
|
|
|
/// Outcome of a single job, including per-step results and captured logs.
pub struct JobResult {
    /// Job name as declared in the workflow/pipeline file.
    pub name: String,
    /// Overall outcome of the job.
    pub status: JobStatus,
    /// Result of each step executed within the job.
    pub steps: Vec<StepResult>,
    /// Log output captured for the job.
    pub logs: String,
}
|
|
|
|
|
|
|
|
|
|
/// Overall outcome of a job.
#[derive(Debug, Clone, PartialEq)]
// Some variants are only constructed by other crates in the workspace.
#[allow(dead_code)]
pub enum JobStatus {
    /// The job completed successfully.
    Success,
    /// The job failed.
    Failure,
    /// The job was not executed.
    Skipped,
}
|
|
|
|
|
|
2025-04-14 16:54:21 +05:30
|
|
|
/// Outcome of a single step within a job.
#[derive(Debug, Clone)]
pub struct StepResult {
    /// Step name (as declared, or derived from the step definition).
    pub name: String,
    /// Outcome of the step.
    pub status: StepStatus,
    /// Output captured from the step (used in failure summaries).
    pub output: String,
}
|
|
|
|
|
|
|
|
|
|
/// Outcome of a single step.
#[derive(Debug, Clone, PartialEq)]
// Some variants are only constructed by other crates in the workspace.
#[allow(dead_code)]
pub enum StepStatus {
    /// The step completed successfully.
    Success,
    /// The step failed.
    Failure,
    /// The step was not executed.
    Skipped,
}
|
|
|
|
|
|
|
|
|
|
/// Errors that can occur while executing a workflow or pipeline.
#[derive(Error, Debug)]
pub enum ExecutionError {
    /// The workflow/pipeline file could not be parsed.
    #[error("Parse error: {0}")]
    Parse(String),

    /// The container runtime (pull/build/run) reported a failure.
    #[error("Runtime error: {0}")]
    Runtime(String),

    /// A failure while preparing or running jobs (workspace creation,
    /// environment files, secret-manager setup, ...).
    #[error("Execution error: {0}")]
    Execution(String),

    /// An underlying I/O error, converted automatically via `?`.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
}
|
|
|
|
|
|
|
|
|
|
// Convert errors from other modules
|
|
|
|
|
impl From<String> for ExecutionError {
|
|
|
|
|
fn from(err: String) -> Self {
|
2025-04-21 18:04:52 +05:30
|
|
|
ExecutionError::Parse(err)
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add Action preparation functions

/// Prepare a workflow action and return the container image (or image tag)
/// that should run it.
///
/// Three kinds of actions are handled:
/// - `docker://` actions: the referenced image is pulled from its registry;
/// - local actions: a local `Dockerfile` is built into a uniquely-tagged
///   image; JS/composite local actions fall back to a Node image;
/// - other (marketplace GitHub) actions: an image is selected from the
///   repository name via `determine_action_image`.
async fn prepare_action(
    action: &ActionInfo,
    runtime: &dyn ContainerRuntime,
) -> Result<String, ExecutionError> {
    if action.is_docker {
        // Docker action: pull the image (strip the `docker://` scheme prefix
        // to obtain the plain image reference).
        let image = action.repository.trim_start_matches("docker://");

        runtime
            .pull_image(image)
            .await
            .map_err(|e| ExecutionError::Runtime(format!("Failed to pull Docker image: {}", e)))?;

        return Ok(image.to_string());
    }

    if action.is_local {
        // Local action: build from local directory
        // (for local actions, `repository` holds a filesystem path).
        let action_dir = Path::new(&action.repository);

        if !action_dir.exists() {
            return Err(ExecutionError::Execution(format!(
                "Local action directory not found: {}",
                action_dir.display()
            )));
        }

        let dockerfile = action_dir.join("Dockerfile");

        if dockerfile.exists() {
            // It's a Docker action, build it. A UUID tag keeps builds of
            // different local actions from colliding.
            let tag = format!("wrkflw-local-action:{}", uuid::Uuid::new_v4());

            runtime
                .build_image(&dockerfile, &tag)
                .await
                .map_err(|e| ExecutionError::Runtime(format!("Failed to build image: {}", e)))?;

            return Ok(tag);
        } else {
            // It's a JavaScript or composite action
            // For simplicity, we'll use node to run it (this would need more work for full support)
            return Ok("node:16-buster-slim".to_string());
        }
    }

    // GitHub action: determine appropriate image based on action type
    let image = determine_action_image(&action.repository);

    Ok(image)
}
|
|
|
|
|
|
|
|
|
|
/// Determine the appropriate Docker image for a GitHub action.
///
/// Well-known toolchain setup actions are mapped to matching official
/// images; core GitHub actions (checkout, artifacts, cache) get the `act`
/// runner image; everything else defaults to a Node image. Match order
/// matters: the more specific `actions/setup-*` prefixes are tested before
/// the core-action fallback group.
fn determine_action_image(repository: &str) -> String {
    let starts = |prefix: &str| repository.starts_with(prefix);

    let image = if starts("shivammathur/setup-php") {
        // composer image bundles both PHP and composer
        "composer:latest"
    } else if starts("actions/setup-python") {
        "python:3.11-slim"
    } else if starts("actions/setup-node") {
        "node:20-slim"
    } else if starts("actions/setup-java") {
        "eclipse-temurin:17-jdk"
    } else if starts("actions/setup-go") {
        "golang:1.21-slim"
    } else if starts("actions/setup-dotnet") {
        "mcr.microsoft.com/dotnet/sdk:7.0"
    } else if starts("actions-rs/toolchain") || starts("dtolnay/rust-toolchain") {
        "rust:latest"
    } else if starts("docker/") {
        "docker:latest"
    } else if starts("aws-actions/") {
        "amazon/aws-cli:latest"
    } else if starts("actions/checkout")
        || starts("actions/upload-artifact")
        || starts("actions/download-artifact")
        || starts("actions/cache")
    {
        // Core GitHub actions need a fuller environment; use the act runner image.
        "catthehacker/ubuntu:act-latest"
    } else {
        // Default for other (mostly Node-based) marketplace actions.
        "node:16-buster-slim"
    };

    image.to_string()
}
|
|
|
|
|
|
|
|
|
|
async fn execute_job_batch(
|
|
|
|
|
jobs: &[String],
|
|
|
|
|
workflow: &WorkflowDefinition,
|
2025-04-21 18:04:52 +05:30
|
|
|
runtime: &dyn ContainerRuntime,
|
2025-03-29 12:47:20 +05:30
|
|
|
env_context: &HashMap<String, String>,
|
|
|
|
|
verbose: bool,
|
2025-08-14 23:26:30 +05:30
|
|
|
secret_manager: Option<&SecretManager>,
|
|
|
|
|
secret_masker: Option<&SecretMasker>,
|
2025-03-29 12:47:20 +05:30
|
|
|
) -> Result<Vec<JobResult>, ExecutionError> {
|
|
|
|
|
// Execute jobs in parallel
|
2025-08-14 23:26:30 +05:30
|
|
|
let futures = jobs.iter().map(|job_name| {
|
|
|
|
|
execute_job_with_matrix(
|
|
|
|
|
job_name,
|
|
|
|
|
workflow,
|
|
|
|
|
runtime,
|
|
|
|
|
env_context,
|
|
|
|
|
verbose,
|
|
|
|
|
secret_manager,
|
|
|
|
|
secret_masker,
|
|
|
|
|
)
|
|
|
|
|
});
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-14 16:54:21 +05:30
|
|
|
let result_arrays = future::join_all(futures).await;
|
2025-04-21 16:42:16 +05:30
|
|
|
|
2025-04-14 16:54:21 +05:30
|
|
|
// Flatten the results from all jobs and their matrix combinations
|
|
|
|
|
let mut results = Vec::new();
|
|
|
|
|
for result_array in result_arrays {
|
|
|
|
|
match result_array {
|
|
|
|
|
Ok(job_results) => results.extend(job_results),
|
2025-03-29 12:47:20 +05:30
|
|
|
Err(e) => return Err(e),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-14 16:54:21 +05:30
|
|
|
Ok(results)
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-21 18:04:52 +05:30
|
|
|
/// Bundles the arguments needed to execute a single (non-matrix) job so
/// they can be passed to `execute_job` as one unit.
struct JobExecutionContext<'a> {
    // Key of the job in the workflow's `jobs` map.
    job_name: &'a str,
    // Parsed workflow the job belongs to.
    workflow: &'a WorkflowDefinition,
    // Container runtime used to run the job's steps.
    runtime: &'a dyn ContainerRuntime,
    // Base environment variables shared across jobs.
    env_context: &'a HashMap<String, String>,
    // Whether to include full step output in the job logs.
    verbose: bool,
    // Optional secret store used for secret substitution in step env values.
    secret_manager: Option<&'a SecretManager>,
    // Optional masker used to redact secret values from output.
    secret_masker: Option<&'a SecretMasker>,
}
|
|
|
|
|
|
2025-04-14 16:54:21 +05:30
|
|
|
/// Execute a job, expanding matrix if present.
///
/// Returns one `JobResult` per executed (or skipped) matrix combination;
/// a plain job yields exactly one result.
///
/// # Errors
/// Fails when the job name is not in the workflow, when the matrix cannot
/// be expanded, or when job execution itself errors.
async fn execute_job_with_matrix(
    job_name: &str,
    workflow: &WorkflowDefinition,
    runtime: &dyn ContainerRuntime,
    env_context: &HashMap<String, String>,
    verbose: bool,
    secret_manager: Option<&SecretManager>,
    secret_masker: Option<&SecretMasker>,
) -> Result<Vec<JobResult>, ExecutionError> {
    // Get the job definition
    let job = workflow.jobs.get(job_name).ok_or_else(|| {
        ExecutionError::Execution(format!("Job '{}' not found in workflow", job_name))
    })?;

    // Evaluate job condition if present.
    // NOTE(review): `evaluate_job_condition` is defined elsewhere in this
    // file; presumably it evaluates the GitHub-style `if:` expression
    // against the env/workflow context — confirm there.
    if let Some(if_condition) = &job.if_condition {
        let should_run = evaluate_job_condition(if_condition, env_context, workflow);
        if !should_run {
            wrkflw_logging::info(&format!(
                "⏭️ Skipping job '{}' due to condition: {}",
                job_name, if_condition
            ));
            // Return a skipped job result (no steps, empty logs).
            return Ok(vec![JobResult {
                name: job_name.to_string(),
                status: JobStatus::Skipped,
                steps: Vec::new(),
                logs: String::new(),
            }]);
        }
    }

    // Check if this is a matrix job
    if let Some(matrix_config) = &job.matrix {
        // Expand the matrix into combinations
        let combinations = wrkflw_matrix::expand_matrix(matrix_config)
            .map_err(|e| ExecutionError::Execution(format!("Failed to expand matrix: {}", e)))?;

        if combinations.is_empty() {
            wrkflw_logging::info(&format!(
                "Matrix job '{}' has no valid combinations",
                job_name
            ));
            // Return empty result for jobs with no valid combinations
            return Ok(Vec::new());
        }

        wrkflw_logging::info(&format!(
            "Matrix job '{}' expanded to {} combinations",
            job_name,
            combinations.len()
        ));

        // Set maximum parallel jobs
        let max_parallel = matrix_config.max_parallel.unwrap_or_else(|| {
            // If not specified, use a reasonable default based on CPU cores
            std::cmp::max(1, num_cpus::get())
        });

        // Execute matrix combinations; fail-fast defaults to true, matching
        // GitHub Actions' `strategy.fail-fast` default.
        execute_matrix_combinations(MatrixExecutionContext {
            job_name,
            job_template: job,
            combinations: &combinations,
            max_parallel,
            fail_fast: matrix_config.fail_fast.unwrap_or(true),
            workflow,
            runtime,
            env_context,
            verbose,
            secret_manager,
            secret_masker,
        })
        .await
    } else {
        // Regular job, no matrix
        let ctx = JobExecutionContext {
            job_name,
            workflow,
            runtime,
            env_context,
            verbose,
            secret_manager,
            secret_masker,
        };
        let result = execute_job(ctx).await?;
        Ok(vec![result])
    }
}
|
|
|
|
|
|
2025-04-21 16:42:16 +05:30
|
|
|
/// Execute a single (non-matrix) job: run each step in order inside a
/// temporary working directory, collecting per-step results and logs.
///
/// A step failure marks the job failed but later steps still run; a step
/// *error* (the `Err` arm) records a synthetic failed step and aborts the
/// remaining steps. Reusable-workflow jobs (`uses:` at job level) are
/// delegated to `execute_reusable_workflow_job`.
#[allow(unused_variables, unused_assignments)]
async fn execute_job(ctx: JobExecutionContext<'_>) -> Result<JobResult, ExecutionError> {
    // Get job definition
    let job = ctx.workflow.jobs.get(ctx.job_name).ok_or_else(|| {
        ExecutionError::Execution(format!("Job '{}' not found in workflow", ctx.job_name))
    })?;

    // Handle reusable workflow jobs (job-level 'uses')
    if let Some(uses) = &job.uses {
        return execute_reusable_workflow_job(&ctx, uses, job.with.as_ref(), job.secrets.as_ref())
            .await;
    }

    // Clone context and add job-specific variables
    let mut job_env = ctx.env_context.clone();

    // Add job-level environment variables (override the shared context)
    for (key, value) in &job.env {
        job_env.insert(key.clone(), value.clone());
    }

    // Execute job steps
    let mut step_results = Vec::new();
    let mut job_logs = String::new();

    // Create a temporary directory for this job execution; it is cleaned up
    // automatically when `job_dir` is dropped at the end of the job.
    let job_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create job directory: {}", e)))?;

    // Get the current project directory
    // NOTE(review): `current_dir` is not used below (covered by the
    // `#[allow(unused_variables)]` on this fn) — possibly leftover.
    let current_dir = std::env::current_dir().map_err(|e| {
        ExecutionError::Execution(format!("Failed to get current directory: {}", e))
    })?;

    wrkflw_logging::info(&format!("Executing job: {}", ctx.job_name));

    let mut job_success = true;

    // Execute job steps
    // Determine runner image (default if not provided)
    let runner_image_value = get_runner_image_from_opt(&job.runs_on);

    for (idx, step) in job.steps.iter().enumerate() {
        let step_result = execute_step(StepExecutionContext {
            step,
            step_idx: idx,
            job_env: &job_env,
            working_dir: job_dir.path(),
            runtime: ctx.runtime,
            workflow: ctx.workflow,
            runner_image: &runner_image_value,
            verbose: ctx.verbose,
            // Plain jobs have no matrix context.
            matrix_combination: &None,
            secret_manager: ctx.secret_manager,
            secret_masker: ctx.secret_masker,
        })
        .await;

        match step_result {
            Ok(result) => {
                // Check if step was successful
                if result.status == StepStatus::Failure {
                    job_success = false;
                }

                // Add step output to logs only in verbose mode or if there's an error
                if ctx.verbose || result.status == StepStatus::Failure {
                    job_logs.push_str(&format!(
                        "\n=== Output from step '{}' ===\n{}\n=== End output ===\n\n",
                        result.name, result.output
                    ));
                } else {
                    // In non-verbose mode, just record that the step ran but don't include output
                    job_logs.push_str(&format!(
                        "Step '{}' completed with status: {:?}\n",
                        result.name, result.status
                    ));
                }

                step_results.push(result);
            }
            Err(e) => {
                job_success = false;
                job_logs.push_str(&format!("\n=== ERROR in step {} ===\n{}\n", idx + 1, e));

                // Record the error as a failed step
                step_results.push(StepResult {
                    name: step
                        .name
                        .clone()
                        .unwrap_or_else(|| format!("Step {}", idx + 1)),
                    status: StepStatus::Failure,
                    output: format!("Error: {}", e),
                });

                // Stop executing further steps
                break;
            }
        }
    }

    Ok(JobResult {
        name: ctx.job_name.to_string(),
        status: if job_success {
            JobStatus::Success
        } else {
            JobStatus::Failure
        },
        steps: step_results,
        logs: job_logs,
    })
}
|
|
|
|
|
|
2025-04-21 18:04:52 +05:30
|
|
|
/// Bundles the arguments needed to run every combination of a matrix job
/// via `execute_matrix_combinations`.
struct MatrixExecutionContext<'a> {
    // Base job name; each combination gets a derived display name.
    job_name: &'a str,
    // The job definition shared by all combinations.
    job_template: &'a Job,
    // Expanded matrix combinations to execute.
    combinations: &'a [MatrixCombination],
    // Upper bound on combinations run concurrently in one chunk.
    max_parallel: usize,
    // When true, remaining combinations are skipped after a failure.
    fail_fast: bool,
    workflow: &'a WorkflowDefinition,
    runtime: &'a dyn ContainerRuntime,
    env_context: &'a HashMap<String, String>,
    verbose: bool,
    #[allow(dead_code)] // Planned for future implementation
    secret_manager: Option<&'a SecretManager>,
    #[allow(dead_code)] // Planned for future implementation
    secret_masker: Option<&'a SecretMasker>,
}
|
|
|
|
|
|
|
|
|
|
/// Execute a set of matrix combinations.
///
/// Combinations are processed in chunks of `max_parallel`, each chunk
/// running concurrently. With `fail_fast`, any later chunks are converted
/// to `Skipped` results once a combination has failed; an execution
/// *error* (the `Err` arm) aborts immediately under `fail_fast`, otherwise
/// it is logged and the remaining combinations continue.
async fn execute_matrix_combinations(
    ctx: MatrixExecutionContext<'_>,
) -> Result<Vec<JobResult>, ExecutionError> {
    let mut results = Vec::new();
    let mut any_failed = false;

    // Process combinations in chunks limited by max_parallel
    for chunk in ctx.combinations.chunks(ctx.max_parallel) {
        // Skip processing if fail-fast is enabled and a previous job failed
        if ctx.fail_fast && any_failed {
            // Add skipped results for remaining combinations
            for combination in chunk {
                let combination_name =
                    wrkflw_matrix::format_combination_name(ctx.job_name, combination);
                results.push(JobResult {
                    name: combination_name,
                    status: JobStatus::Skipped,
                    steps: Vec::new(),
                    logs: "Job skipped due to previous matrix job failure".to_string(),
                });
            }
            continue;
        }

        // Process this chunk of combinations in parallel.
        // NOTE(review): secrets are not forwarded here — execute_matrix_job
        // passes `None` for the secret manager/masker (marked as planned).
        let chunk_futures = chunk.iter().map(|combination| {
            execute_matrix_job(
                ctx.job_name,
                ctx.job_template,
                combination,
                ctx.workflow,
                ctx.runtime,
                ctx.env_context,
                ctx.verbose,
            )
        });

        let chunk_results = future::join_all(chunk_futures).await;

        // Process results from this chunk
        for result in chunk_results {
            match result {
                Ok(job_result) => {
                    if job_result.status == JobStatus::Failure {
                        any_failed = true;
                    }
                    results.push(job_result);
                }
                Err(e) => {
                    // On error, mark as failed and continue if not fail-fast
                    any_failed = true;
                    wrkflw_logging::error(&format!("Matrix job failed: {}", e));

                    if ctx.fail_fast {
                        return Err(e);
                    }
                }
            }
        }
    }

    Ok(results)
}
|
|
|
|
|
|
|
|
|
|
/// Execute a single matrix job combination.
///
/// Builds a combination-specific environment (base context + matrix values
/// + job-level env), runs the steps in order inside a temporary directory,
/// and returns a `JobResult` named after the combination. The first
/// non-successful step aborts the job with `Failure` — note this is
/// stricter than `execute_job`, which keeps running after a failed step.
async fn execute_matrix_job(
    job_name: &str,
    job_template: &Job,
    combination: &MatrixCombination,
    workflow: &WorkflowDefinition,
    runtime: &dyn ContainerRuntime,
    base_env_context: &HashMap<String, String>,
    verbose: bool,
) -> Result<JobResult, ExecutionError> {
    // Create the matrix-specific job name
    let matrix_job_name = wrkflw_matrix::format_combination_name(job_name, combination);

    wrkflw_logging::info(&format!("Executing matrix job: {}", matrix_job_name));

    // Clone the environment and add matrix-specific values
    let mut job_env = base_env_context.clone();
    environment::add_matrix_context(&mut job_env, combination);

    // Add job-level environment variables
    for (key, value) in &job_template.env {
        // TODO: Substitute matrix variable references in env values
        job_env.insert(key.clone(), value.clone());
    }

    // Execute the job steps
    let mut step_results = Vec::new();
    let mut job_logs = String::new();

    // Create a temporary directory for this job execution; cleaned up when
    // `job_dir` is dropped.
    let job_dir = tempfile::tempdir()
        .map_err(|e| ExecutionError::Execution(format!("Failed to create job directory: {}", e)))?;

    // Get the current project directory
    // NOTE(review): `current_dir` is not used below — possibly leftover.
    let current_dir = std::env::current_dir().map_err(|e| {
        ExecutionError::Execution(format!("Failed to get current directory: {}", e))
    })?;

    let job_success = if job_template.steps.is_empty() {
        // An empty job is treated as vacuously successful.
        wrkflw_logging::warning(&format!("Job '{}' has no steps", matrix_job_name));
        true
    } else {
        // Execute each step
        // Determine runner image (default if not provided)
        let runner_image_value = get_runner_image_from_opt(&job_template.runs_on);

        for (idx, step) in job_template.steps.iter().enumerate() {
            match execute_step(StepExecutionContext {
                step,
                step_idx: idx,
                job_env: &job_env,
                working_dir: job_dir.path(),
                runtime,
                workflow,
                runner_image: &runner_image_value,
                verbose,
                matrix_combination: &Some(combination.values.clone()),
                secret_manager: None, // Matrix execution context doesn't have secrets yet
                secret_masker: None,
            })
            .await
            {
                Ok(result) => {
                    job_logs.push_str(&format!("Step: {}\n", result.name));
                    job_logs.push_str(&format!("Status: {:?}\n", result.status));

                    // Only include step output in verbose mode or if there's an error
                    if verbose || result.status == StepStatus::Failure {
                        job_logs.push_str(&result.output);
                        job_logs.push_str("\n\n");
                    } else {
                        job_logs.push('\n');
                        job_logs.push('\n');
                    }

                    step_results.push(result.clone());

                    if result.status != StepStatus::Success {
                        // Step failed (or was skipped), abort job as a failure
                        return Ok(JobResult {
                            name: matrix_job_name,
                            status: JobStatus::Failure,
                            steps: step_results,
                            logs: job_logs,
                        });
                    }
                }
                Err(e) => {
                    // Log the error and abort the job
                    job_logs.push_str(&format!("Step execution error: {}\n\n", e));
                    return Ok(JobResult {
                        name: matrix_job_name,
                        status: JobStatus::Failure,
                        steps: step_results,
                        logs: job_logs,
                    });
                }
            }
        }

        // All steps completed successfully.
        true
    };

    // Return job result
    Ok(JobResult {
        name: matrix_job_name,
        status: if job_success {
            JobStatus::Success
        } else {
            JobStatus::Failure
        },
        steps: step_results,
        logs: job_logs,
    })
}
|
|
|
|
|
|
|
|
|
|
/// Bundles the arguments needed to execute one workflow step via
/// `execute_step`.
struct StepExecutionContext<'a> {
    // The parsed step definition (run command or action reference).
    step: &'a workflow::Step,
    // Zero-based index of the step within its job, used for fallback names.
    step_idx: usize,
    // Environment for the step: base context + job-level env.
    job_env: &'a HashMap<String, String>,
    // Temporary per-job working directory the step runs in.
    working_dir: &'a Path,
    runtime: &'a dyn ContainerRuntime,
    workflow: &'a WorkflowDefinition,
    // Image chosen for the job's `runs-on` value.
    runner_image: &'a str,
    verbose: bool,
    #[allow(dead_code)]
    // Matrix values for this run, when the step belongs to a matrix job.
    matrix_combination: &'a Option<HashMap<String, Value>>,
    // Optional secret store used to resolve secrets in step env values.
    secret_manager: Option<&'a SecretManager>,
    #[allow(dead_code)] // Planned for future implementation
    secret_masker: Option<&'a SecretMasker>,
}
|
|
|
|
|
|
|
|
|
|
async fn execute_step(ctx: StepExecutionContext<'_>) -> Result<StepResult, ExecutionError> {
|
|
|
|
|
let step_name = ctx
|
|
|
|
|
.step
|
2025-03-29 12:47:20 +05:30
|
|
|
.name
|
|
|
|
|
.clone()
|
2025-04-21 18:04:52 +05:30
|
|
|
.unwrap_or_else(|| format!("Step {}", ctx.step_idx + 1));
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-21 18:04:52 +05:30
|
|
|
if ctx.verbose {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(" Executing step: {}", step_name));
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Prepare step environment
|
2025-04-21 18:04:52 +05:30
|
|
|
let mut step_env = ctx.job_env.clone();
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-08-14 23:26:30 +05:30
|
|
|
// Add step-level environment variables (with secret substitution)
|
2025-04-21 18:04:52 +05:30
|
|
|
for (key, value) in &ctx.step.env {
|
2025-08-14 23:26:30 +05:30
|
|
|
let resolved_value = if let Some(secret_manager) = ctx.secret_manager {
|
|
|
|
|
let mut substitution = SecretSubstitution::new(secret_manager);
|
|
|
|
|
match substitution.substitute(value).await {
|
|
|
|
|
Ok(resolved) => resolved,
|
|
|
|
|
Err(e) => {
|
|
|
|
|
wrkflw_logging::error(&format!(
|
|
|
|
|
"Failed to resolve secrets in environment variable {}: {}",
|
|
|
|
|
key, e
|
|
|
|
|
));
|
|
|
|
|
value.clone()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
value.clone()
|
|
|
|
|
};
|
|
|
|
|
step_env.insert(key.clone(), resolved_value);
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Execute the step based on its type
|
2025-04-24 16:49:07 +05:30
|
|
|
let step_result = if let Some(uses) = &ctx.step.uses {
|
2025-03-29 12:47:20 +05:30
|
|
|
// Action step
|
2025-04-21 18:04:52 +05:30
|
|
|
let action_info = ctx.workflow.resolve_action(uses);
|
2025-03-29 12:47:20 +05:30
|
|
|
|
|
|
|
|
// Check if this is the checkout action
|
|
|
|
|
if uses.starts_with("actions/checkout") {
|
|
|
|
|
// Get the current directory (assumes this is where your project is)
|
|
|
|
|
let current_dir = std::env::current_dir().map_err(|e| {
|
2025-04-21 18:04:52 +05:30
|
|
|
ExecutionError::Execution(format!("Failed to get current dir: {}", e))
|
2025-03-29 12:47:20 +05:30
|
|
|
})?;
|
|
|
|
|
|
|
|
|
|
// Copy the project files to the workspace
|
2025-04-21 18:04:52 +05:30
|
|
|
copy_directory_contents(¤t_dir, ctx.working_dir)?;
|
2025-03-29 12:47:20 +05:30
|
|
|
|
|
|
|
|
// Add info for logs
|
2025-04-24 16:49:07 +05:30
|
|
|
let output = if ctx.verbose {
|
|
|
|
|
let mut detailed_output =
|
|
|
|
|
"Emulated checkout: Copied current directory to workspace\n\n".to_string();
|
|
|
|
|
|
|
|
|
|
// Add checkout action details
|
|
|
|
|
detailed_output.push_str("Checkout Details:\n");
|
|
|
|
|
detailed_output.push_str(" - Source: Local directory\n");
|
|
|
|
|
detailed_output
|
|
|
|
|
.push_str(&format!(" - Destination: {}\n", ctx.working_dir.display()));
|
|
|
|
|
|
|
|
|
|
// Add list of top-level files/directories that were copied (limit to 10)
|
|
|
|
|
detailed_output.push_str("\nTop-level files/directories copied:\n");
|
|
|
|
|
if let Ok(entries) = std::fs::read_dir(¤t_dir) {
|
|
|
|
|
for (i, entry) in entries.take(10).enumerate() {
|
|
|
|
|
if let Ok(entry) = entry {
|
|
|
|
|
let file_type = if entry.path().is_dir() {
|
|
|
|
|
"directory"
|
|
|
|
|
} else {
|
|
|
|
|
"file"
|
|
|
|
|
};
|
|
|
|
|
detailed_output.push_str(&format!(
|
|
|
|
|
" - {} ({})\n",
|
|
|
|
|
entry.file_name().to_string_lossy(),
|
|
|
|
|
file_type
|
|
|
|
|
));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if i >= 9 {
|
|
|
|
|
detailed_output.push_str(" - ... (more items not shown)\n");
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
detailed_output
|
|
|
|
|
} else {
|
|
|
|
|
"Emulated checkout: Copied current directory to workspace".to_string()
|
|
|
|
|
};
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-21 18:04:52 +05:30
|
|
|
if ctx.verbose {
|
2025-03-29 12:47:20 +05:30
|
|
|
println!(" Emulated actions/checkout: copied project files to workspace");
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
StepResult {
|
2025-03-29 12:47:20 +05:30
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Success,
|
|
|
|
|
output,
|
2025-04-24 16:49:07 +05:30
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
} else {
|
2025-04-04 15:08:29 +05:30
|
|
|
// Get action info
|
2025-04-21 18:04:52 +05:30
|
|
|
let image = prepare_action(&action_info, ctx.runtime).await?;
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-04 15:08:29 +05:30
|
|
|
// Special handling for composite actions
|
|
|
|
|
if image == "composite" && action_info.is_local {
|
|
|
|
|
// Handle composite action
|
|
|
|
|
let action_path = Path::new(&action_info.repository);
|
2025-04-24 16:49:07 +05:30
|
|
|
execute_composite_action(
|
2025-04-21 18:04:52 +05:30
|
|
|
ctx.step,
|
2025-04-04 15:08:29 +05:30
|
|
|
action_path,
|
|
|
|
|
&step_env,
|
2025-04-21 18:04:52 +05:30
|
|
|
ctx.working_dir,
|
|
|
|
|
ctx.runtime,
|
2025-04-30 16:51:38 +05:30
|
|
|
ctx.runner_image,
|
2025-04-21 18:04:52 +05:30
|
|
|
ctx.verbose,
|
2025-04-04 15:08:29 +05:30
|
|
|
)
|
2025-04-24 16:49:07 +05:30
|
|
|
.await?
|
|
|
|
|
} else {
|
|
|
|
|
// Regular Docker or JavaScript action processing
|
|
|
|
|
// ... (rest of the existing code for handling regular actions)
|
|
|
|
|
// Build command for Docker action
|
|
|
|
|
let mut cmd = Vec::new();
|
|
|
|
|
let mut owned_strings: Vec<String> = Vec::new(); // Keep strings alive until after we use cmd
|
|
|
|
|
|
|
|
|
|
// Special handling for Rust actions
|
|
|
|
|
if uses.starts_with("actions-rs/") {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(
|
|
|
|
|
"🔄 Detected Rust action - using system Rust installation",
|
|
|
|
|
);
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// For toolchain action, verify Rust is installed
|
|
|
|
|
if uses.starts_with("actions-rs/toolchain@") {
|
|
|
|
|
let rustc_version = Command::new("rustc")
|
|
|
|
|
.arg("--version")
|
|
|
|
|
.output()
|
|
|
|
|
.map(|output| String::from_utf8_lossy(&output.stdout).to_string())
|
|
|
|
|
.unwrap_or_else(|_| "not found".to_string());
|
|
|
|
|
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Using system Rust: {}",
|
|
|
|
|
rustc_version.trim()
|
|
|
|
|
));
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// Return success since we're using system Rust
|
|
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Success,
|
|
|
|
|
output: format!("Using system Rust: {}", rustc_version.trim()),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// For cargo action, execute cargo commands directly
|
|
|
|
|
if uses.starts_with("actions-rs/cargo@") {
|
|
|
|
|
let cargo_version = Command::new("cargo")
|
|
|
|
|
.arg("--version")
|
|
|
|
|
.output()
|
|
|
|
|
.map(|output| String::from_utf8_lossy(&output.stdout).to_string())
|
|
|
|
|
.unwrap_or_else(|_| "not found".to_string());
|
|
|
|
|
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
2025-04-24 16:49:07 +05:30
|
|
|
"🔄 Using system Rust/Cargo: {}",
|
|
|
|
|
cargo_version.trim()
|
|
|
|
|
));
|
|
|
|
|
|
|
|
|
|
// Get the command from the 'with' parameters
|
|
|
|
|
if let Some(with_params) = &ctx.step.with {
|
|
|
|
|
if let Some(command) = with_params.get("command") {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Found command parameter: {}",
|
|
|
|
|
command
|
|
|
|
|
));
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// Build the actual command
|
|
|
|
|
let mut real_command = format!("cargo {}", command);
|
|
|
|
|
|
|
|
|
|
// Add any arguments if specified
|
|
|
|
|
if let Some(args) = with_params.get("args") {
|
|
|
|
|
if !args.is_empty() {
|
|
|
|
|
// Resolve GitHub-style variables in args
|
|
|
|
|
let resolved_args = if args.contains("${{") {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
2025-04-24 16:49:07 +05:30
|
|
|
"🔄 Resolving workflow variables in: {}",
|
|
|
|
|
args
|
|
|
|
|
));
|
|
|
|
|
|
|
|
|
|
// Handle common matrix variables
|
|
|
|
|
let mut resolved =
|
|
|
|
|
args.replace("${{ matrix.target }}", "");
|
|
|
|
|
resolved = resolved.replace("${{ matrix.os }}", "");
|
|
|
|
|
|
|
|
|
|
// Handle any remaining ${{ variables }} by removing them
|
|
|
|
|
let re_pattern =
|
|
|
|
|
regex::Regex::new(r"\$\{\{\s*([^}]+)\s*\}\}")
|
|
|
|
|
.unwrap_or_else(|_| {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error(
|
2025-04-24 16:49:07 +05:30
|
|
|
"Failed to create regex pattern",
|
|
|
|
|
);
|
|
|
|
|
regex::Regex::new(r"\$\{\{.*?\}\}").unwrap()
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let resolved =
|
|
|
|
|
re_pattern.replace_all(&resolved, "").to_string();
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Resolved to: {}",
|
|
|
|
|
resolved
|
|
|
|
|
));
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
resolved.trim().to_string()
|
|
|
|
|
} else {
|
|
|
|
|
args.clone()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Only add if we have something left after resolving variables
|
|
|
|
|
// and it's not just "--target" without a value
|
|
|
|
|
if !resolved_args.is_empty() && resolved_args != "--target"
|
|
|
|
|
{
|
|
|
|
|
real_command.push_str(&format!(" {}", resolved_args));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
2025-04-24 16:49:07 +05:30
|
|
|
"🔄 Running actual command: {}",
|
|
|
|
|
real_command
|
|
|
|
|
));
|
|
|
|
|
|
|
|
|
|
// Execute the command
|
|
|
|
|
let mut cmd = Command::new("sh");
|
|
|
|
|
cmd.arg("-c");
|
|
|
|
|
cmd.arg(&real_command);
|
|
|
|
|
cmd.current_dir(ctx.working_dir);
|
|
|
|
|
|
|
|
|
|
// Add environment variables
|
|
|
|
|
for (key, value) in step_env {
|
|
|
|
|
cmd.env(key, value);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match cmd.output() {
|
|
|
|
|
Ok(output) => {
|
|
|
|
|
let exit_code = output.status.code().unwrap_or(-1);
|
|
|
|
|
let stdout =
|
|
|
|
|
String::from_utf8_lossy(&output.stdout).to_string();
|
|
|
|
|
let stderr =
|
|
|
|
|
String::from_utf8_lossy(&output.stderr).to_string();
|
|
|
|
|
|
|
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: if exit_code == 0 {
|
|
|
|
|
StepStatus::Success
|
|
|
|
|
} else {
|
|
|
|
|
StepStatus::Failure
|
|
|
|
|
},
|
|
|
|
|
output: format!("{}\n{}", stdout, stderr),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
Err(e) => {
|
|
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Failure,
|
|
|
|
|
output: format!("Failed to execute command: {}", e),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-04-04 15:08:29 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
if action_info.is_docker {
|
|
|
|
|
// Docker actions just run the container
|
2025-03-29 12:47:20 +05:30
|
|
|
cmd.push("sh");
|
|
|
|
|
cmd.push("-c");
|
2025-04-24 16:49:07 +05:30
|
|
|
cmd.push("echo 'Executing Docker action'");
|
|
|
|
|
} else if action_info.is_local {
|
|
|
|
|
// For local actions, we need more complex logic based on action type
|
|
|
|
|
let action_dir = Path::new(&action_info.repository);
|
|
|
|
|
let action_yaml = action_dir.join("action.yml");
|
|
|
|
|
|
|
|
|
|
if action_yaml.exists() {
|
|
|
|
|
// Parse the action.yml to determine action type
|
|
|
|
|
// This is simplified - real implementation would be more complex
|
|
|
|
|
cmd.push("sh");
|
|
|
|
|
cmd.push("-c");
|
|
|
|
|
cmd.push("echo 'Local action without action.yml'");
|
|
|
|
|
} else {
|
|
|
|
|
cmd.push("sh");
|
|
|
|
|
cmd.push("-c");
|
|
|
|
|
cmd.push("echo 'Local action without action.yml'");
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
} else {
|
2025-04-24 16:49:07 +05:30
|
|
|
// For GitHub actions, check if we have special handling
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
if let Err(e) = emulation::handle_special_action(uses).await {
|
2025-04-24 16:49:07 +05:30
|
|
|
// Log error but continue
|
|
|
|
|
println!(" Warning: Special action handling failed: {}", e);
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Check if we should hide GitHub action messages
|
|
|
|
|
let hide_action_value = ctx
|
|
|
|
|
.job_env
|
|
|
|
|
.get("WRKFLW_HIDE_ACTION_MESSAGES")
|
|
|
|
|
.cloned()
|
|
|
|
|
.unwrap_or_else(|| "not set".to_string());
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::debug(&format!(
|
2025-04-24 16:49:07 +05:30
|
|
|
"WRKFLW_HIDE_ACTION_MESSAGES value: {}",
|
|
|
|
|
hide_action_value
|
|
|
|
|
));
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
let hide_messages = hide_action_value == "true";
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::debug(&format!("Should hide messages: {}", hide_messages));
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Only log a message to the console if we're showing action messages
|
|
|
|
|
if !hide_messages {
|
|
|
|
|
// For Emulation mode, log a message about what action would be executed
|
|
|
|
|
println!(" ⚙️ Would execute GitHub action: {}", uses);
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Extract the actual command from the GitHub action if applicable
|
|
|
|
|
let mut should_run_real_command = false;
|
|
|
|
|
let mut real_command_parts = Vec::new();
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Check if this action has 'with' parameters that specify a command to run
|
|
|
|
|
if let Some(with_params) = &ctx.step.with {
|
|
|
|
|
// Common GitHub action pattern: has a 'command' parameter
|
|
|
|
|
if let Some(cmd) = with_params.get("command") {
|
|
|
|
|
if ctx.verbose {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Found command parameter: {}",
|
|
|
|
|
cmd
|
|
|
|
|
));
|
2025-04-24 16:49:07 +05:30
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Convert to real command based on action type patterns
|
|
|
|
|
if uses.contains("cargo") || uses.contains("rust") {
|
|
|
|
|
// Cargo command pattern
|
|
|
|
|
real_command_parts.push("cargo".to_string());
|
|
|
|
|
real_command_parts.push(cmd.clone());
|
|
|
|
|
should_run_real_command = true;
|
|
|
|
|
} else if uses.contains("node") || uses.contains("npm") {
|
|
|
|
|
// Node.js command pattern
|
|
|
|
|
if cmd == "npm" || cmd == "yarn" || cmd == "pnpm" {
|
|
|
|
|
real_command_parts.push(cmd.clone());
|
|
|
|
|
} else {
|
|
|
|
|
real_command_parts.push("npm".to_string());
|
|
|
|
|
real_command_parts.push("run".to_string());
|
|
|
|
|
real_command_parts.push(cmd.clone());
|
|
|
|
|
}
|
|
|
|
|
should_run_real_command = true;
|
|
|
|
|
} else if uses.contains("python") || uses.contains("pip") {
|
|
|
|
|
// Python command pattern
|
|
|
|
|
if cmd == "pip" {
|
|
|
|
|
real_command_parts.push("pip".to_string());
|
|
|
|
|
} else {
|
|
|
|
|
real_command_parts.push("python".to_string());
|
|
|
|
|
real_command_parts.push("-m".to_string());
|
|
|
|
|
real_command_parts.push(cmd.clone());
|
|
|
|
|
}
|
|
|
|
|
should_run_real_command = true;
|
|
|
|
|
} else {
|
|
|
|
|
// Generic command - try to execute directly if available
|
|
|
|
|
real_command_parts.push(cmd.clone());
|
|
|
|
|
should_run_real_command = true;
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Add any arguments if specified
|
|
|
|
|
if let Some(args) = with_params.get("args") {
|
|
|
|
|
if !args.is_empty() {
|
|
|
|
|
// Resolve GitHub-style variables in args
|
|
|
|
|
let resolved_args = if args.contains("${{") {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
2025-04-24 16:49:07 +05:30
|
|
|
"🔄 Resolving workflow variables in: {}",
|
|
|
|
|
args
|
|
|
|
|
));
|
|
|
|
|
|
|
|
|
|
// Handle common matrix variables
|
|
|
|
|
let mut resolved = args.replace("${{ matrix.target }}", "");
|
|
|
|
|
resolved = resolved.replace("${{ matrix.os }}", "");
|
|
|
|
|
|
|
|
|
|
// Handle any remaining ${{ variables }} by removing them
|
|
|
|
|
let re_pattern =
|
|
|
|
|
regex::Regex::new(r"\$\{\{\s*([^}]+)\s*\}\}")
|
|
|
|
|
.unwrap_or_else(|_| {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::error(
|
2025-04-24 16:49:07 +05:30
|
|
|
"Failed to create regex pattern",
|
|
|
|
|
);
|
|
|
|
|
regex::Regex::new(r"\$\{\{.*?\}\}").unwrap()
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let resolved =
|
|
|
|
|
re_pattern.replace_all(&resolved, "").to_string();
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Resolved to: {}",
|
|
|
|
|
resolved
|
|
|
|
|
));
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
resolved.trim().to_string()
|
|
|
|
|
} else {
|
|
|
|
|
args.clone()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Only add if we have something left after resolving variables
|
|
|
|
|
if !resolved_args.is_empty() {
|
|
|
|
|
real_command_parts.push(resolved_args);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
if should_run_real_command && !real_command_parts.is_empty() {
|
|
|
|
|
// Build a final command string
|
|
|
|
|
let command_str = real_command_parts.join(" ");
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"🔄 Running actual command: {}",
|
|
|
|
|
command_str
|
|
|
|
|
));
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Replace the emulated command with a shell command to execute our command
|
|
|
|
|
cmd.clear();
|
|
|
|
|
cmd.push("sh");
|
|
|
|
|
cmd.push("-c");
|
|
|
|
|
owned_strings.push(command_str);
|
|
|
|
|
cmd.push(owned_strings.last().unwrap());
|
|
|
|
|
} else {
|
|
|
|
|
// Fall back to emulation for actions we don't know how to execute
|
|
|
|
|
cmd.clear();
|
|
|
|
|
cmd.push("sh");
|
|
|
|
|
cmd.push("-c");
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
let echo_msg = format!("echo 'Would execute GitHub action: {}'", uses);
|
|
|
|
|
owned_strings.push(echo_msg);
|
|
|
|
|
cmd.push(owned_strings.last().unwrap());
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// Convert 'with' parameters to environment variables
|
|
|
|
|
if let Some(with_params) = &ctx.step.with {
|
|
|
|
|
for (key, value) in with_params {
|
|
|
|
|
step_env.insert(format!("INPUT_{}", key.to_uppercase()), value.clone());
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Convert environment HashMap to Vec<(&str, &str)> for container runtime
|
|
|
|
|
let env_vars: Vec<(&str, &str)> = step_env
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|(k, v)| (k.as_str(), v.as_str()))
|
|
|
|
|
.collect();
|
|
|
|
|
|
2025-04-30 16:51:38 +05:30
|
|
|
// Define the standard workspace path inside the container
|
|
|
|
|
let container_workspace = Path::new("/github/workspace");
|
|
|
|
|
|
|
|
|
|
// Set up volume mapping from host working dir to container workspace
|
|
|
|
|
let volumes: Vec<(&Path, &Path)> = vec![(ctx.working_dir, container_workspace)];
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
let output = ctx
|
|
|
|
|
.runtime
|
|
|
|
|
.run_container(
|
2025-04-30 16:51:38 +05:30
|
|
|
ctx.runner_image,
|
2025-04-24 16:49:07 +05:30
|
|
|
&cmd.to_vec(),
|
|
|
|
|
&env_vars,
|
2025-04-30 16:51:38 +05:30
|
|
|
container_workspace,
|
2025-04-24 16:49:07 +05:30
|
|
|
&volumes,
|
|
|
|
|
)
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| ExecutionError::Runtime(format!("{}", e)))?;
|
|
|
|
|
|
|
|
|
|
// Check if this was called from 'run' branch - don't try to hide these outputs
|
|
|
|
|
if output.exit_code == 0 {
|
|
|
|
|
// For GitHub actions in verbose mode, provide more detailed emulation information
|
|
|
|
|
let output_text = if ctx.verbose
|
|
|
|
|
&& uses.contains('/')
|
|
|
|
|
&& !uses.starts_with("./")
|
|
|
|
|
{
|
|
|
|
|
let mut detailed_output =
|
|
|
|
|
format!("Would execute GitHub action: {}\n", uses);
|
|
|
|
|
|
|
|
|
|
// Add information about the action inputs if available
|
|
|
|
|
if let Some(with_params) = &ctx.step.with {
|
|
|
|
|
detailed_output.push_str("\nAction inputs:\n");
|
|
|
|
|
for (key, value) in with_params {
|
|
|
|
|
detailed_output.push_str(&format!(" {}: {}\n", key, value));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add standard GitHub action environment variables
|
|
|
|
|
detailed_output.push_str("\nEnvironment variables:\n");
|
|
|
|
|
for (key, value) in step_env.iter() {
|
|
|
|
|
if key.starts_with("GITHUB_") || key.starts_with("INPUT_") {
|
|
|
|
|
detailed_output.push_str(&format!(" {}: {}\n", key, value));
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// Include the original output
|
|
|
|
|
detailed_output
|
|
|
|
|
.push_str(&format!("\nOutput:\n{}\n{}", output.stdout, output.stderr));
|
|
|
|
|
detailed_output
|
2025-04-24 13:48:52 +05:30
|
|
|
} else {
|
2025-04-24 16:49:07 +05:30
|
|
|
format!("{}\n{}", output.stdout, output.stderr)
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Check if this is a cargo command that failed
|
|
|
|
|
if output.exit_code != 0 && (uses.contains("cargo") || uses.contains("rust")) {
|
|
|
|
|
// Add detailed error information for cargo commands
|
|
|
|
|
let mut error_details = format!(
|
|
|
|
|
"\n\n❌ Command failed with exit code: {}\n",
|
|
|
|
|
output.exit_code
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Add command details
|
|
|
|
|
error_details.push_str(&format!("Command: {}\n", cmd.join(" ")));
|
|
|
|
|
|
|
|
|
|
// Add environment details
|
|
|
|
|
error_details.push_str("\nEnvironment:\n");
|
|
|
|
|
for (key, value) in step_env.iter() {
|
|
|
|
|
if key.starts_with("GITHUB_")
|
|
|
|
|
|| key.starts_with("INPUT_")
|
|
|
|
|
|| key.starts_with("RUST")
|
|
|
|
|
{
|
|
|
|
|
error_details.push_str(&format!(" {}: {}\n", key, value));
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Add detailed output
|
|
|
|
|
error_details.push_str("\nDetailed output:\n");
|
|
|
|
|
error_details.push_str(&output.stdout);
|
|
|
|
|
error_details.push_str(&output.stderr);
|
|
|
|
|
|
|
|
|
|
// Return failure with detailed error information
|
|
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Failure,
|
|
|
|
|
output: format!("{}\n{}", output_text, error_details),
|
|
|
|
|
});
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: if output.exit_code == 0 {
|
|
|
|
|
StepStatus::Success
|
|
|
|
|
} else {
|
|
|
|
|
StepStatus::Failure
|
|
|
|
|
},
|
2025-05-02 15:08:59 +05:30
|
|
|
output: format!(
|
|
|
|
|
"Exit code: {}
|
|
|
|
|
{}
|
|
|
|
|
{}",
|
|
|
|
|
output.exit_code, output.stdout, output.stderr
|
|
|
|
|
),
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 16:49:07 +05:30
|
|
|
} else {
|
|
|
|
|
StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Failure,
|
|
|
|
|
output: format!(
|
|
|
|
|
"Exit code: {}\n{}\n{}",
|
|
|
|
|
output.exit_code, output.stdout, output.stderr
|
|
|
|
|
),
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
2025-04-24 16:49:07 +05:30
|
|
|
} else if let Some(run) = &ctx.step.run {
|
|
|
|
|
// Run step
|
|
|
|
|
let mut output = String::new();
|
|
|
|
|
let mut status = StepStatus::Success;
|
|
|
|
|
let mut error_details = None;
|
|
|
|
|
|
2025-08-14 23:26:30 +05:30
|
|
|
// Perform secret substitution if secret manager is available
|
|
|
|
|
let resolved_run = if let Some(secret_manager) = ctx.secret_manager {
|
|
|
|
|
let mut substitution = SecretSubstitution::new(secret_manager);
|
|
|
|
|
match substitution.substitute(run).await {
|
|
|
|
|
Ok(resolved) => resolved,
|
|
|
|
|
Err(e) => {
|
|
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Failure,
|
|
|
|
|
output: format!("Secret substitution failed: {}", e),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
run.clone()
|
|
|
|
|
};
|
|
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Check if this is a cargo command
|
2025-08-14 23:26:30 +05:30
|
|
|
let is_cargo_cmd = resolved_run.trim().starts_with("cargo");
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-08-14 14:22:34 +05:30
|
|
|
// For complex shell commands, use bash to execute them properly
|
|
|
|
|
// This handles quotes, pipes, redirections, and command substitutions correctly
|
2025-08-14 23:26:30 +05:30
|
|
|
let cmd_parts = vec!["bash", "-c", &resolved_run];
|
2025-04-24 16:49:07 +05:30
|
|
|
|
|
|
|
|
// Convert environment variables to the required format
|
2025-03-29 12:47:20 +05:30
|
|
|
let env_vars: Vec<(&str, &str)> = step_env
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|(k, v)| (k.as_str(), v.as_str()))
|
|
|
|
|
.collect();
|
|
|
|
|
|
2025-04-30 16:51:38 +05:30
|
|
|
// Define the standard workspace path inside the container
|
|
|
|
|
let container_workspace = Path::new("/github/workspace");
|
|
|
|
|
|
|
|
|
|
// Set up volume mapping from host working dir to container workspace
|
|
|
|
|
let volumes: Vec<(&Path, &Path)> = vec![(ctx.working_dir, container_workspace)];
|
|
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// Execute the command
|
|
|
|
|
match ctx
|
|
|
|
|
.runtime
|
|
|
|
|
.run_container(
|
2025-04-30 16:51:38 +05:30
|
|
|
ctx.runner_image,
|
2025-04-24 16:49:07 +05:30
|
|
|
&cmd_parts,
|
|
|
|
|
&env_vars,
|
2025-04-30 16:51:38 +05:30
|
|
|
container_workspace,
|
|
|
|
|
&volumes,
|
2025-04-24 16:49:07 +05:30
|
|
|
)
|
2025-03-29 12:47:20 +05:30
|
|
|
.await
|
2025-04-24 16:49:07 +05:30
|
|
|
{
|
|
|
|
|
Ok(container_output) => {
|
|
|
|
|
// Add command details to output
|
|
|
|
|
output.push_str(&format!("Command: {}\n\n", run));
|
|
|
|
|
|
|
|
|
|
if !container_output.stdout.is_empty() {
|
|
|
|
|
output.push_str("Standard Output:\n");
|
|
|
|
|
output.push_str(&container_output.stdout);
|
|
|
|
|
output.push('\n');
|
|
|
|
|
}
|
2025-04-21 16:42:16 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
if !container_output.stderr.is_empty() {
|
|
|
|
|
output.push_str("Standard Error:\n");
|
|
|
|
|
output.push_str(&container_output.stderr);
|
|
|
|
|
output.push('\n');
|
|
|
|
|
}
|
2025-04-24 13:48:52 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
if container_output.exit_code != 0 {
|
|
|
|
|
status = StepStatus::Failure;
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
// For cargo commands, add more detailed error information
|
|
|
|
|
if is_cargo_cmd {
|
|
|
|
|
let mut error_msg = String::new();
|
|
|
|
|
error_msg.push_str(&format!(
|
|
|
|
|
"\nCargo command failed with exit code {}\n",
|
|
|
|
|
container_output.exit_code
|
|
|
|
|
));
|
|
|
|
|
error_msg.push_str("Common causes for cargo command failures:\n");
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 16:49:07 +05:30
|
|
|
if run.contains("fmt") {
|
|
|
|
|
error_msg.push_str(
|
|
|
|
|
"- Code formatting issues. Run 'cargo fmt' locally to fix.\n",
|
|
|
|
|
);
|
|
|
|
|
} else if run.contains("clippy") {
|
|
|
|
|
error_msg.push_str("- Linter warnings treated as errors. Run 'cargo clippy' locally to see details.\n");
|
|
|
|
|
} else if run.contains("test") {
|
|
|
|
|
error_msg.push_str("- Test failures. Run 'cargo test' locally to see which tests failed.\n");
|
|
|
|
|
} else if run.contains("build") {
|
|
|
|
|
error_msg.push_str(
|
|
|
|
|
"- Compilation errors. Check the error messages above.\n",
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
error_details = Some(error_msg);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(e) => {
|
|
|
|
|
status = StepStatus::Failure;
|
|
|
|
|
output.push_str(&format!("Error executing command: {}\n", e));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If there are error details, append them to the output
|
|
|
|
|
if let Some(details) = error_details {
|
|
|
|
|
output.push_str(&details);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status,
|
|
|
|
|
output,
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
} else {
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
return Ok(StepResult {
|
|
|
|
|
name: step_name,
|
|
|
|
|
status: StepStatus::Skipped,
|
|
|
|
|
output: "Step has neither 'uses' nor 'run'".to_string(),
|
|
|
|
|
});
|
2025-04-24 16:49:07 +05:30
|
|
|
};
|
|
|
|
|
|
|
|
|
|
Ok(step_result)
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn copy_directory_contents(from: &Path, to: &Path) -> Result<(), ExecutionError> {
|
|
|
|
|
for entry in std::fs::read_dir(from)
|
2025-04-21 18:04:52 +05:30
|
|
|
.map_err(|e| ExecutionError::Execution(format!("Failed to read directory: {}", e)))?
|
2025-03-29 12:47:20 +05:30
|
|
|
{
|
2025-04-21 18:04:52 +05:30
|
|
|
let entry =
|
|
|
|
|
entry.map_err(|e| ExecutionError::Execution(format!("Failed to read entry: {}", e)))?;
|
2025-03-29 12:47:20 +05:30
|
|
|
let path = entry.path();
|
2025-08-17 01:41:17 +03:00
|
|
|
wrkflw_logging::debug(&format!("Copying entry: {path:?} -> {to:?}"));
|
2025-03-29 12:47:20 +05:30
|
|
|
|
|
|
|
|
// Skip hidden files/dirs and target directory for efficiency
|
2025-04-21 16:42:16 +05:30
|
|
|
let file_name = match path.file_name() {
|
|
|
|
|
Some(name) => name.to_string_lossy(),
|
|
|
|
|
None => {
|
2025-04-21 18:04:52 +05:30
|
|
|
return Err(ExecutionError::Execution(format!(
|
2025-04-21 17:38:43 +05:30
|
|
|
"Failed to get file name from path: {:?}",
|
|
|
|
|
path
|
|
|
|
|
)));
|
2025-04-21 16:42:16 +05:30
|
|
|
}
|
|
|
|
|
};
|
2025-03-29 12:47:20 +05:30
|
|
|
if file_name.starts_with(".") || file_name == "target" {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-21 16:42:16 +05:30
|
|
|
let dest_path = match path.file_name() {
|
|
|
|
|
Some(name) => to.join(name),
|
|
|
|
|
None => {
|
2025-04-21 18:04:52 +05:30
|
|
|
return Err(ExecutionError::Execution(format!(
|
2025-04-21 17:38:43 +05:30
|
|
|
"Failed to get file name from path: {:?}",
|
|
|
|
|
path
|
|
|
|
|
)));
|
2025-04-21 16:42:16 +05:30
|
|
|
}
|
|
|
|
|
};
|
2025-03-29 12:47:20 +05:30
|
|
|
|
|
|
|
|
if path.is_dir() {
|
2025-04-21 18:04:52 +05:30
|
|
|
std::fs::create_dir_all(&dest_path)
|
|
|
|
|
.map_err(|e| ExecutionError::Execution(format!("Failed to create dir: {}", e)))?;
|
2025-03-29 12:47:20 +05:30
|
|
|
|
|
|
|
|
// Recursively copy subdirectories
|
|
|
|
|
copy_directory_contents(&path, &dest_path)?;
|
|
|
|
|
} else {
|
2025-04-21 18:04:52 +05:30
|
|
|
std::fs::copy(&path, &dest_path)
|
|
|
|
|
.map_err(|e| ExecutionError::Execution(format!("Failed to copy file: {}", e)))?;
|
2025-03-29 12:47:20 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Map a GitHub Actions `runs-on` label to a Docker image name.
///
/// Known labels (ubuntu/macos/windows variants plus language-specific
/// runners such as `python-3.11` or `go-1.21`) are mapped explicitly.
/// Anything else falls back to a per-platform default chosen by prefix,
/// and finally to `ubuntu:latest`.
fn get_runner_image(runs_on: &str) -> String {
    let image = match runs_on.trim() {
        // ubuntu runners - micro images (minimal size)
        "ubuntu-latest" => "node:16-buster-slim",
        "ubuntu-22.04" => "node:16-bullseye-slim",
        "ubuntu-20.04" => "node:16-buster-slim",
        "ubuntu-18.04" => "node:16-buster-slim",

        // ubuntu runners - medium images (with more tools)
        "ubuntu-latest-medium" => "catthehacker/ubuntu:act-latest",
        "ubuntu-22.04-medium" => "catthehacker/ubuntu:act-22.04",
        "ubuntu-20.04-medium" => "catthehacker/ubuntu:act-20.04",
        "ubuntu-18.04-medium" => "catthehacker/ubuntu:act-18.04",

        // ubuntu runners - large images (with most tools)
        "ubuntu-latest-large" => "catthehacker/ubuntu:full-latest",
        "ubuntu-22.04-large" => "catthehacker/ubuntu:full-22.04",
        "ubuntu-20.04-large" => "catthehacker/ubuntu:full-20.04",
        "ubuntu-18.04-large" => "catthehacker/ubuntu:full-18.04",

        // macOS runners - use a standard Rust image for compatibility
        "macos-latest" => "rust:latest",
        "macos-12" => "rust:latest",    // Monterey equivalent
        "macos-11" => "rust:latest",    // Big Sur equivalent
        "macos-10.15" => "rust:latest", // Catalina equivalent

        // Windows runners - using servercore-based images
        "windows-latest" => "mcr.microsoft.com/windows/servercore:ltsc2022",
        "windows-2022" => "mcr.microsoft.com/windows/servercore:ltsc2022",
        "windows-2019" => "mcr.microsoft.com/windows/servercore:ltsc2019",

        // Language-specific runners
        "python-latest" => "python:3.11-slim",
        "python-3.11" => "python:3.11-slim",
        "python-3.10" => "python:3.10-slim",
        "python-3.9" => "python:3.9-slim",
        "python-3.8" => "python:3.8-slim",

        "node-latest" => "node:20-slim",
        "node-20" => "node:20-slim",
        "node-18" => "node:18-slim",
        "node-16" => "node:16-slim",

        "java-latest" => "eclipse-temurin:17-jdk",
        "java-17" => "eclipse-temurin:17-jdk",
        "java-11" => "eclipse-temurin:11-jdk",
        "java-8" => "eclipse-temurin:8-jdk",

        "go-latest" => "golang:1.21-slim",
        "go-1.21" => "golang:1.21-slim",
        "go-1.20" => "golang:1.20-slim",
        "go-1.19" => "golang:1.19-slim",

        "dotnet-latest" => "mcr.microsoft.com/dotnet/sdk:7.0",
        "dotnet-7.0" => "mcr.microsoft.com/dotnet/sdk:7.0",
        "dotnet-6.0" => "mcr.microsoft.com/dotnet/sdk:6.0",
        "dotnet-5.0" => "mcr.microsoft.com/dotnet/sdk:5.0",

        // Unrecognized label: pick a default image by platform prefix.
        other => {
            let lowered = other.to_lowercase();
            // Checked in order; "go" must come after "node" etc. to keep
            // the original first-match-wins behavior.
            const PREFIX_DEFAULTS: [(&str, &str); 7] = [
                ("macos", "rust:latest"),
                ("windows", "mcr.microsoft.com/windows/servercore:ltsc2022"),
                ("python", "python:3.11-slim"),
                ("node", "node:20-slim"),
                ("java", "eclipse-temurin:17-jdk"),
                ("go", "golang:1.21-slim"),
                ("dotnet", "mcr.microsoft.com/dotnet/sdk:7.0"),
            ];
            PREFIX_DEFAULTS
                .iter()
                .find(|(prefix, _)| lowered.starts_with(prefix))
                .map(|(_, img)| *img)
                // Default to Ubuntu for everything else
                .unwrap_or("ubuntu:latest")
        }
    };
    image.to_string()
}
|
|
|
|
|
|
2025-08-13 13:21:58 +05:30
|
|
|
fn get_runner_image_from_opt(runs_on: &Option<Vec<String>>) -> String {
|
2025-08-12 14:53:07 +05:30
|
|
|
let default = "ubuntu-latest";
|
2025-08-13 13:21:58 +05:30
|
|
|
let ro = runs_on
|
|
|
|
|
.as_ref()
|
|
|
|
|
.and_then(|vec| vec.first())
|
|
|
|
|
.map(|s| s.as_str())
|
|
|
|
|
.unwrap_or(default);
|
2025-08-12 14:53:07 +05:30
|
|
|
get_runner_image(ro)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async fn execute_reusable_workflow_job(
|
|
|
|
|
ctx: &JobExecutionContext<'_>,
|
|
|
|
|
uses: &str,
|
|
|
|
|
with: Option<&HashMap<String, String>>,
|
|
|
|
|
secrets: Option<&serde_yaml::Value>,
|
|
|
|
|
) -> Result<JobResult, ExecutionError> {
|
|
|
|
|
wrkflw_logging::info(&format!(
|
|
|
|
|
"Executing reusable workflow job '{}' -> {}",
|
|
|
|
|
ctx.job_name, uses
|
|
|
|
|
));
|
|
|
|
|
|
|
|
|
|
// Resolve the called workflow file path
|
|
|
|
|
enum UsesRef<'a> {
|
|
|
|
|
LocalPath(&'a str),
|
|
|
|
|
Remote {
|
|
|
|
|
owner: String,
|
|
|
|
|
repo: String,
|
|
|
|
|
path: String,
|
|
|
|
|
r#ref: String,
|
|
|
|
|
},
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let uses_ref = if uses.starts_with("./") || uses.starts_with('/') {
|
|
|
|
|
UsesRef::LocalPath(uses)
|
|
|
|
|
} else {
|
|
|
|
|
// Expect format owner/repo/path/to/workflow.yml@ref
|
|
|
|
|
let parts: Vec<&str> = uses.split('@').collect();
|
|
|
|
|
if parts.len() != 2 {
|
|
|
|
|
return Err(ExecutionError::Execution(format!(
|
|
|
|
|
"Invalid reusable workflow reference: {}",
|
|
|
|
|
uses
|
|
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
let left = parts[0];
|
|
|
|
|
let r#ref = parts[1].to_string();
|
|
|
|
|
let mut segs = left.splitn(3, '/');
|
|
|
|
|
let owner = segs.next().unwrap_or("").to_string();
|
|
|
|
|
let repo = segs.next().unwrap_or("").to_string();
|
|
|
|
|
let path = segs.next().unwrap_or("").to_string();
|
|
|
|
|
if owner.is_empty() || repo.is_empty() || path.is_empty() {
|
|
|
|
|
return Err(ExecutionError::Execution(format!(
|
|
|
|
|
"Invalid reusable workflow reference: {}",
|
|
|
|
|
uses
|
|
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
UsesRef::Remote {
|
|
|
|
|
owner,
|
|
|
|
|
repo,
|
|
|
|
|
path,
|
|
|
|
|
r#ref,
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Load workflow file
|
|
|
|
|
let workflow_path = match uses_ref {
|
|
|
|
|
UsesRef::LocalPath(p) => {
|
|
|
|
|
// Resolve relative to current directory
|
|
|
|
|
let current_dir = std::env::current_dir().map_err(|e| {
|
|
|
|
|
ExecutionError::Execution(format!("Failed to get current dir: {}", e))
|
|
|
|
|
})?;
|
|
|
|
|
let path = current_dir.join(p);
|
|
|
|
|
if !path.exists() {
|
|
|
|
|
return Err(ExecutionError::Execution(format!(
|
|
|
|
|
"Reusable workflow not found at path: {}",
|
|
|
|
|
path.display()
|
|
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
path
|
|
|
|
|
}
|
|
|
|
|
UsesRef::Remote {
|
|
|
|
|
owner,
|
|
|
|
|
repo,
|
|
|
|
|
path,
|
|
|
|
|
r#ref,
|
|
|
|
|
} => {
|
|
|
|
|
// Clone minimal repository and checkout ref
|
|
|
|
|
let tempdir = tempfile::tempdir().map_err(|e| {
|
|
|
|
|
ExecutionError::Execution(format!("Failed to create temp dir: {}", e))
|
|
|
|
|
})?;
|
|
|
|
|
let repo_url = format!("https://github.com/{}/{}.git", owner, repo);
|
|
|
|
|
// git clone
|
|
|
|
|
let status = Command::new("git")
|
|
|
|
|
.arg("clone")
|
|
|
|
|
.arg("--depth")
|
|
|
|
|
.arg("1")
|
|
|
|
|
.arg("--branch")
|
|
|
|
|
.arg(&r#ref)
|
|
|
|
|
.arg(&repo_url)
|
|
|
|
|
.arg(tempdir.path())
|
|
|
|
|
.status()
|
|
|
|
|
.map_err(|e| ExecutionError::Execution(format!("Failed to execute git: {}", e)))?;
|
|
|
|
|
if !status.success() {
|
|
|
|
|
return Err(ExecutionError::Execution(format!(
|
|
|
|
|
"Failed to clone {}@{}",
|
|
|
|
|
repo_url, r#ref
|
|
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
let joined = tempdir.path().join(path);
|
|
|
|
|
if !joined.exists() {
|
|
|
|
|
return Err(ExecutionError::Execution(format!(
|
|
|
|
|
"Reusable workflow file not found in repo: {}",
|
|
|
|
|
joined.display()
|
|
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
joined
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Parse called workflow
|
|
|
|
|
let called = parse_workflow(&workflow_path)?;
|
|
|
|
|
|
|
|
|
|
// Create child env context
|
|
|
|
|
let mut child_env = ctx.env_context.clone();
|
|
|
|
|
if let Some(with_map) = with {
|
|
|
|
|
for (k, v) in with_map {
|
|
|
|
|
child_env.insert(format!("INPUT_{}", k.to_uppercase()), v.clone());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if let Some(secrets_val) = secrets {
|
|
|
|
|
if let Some(map) = secrets_val.as_mapping() {
|
|
|
|
|
for (k, v) in map {
|
|
|
|
|
if let (Some(key), Some(value)) = (k.as_str(), v.as_str()) {
|
|
|
|
|
child_env.insert(format!("SECRET_{}", key.to_uppercase()), value.to_string());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Execute called workflow
|
|
|
|
|
let plan = dependency::resolve_dependencies(&called)?;
|
|
|
|
|
let mut all_results = Vec::new();
|
|
|
|
|
let mut any_failed = false;
|
|
|
|
|
for batch in plan {
|
2025-08-14 23:26:30 +05:30
|
|
|
let results = execute_job_batch(
|
|
|
|
|
&batch,
|
|
|
|
|
&called,
|
|
|
|
|
ctx.runtime,
|
|
|
|
|
&child_env,
|
|
|
|
|
ctx.verbose,
|
|
|
|
|
None,
|
|
|
|
|
None,
|
|
|
|
|
)
|
|
|
|
|
.await?;
|
2025-08-12 14:53:07 +05:30
|
|
|
for r in &results {
|
|
|
|
|
if r.status == JobStatus::Failure {
|
|
|
|
|
any_failed = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
all_results.extend(results);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Summarize into a single JobResult
|
|
|
|
|
let mut logs = String::new();
|
|
|
|
|
logs.push_str(&format!("Called workflow: {}\n", workflow_path.display()));
|
|
|
|
|
for r in &all_results {
|
|
|
|
|
logs.push_str(&format!("- {}: {:?}\n", r.name, r.status));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Represent as one summary step for UI
|
|
|
|
|
let summary_step = StepResult {
|
|
|
|
|
name: format!("Run reusable workflow: {}", uses),
|
|
|
|
|
status: if any_failed {
|
|
|
|
|
StepStatus::Failure
|
|
|
|
|
} else {
|
|
|
|
|
StepStatus::Success
|
|
|
|
|
},
|
|
|
|
|
output: logs.clone(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
Ok(JobResult {
|
|
|
|
|
name: ctx.job_name.to_string(),
|
|
|
|
|
status: if any_failed {
|
|
|
|
|
JobStatus::Failure
|
|
|
|
|
} else {
|
|
|
|
|
JobStatus::Success
|
|
|
|
|
},
|
|
|
|
|
steps: vec![summary_step],
|
|
|
|
|
logs,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2025-05-02 15:45:51 +05:30
|
|
|
#[allow(dead_code)]
|
2025-03-29 12:47:20 +05:30
|
|
|
async fn prepare_runner_image(
|
|
|
|
|
image: &str,
|
2025-04-21 18:04:52 +05:30
|
|
|
runtime: &dyn ContainerRuntime,
|
2025-03-29 12:47:20 +05:30
|
|
|
verbose: bool,
|
|
|
|
|
) -> Result<(), ExecutionError> {
|
2025-04-24 17:45:31 +05:30
|
|
|
// Try to pull the image first
|
|
|
|
|
if let Err(e) = runtime.pull_image(image).await {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::warning(&format!("Failed to pull image {}: {}", image, e));
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-24 17:45:31 +05:30
|
|
|
// Check if this is a language-specific runner
|
|
|
|
|
let language_info = extract_language_info(image);
|
|
|
|
|
if let Some((language, version)) = language_info {
|
|
|
|
|
// Try to prepare a language-specific environment
|
|
|
|
|
if let Ok(custom_image) = runtime
|
|
|
|
|
.prepare_language_environment(language, version, None)
|
|
|
|
|
.await
|
|
|
|
|
.map_err(|e| ExecutionError::Runtime(e.to_string()))
|
|
|
|
|
{
|
2025-04-24 13:48:52 +05:30
|
|
|
if verbose {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::info(&format!("Using customized image: {}", custom_image));
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 17:45:31 +05:30
|
|
|
return Ok(());
|
2025-04-24 13:48:52 +05:30
|
|
|
}
|
2025-04-24 17:45:31 +05:30
|
|
|
}
|
2025-03-29 12:47:20 +05:30
|
|
|
|
2025-04-24 17:45:31 +05:30
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
2025-05-02 15:45:51 +05:30
|
|
|
#[allow(dead_code)]
/// Map a container image reference to a `(language, version)` pair for the
/// language runners wrkflw knows how to customize.
///
/// Prefix matching is case-insensitive, but the returned version slice
/// preserves the original casing of `image` (e.g. `"python:3.11"` yields
/// `Some(("python", Some("3.11")))`). Returns `None` for images that are not
/// recognized language runners.
fn extract_language_info(image: &str) -> Option<(&'static str, Option<&str>)> {
    let image_lower = image.to_lowercase();

    // (image prefix, canonical language name). All prefixes are ASCII, so
    // slicing `image` at `prefix.len()` is always on a char boundary.
    const LANGUAGE_PREFIXES: [(&str, &str); 6] = [
        ("python:", "python"),
        ("node:", "node"),
        ("eclipse-temurin:", "java"),
        ("golang:", "go"),
        ("mcr.microsoft.com/dotnet/sdk:", "dotnet"),
        ("rust:", "rust"),
    ];

    for (prefix, language) in LANGUAGE_PREFIXES {
        if image_lower.starts_with(prefix) {
            // Slicing by `prefix.len()` fixes the previous hard-coded offsets,
            // which were off by one for "eclipse-temurin:" (16 chars, sliced
            // at 15) and "golang:" (7 chars, sliced at 6) and therefore left a
            // leading ':' in the reported version.
            return Some((language, Some(&image[prefix.len()..])));
        }
    }

    None
}
|
2025-04-04 15:08:29 +05:30
|
|
|
|
|
|
|
|
/// Execute a composite GitHub Action (an action whose `runs.using` is
/// `"composite"`) by running each of its declared steps in sequence.
///
/// The action definition is loaded from `action.yml` (preferred) or
/// `action.yaml` inside `action_path`. The calling step's `with:` values —
/// falling back to each input's declared `default`, or `""` — are exposed to
/// every composite step as `INPUT_<NAME>` environment variables layered on
/// top of `job_env`.
///
/// Returns a single `StepResult` summarizing all composite steps. Execution
/// stops at the first failing step; that failure is returned as an `Ok`
/// result with `StepStatus::Failure`. An `Err` is returned for a
/// missing/unreadable/invalid action file, a non-composite action, or a step
/// definition that cannot be converted.
async fn execute_composite_action(
    step: &workflow::Step,
    action_path: &Path,
    job_env: &HashMap<String, String>,
    working_dir: &Path,
    runtime: &dyn ContainerRuntime,
    runner_image: &str,
    verbose: bool,
) -> Result<StepResult, ExecutionError> {
    // Find the action definition file; `action.yml` wins when both exist.
    let action_yaml = action_path.join("action.yml");
    let action_yaml_alt = action_path.join("action.yaml");

    let action_file = if action_yaml.exists() {
        action_yaml
    } else if action_yaml_alt.exists() {
        action_yaml_alt
    } else {
        return Err(ExecutionError::Execution(format!(
            "No action.yml or action.yaml found in {}",
            action_path.display()
        )));
    };

    // Parse the composite action definition.
    let action_content = fs::read_to_string(&action_file)
        .map_err(|e| ExecutionError::Execution(format!("Failed to read action file: {}", e)))?;

    let action_def: serde_yaml::Value = serde_yaml::from_str(&action_content)
        .map_err(|e| ExecutionError::Execution(format!("Invalid action YAML: {}", e)))?;

    // Only actions declaring `runs.using: composite` are handled here.
    match action_def.get("runs").and_then(|v| v.get("using")) {
        Some(serde_yaml::Value::String(using)) if using == "composite" => {
            // Get the steps; a composite action without a `runs.steps`
            // sequence is malformed.
            let steps = match action_def.get("runs").and_then(|v| v.get("steps")) {
                Some(serde_yaml::Value::Sequence(steps)) => steps,
                _ => {
                    return Err(ExecutionError::Execution(
                        "Composite action is missing steps".to_string(),
                    ))
                }
            };

            // Process inputs from the calling step's 'with' parameters.
            // Non-string input names are silently skipped.
            let mut action_env = job_env.clone();
            if let Some(inputs_def) = action_def.get("inputs") {
                if let Some(inputs_map) = inputs_def.as_mapping() {
                    for (input_name, input_def) in inputs_map {
                        if let Some(input_name_str) = input_name.as_str() {
                            // Declared default, or empty string if none.
                            let default_value = input_def
                                .get("default")
                                .and_then(|v| v.as_str())
                                .unwrap_or("");

                            // Caller-supplied `with:` value takes precedence
                            // over the default.
                            // NOTE(review): `unwrap_or` allocates the default
                            // String eagerly even when `with` provides a value.
                            let input_value = step
                                .with
                                .as_ref()
                                .and_then(|with| with.get(input_name_str))
                                .unwrap_or(&default_value.to_string())
                                .clone();

                            // Expose the input as INPUT_<UPPERCASE_NAME>,
                            // matching the GitHub Actions convention.
                            action_env.insert(
                                format!("INPUT_{}", input_name_str.to_uppercase()),
                                input_value,
                            );
                        }
                    }
                }
            }

            // Execute each step in declaration order.
            let mut step_outputs = Vec::new();
            for (idx, step_def) in steps.iter().enumerate() {
                // Convert the raw YAML step into our Step struct; report the
                // 1-based step index on failure.
                let composite_step = match convert_yaml_to_step(step_def) {
                    Ok(step) => step,
                    Err(e) => {
                        return Err(ExecutionError::Execution(format!(
                            "Failed to process composite action step {}: {}",
                            idx + 1,
                            e
                        )))
                    }
                };

                // Execute the step - using Box::pin to handle async recursion
                // (execute_step may call back into composite-action handling).
                let step_result = Box::pin(execute_step(StepExecutionContext {
                    step: &composite_step,
                    step_idx: idx,
                    job_env: &action_env,
                    working_dir,
                    runtime,
                    // Synthetic, empty workflow context: composite steps run
                    // outside any real workflow definition.
                    workflow: &workflow::WorkflowDefinition {
                        name: "Composite Action".to_string(),
                        on: vec![],
                        on_raw: serde_yaml::Value::Null,
                        jobs: HashMap::new(),
                    },
                    runner_image,
                    verbose,
                    matrix_combination: &None,
                    secret_manager: None, // Composite actions don't have secrets yet
                    secret_masker: None,
                }))
                .await?;

                // Record the step's output for the summary (1-based index).
                step_outputs.push(format!("Step {}: {}", idx + 1, step_result.output));

                // Short-circuit on failure: remaining steps are not run.
                if step_result.status == StepStatus::Failure {
                    return Ok(StepResult {
                        name: step
                            .name
                            .clone()
                            .unwrap_or_else(|| "Composite Action".to_string()),
                        status: StepStatus::Failure,
                        output: step_outputs.join("\n"),
                    });
                }
            }

            // All steps completed successfully; build the summary output.
            let output = if verbose {
                let mut detailed_output = format!(
                    "Executed composite action from: {}\n\n",
                    action_path.display()
                );

                // Add information about the composite action if available.
                // NOTE(review): this re-parses `action_content`, which was
                // already parsed into `action_def` above — could be reused.
                if let Ok(action_content) =
                    serde_yaml::from_str::<serde_yaml::Value>(&action_content)
                {
                    if let Some(name) = action_content.get("name").and_then(|v| v.as_str()) {
                        detailed_output.push_str(&format!("Action name: {}\n", name));
                    }

                    if let Some(description) =
                        action_content.get("description").and_then(|v| v.as_str())
                    {
                        detailed_output.push_str(&format!("Description: {}\n", description));
                    }

                    detailed_output.push('\n');
                }

                // Add individual step outputs.
                detailed_output.push_str("Step outputs:\n");
                for output in &step_outputs {
                    detailed_output.push_str(&format!("{}\n", output));
                }

                detailed_output
            } else {
                format!(
                    "Executed composite action with {} steps",
                    step_outputs.len()
                )
            };

            Ok(StepResult {
                name: step
                    .name
                    .clone()
                    .unwrap_or_else(|| "Composite Action".to_string()),
                status: StepStatus::Success,
                output,
            })
        }
        _ => Err(ExecutionError::Execution(
            "Action is not a composite action or has invalid format".to_string(),
        )),
    }
}
|
|
|
|
|
|
|
|
|
|
// Helper function to convert YAML step to our Step struct
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
fn convert_yaml_to_step(step_yaml: &serde_yaml::Value) -> Result<workflow::Step, String> {
|
2025-04-04 15:08:29 +05:30
|
|
|
// Extract step properties
|
|
|
|
|
let name = step_yaml
|
|
|
|
|
.get("name")
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
.map(|s| s.to_string());
|
|
|
|
|
|
|
|
|
|
let uses = step_yaml
|
|
|
|
|
.get("uses")
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
.map(|s| s.to_string());
|
|
|
|
|
|
|
|
|
|
let run = step_yaml
|
|
|
|
|
.get("run")
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
.map(|s| s.to_string());
|
|
|
|
|
|
|
|
|
|
let shell = step_yaml
|
|
|
|
|
.get("shell")
|
|
|
|
|
.and_then(|v| v.as_str())
|
|
|
|
|
.map(|s| s.to_string());
|
|
|
|
|
|
|
|
|
|
let with = step_yaml.get("with").and_then(|v| v.as_mapping()).map(|m| {
|
|
|
|
|
let mut with_map = HashMap::new();
|
|
|
|
|
for (k, v) in m {
|
|
|
|
|
if let (Some(key), Some(value)) = (k.as_str(), v.as_str()) {
|
|
|
|
|
with_map.insert(key.to_string(), value.to_string());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
with_map
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
let env = step_yaml
|
|
|
|
|
.get("env")
|
|
|
|
|
.and_then(|v| v.as_mapping())
|
|
|
|
|
.map(|m| {
|
|
|
|
|
let mut env_map = HashMap::new();
|
|
|
|
|
for (k, v) in m {
|
|
|
|
|
if let (Some(key), Some(value)) = (k.as_str(), v.as_str()) {
|
|
|
|
|
env_map.insert(key.to_string(), value.to_string());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
env_map
|
|
|
|
|
})
|
|
|
|
|
.unwrap_or_default();
|
|
|
|
|
|
|
|
|
|
// For composite steps with shell, construct a run step
|
2025-04-21 18:04:52 +05:30
|
|
|
let final_run = run;
|
2025-04-04 15:08:29 +05:30
|
|
|
|
2025-04-24 16:06:53 +05:30
|
|
|
// Extract continue_on_error
|
2025-04-24 16:49:07 +05:30
|
|
|
let continue_on_error = step_yaml.get("continue-on-error").and_then(|v| v.as_bool());
|
2025-04-24 16:06:53 +05:30
|
|
|
|
Refactor: Migrate modules to workspace crates
- Extracted functionality from the `src/` directory into individual crates within the `crates/` directory. This improves modularity, organization, and separation of concerns.
- Migrated modules include: models, evaluator, ui, gitlab, utils, logging, github, matrix, executor, runtime, parser, and validators.
- Removed the original source files and directories from `src/` after successful migration.
- This change sets the stage for better code management and potentially independent development/versioning of workspace members.
2025-05-02 12:53:41 +05:30
|
|
|
Ok(workflow::Step {
|
2025-04-04 15:08:29 +05:30
|
|
|
name,
|
|
|
|
|
uses,
|
|
|
|
|
run: final_run,
|
2025-04-21 18:04:52 +05:30
|
|
|
with,
|
2025-04-04 15:08:29 +05:30
|
|
|
env,
|
2025-04-24 16:06:53 +05:30
|
|
|
continue_on_error,
|
2025-04-04 15:08:29 +05:30
|
|
|
})
|
|
|
|
|
}
|
2025-08-09 13:36:03 +05:30
|
|
|
|
|
|
|
|
/// Evaluate a job condition expression
|
|
|
|
|
/// This is a simplified implementation that handles basic GitHub Actions expressions
|
|
|
|
|
fn evaluate_job_condition(
|
|
|
|
|
condition: &str,
|
|
|
|
|
env_context: &HashMap<String, String>,
|
|
|
|
|
workflow: &WorkflowDefinition,
|
|
|
|
|
) -> bool {
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::debug(&format!("Evaluating condition: {}", condition));
|
2025-08-09 13:36:03 +05:30
|
|
|
|
|
|
|
|
// For now, implement basic pattern matching for common conditions
|
|
|
|
|
// TODO: Implement a full GitHub Actions expression evaluator
|
|
|
|
|
|
|
|
|
|
// Handle simple boolean conditions
|
|
|
|
|
if condition == "true" {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
if condition == "false" {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Handle github.event.pull_request.draft == false
|
|
|
|
|
if condition.contains("github.event.pull_request.draft == false") {
|
|
|
|
|
// For local execution, assume this is always true (not a draft)
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Handle needs.jobname.outputs.outputname == 'value' patterns
|
|
|
|
|
if condition.contains("needs.") && condition.contains(".outputs.") {
|
|
|
|
|
// For now, simulate that outputs are available but empty
|
|
|
|
|
// This means conditions like needs.changes.outputs.source-code == 'true' will be false
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::debug(
|
2025-08-09 13:36:03 +05:30
|
|
|
"Evaluating needs.outputs condition - defaulting to false for local execution",
|
|
|
|
|
);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Default to true for unknown conditions to avoid breaking workflows
|
2025-08-09 17:03:03 +05:30
|
|
|
wrkflw_logging::warning(&format!(
|
2025-08-09 13:36:03 +05:30
|
|
|
"Unknown condition pattern: '{}' - defaulting to true",
|
|
|
|
|
condition
|
|
|
|
|
));
|
|
|
|
|
true
|
|
|
|
|
}
|