Compare commits

...

4 Commits

Author SHA1 Message Date
bahdotsh
8765537cfa feat(cli): wrkflw validate accepts multiple paths (files/dirs); autodetects GitHub/GitLab per file; --gitlab forces GitLab for all; graceful EPIPE handling when piped; docs updated 2025-08-13 14:06:40 +05:30
Gokul
ac708902ef Merge pull request #35 from bahdotsh/feature/async-log-processing
feat: move log stream composition and filtering to background thread
2025-08-13 13:41:18 +05:30
bahdotsh
d1268d55cf feat: move log stream composition and filtering to background thread
- Resolves #29: UI unresponsiveness in logs tab
- Add LogProcessor with background thread for async log processing
- Implement pre-processed log caching with ProcessedLogEntry
- Replace frame-by-frame log processing with cached results
- Add automatic log change detection for app and system logs
- Optimize rendering from O(n) to O(1) complexity
- Maintain all search, filter, and highlighting functionality
- Fix clippy warning for redundant pattern matching

Performance improvements:
- Log processing moved to separate thread with 50ms debouncing
- UI rendering no longer blocks on log filtering/formatting
- Supports thousands of logs without UI lag
- Non-blocking request/response pattern with mpsc channels
2025-08-13 13:38:17 +05:30
Gokul
a146d94c35 Merge pull request #34 from bahdotsh/fix/runs-on-array-support
fix: Support array format for runs-on field in GitHub Actions workflows
2025-08-13 13:24:35 +05:30
9 changed files with 513 additions and 195 deletions

View File

@@ -111,6 +111,12 @@ wrkflw validate path/to/workflow.yml
# Validate workflows in a specific directory
wrkflw validate path/to/workflows
# Validate multiple files and/or directories (GitHub and GitLab are auto-detected)
wrkflw validate path/to/flow-1.yml path/to/flow-2.yml path/to/workflows
# Force GitLab parsing for all provided paths
wrkflw validate --gitlab .gitlab-ci.yml other.gitlab-ci.yml
# Validate with verbose output
wrkflw validate --verbose path/to/workflow.yml

View File

@@ -154,6 +154,15 @@ fn run_tui_event_loop(
if last_tick.elapsed() >= tick_rate {
app.tick();
app.update_running_workflow_progress();
// Check for log processing updates (includes system log change detection)
app.check_log_processing_updates();
// Request log processing if needed
if app.logs_need_update {
app.request_log_processing_update();
}
last_tick = Instant::now();
}

View File

@@ -1,4 +1,5 @@
// App state for the UI
use crate::log_processor::{LogProcessingRequest, LogProcessor, ProcessedLogEntry};
use crate::models::{
ExecutionResultMsg, JobExecution, LogFilterLevel, StepExecution, Workflow, WorkflowExecution,
WorkflowStatus,
@@ -40,6 +41,12 @@ pub struct App {
pub log_filter_level: Option<LogFilterLevel>, // Current log level filter
pub log_search_matches: Vec<usize>, // Indices of logs that match the search
pub log_search_match_idx: usize, // Current match index for navigation
// Background log processing
pub log_processor: LogProcessor,
pub processed_logs: Vec<ProcessedLogEntry>,
pub logs_need_update: bool, // Flag to trigger log processing
pub last_system_logs_count: usize, // Track system log changes
}
impl App {
@@ -199,6 +206,12 @@ impl App {
log_filter_level: Some(LogFilterLevel::All),
log_search_matches: Vec::new(),
log_search_match_idx: 0,
// Background log processing
log_processor: LogProcessor::new(),
processed_logs: Vec::new(),
logs_need_update: true,
last_system_logs_count: 0,
}
}
@@ -429,10 +442,9 @@ impl App {
if let Some(idx) = self.workflow_list_state.selected() {
if idx < self.workflows.len() && !self.execution_queue.contains(&idx) {
self.execution_queue.push(idx);
let timestamp = Local::now().format("%H:%M:%S").to_string();
self.logs.push(format!(
"[{}] Added '{}' to execution queue. Press 'Enter' to start.",
timestamp, self.workflows[idx].name
self.add_timestamped_log(&format!(
"Added '{}' to execution queue. Press 'Enter' to start.",
self.workflows[idx].name
));
}
}
@@ -635,10 +647,11 @@ impl App {
self.log_search_active = false;
self.log_search_query.clear();
self.log_search_matches.clear();
self.mark_logs_for_update();
}
KeyCode::Backspace => {
self.log_search_query.pop();
self.update_log_search_matches();
self.mark_logs_for_update();
}
KeyCode::Enter => {
self.log_search_active = false;
@@ -646,7 +659,7 @@ impl App {
}
KeyCode::Char(c) => {
self.log_search_query.push(c);
self.update_log_search_matches();
self.mark_logs_for_update();
}
_ => {}
}
@@ -658,8 +671,8 @@ impl App {
if !self.log_search_active {
// Don't clear the query, this allows toggling the search UI while keeping the filter
} else {
// When activating search, update matches
self.update_log_search_matches();
// When activating search, trigger update
self.mark_logs_for_update();
}
}
@@ -670,8 +683,8 @@ impl App {
Some(level) => Some(level.next()),
};
// Update search matches when filter changes
self.update_log_search_matches();
// Trigger log processing update when filter changes
self.mark_logs_for_update();
}
// Clear log search and filter
@@ -680,6 +693,7 @@ impl App {
self.log_filter_level = None;
self.log_search_matches.clear();
self.log_search_match_idx = 0;
self.mark_logs_for_update();
}
// Update matches based on current search and filter
@@ -955,4 +969,82 @@ impl App {
}
}
}
/// Ask the background worker to re-run log filtering/formatting.
///
/// Snapshots the current search query, filter level, and app logs into a
/// `LogProcessingRequest` and sends it over the processor's channel
/// (non-blocking). If the channel is disconnected (worker thread died),
/// a replacement `LogProcessor` is spawned and `logs_need_update` is left
/// set so the tick loop retries on the next pass.
pub fn request_log_processing_update(&mut self) {
    let snapshot = LogProcessingRequest {
        search_query: self.log_search_query.clone(),
        filter_level: self.log_filter_level.clone(),
        app_logs: self.logs.clone(),
        app_logs_count: self.logs.len(),
        system_logs_count: wrkflw_logging::get_logs().len(),
    };
    match self.log_processor.request_update(snapshot) {
        Ok(()) => {}
        Err(_) => {
            // Worker channel disconnected: recreate the processor and let
            // the next tick re-send the request.
            self.log_processor = LogProcessor::new();
            self.logs_need_update = true;
        }
    }
}
/// Poll for background-processing results and detect system-log growth.
///
/// Called from the tick loop; never blocks.
pub fn check_log_processing_updates(&mut self) {
    // System logs are appended elsewhere in the app; watch their count and
    // schedule a reprocess whenever it changes.
    let system_count = wrkflw_logging::get_logs().len();
    if system_count != self.last_system_logs_count {
        self.last_system_logs_count = system_count;
        self.mark_logs_for_update();
    }
    if let Some(update) = self.log_processor.try_get_update() {
        self.processed_logs = update.processed_logs;
        self.log_search_matches = update.search_matches;
        // With an active search that produced hits, snap the viewport to
        // the first hit and reset match navigation.
        if !self.log_search_query.is_empty() {
            if let Some(&first_hit) = self.log_search_matches.first() {
                self.log_search_match_idx = 0;
                self.log_scroll = first_hit;
            }
        }
        self.logs_need_update = false;
    }
}
/// Trigger log processing when search/filter changes
///
/// Sets the dirty flag *before* issuing the request: if the request fails
/// (worker channel closed), the flag stays set and the tick loop retries
/// on its next pass.
pub fn mark_logs_for_update(&mut self) {
self.logs_need_update = true;
self.request_log_processing_update();
}
/// Snapshot the app logs followed by the global system logs as one list.
///
/// Order matters: app logs first, then system logs, matching what the
/// background processor renders.
pub fn get_combined_logs(&self) -> Vec<String> {
    let mut combined: Vec<String> = self.logs.clone();
    for entry in wrkflw_logging::get_logs() {
        combined.push(entry.clone());
    }
    combined
}
/// Add a log entry and trigger log processing update
///
/// The push must happen before `mark_logs_for_update`, because the update
/// request snapshots `self.logs`.
pub fn add_log(&mut self, message: String) {
self.logs.push(message);
self.mark_logs_for_update();
}
/// Prefix `message` with the current local time as `[HH:MM:SS]` and
/// record it via `add_log` (which also schedules reprocessing).
pub fn add_timestamped_log(&mut self, message: &str) {
    let now = Local::now().format("%H:%M:%S");
    self.add_log(format!("[{}] {}", now, message));
}
}

View File

@@ -12,6 +12,7 @@
pub mod app;
pub mod components;
pub mod handlers;
// Background worker that filters/formats logs off the UI thread.
pub mod log_processor;
pub mod models;
pub mod utils;
pub mod views;

View File

@@ -0,0 +1,305 @@
// Background log processor for asynchronous log filtering and formatting
use crate::models::LogFilterLevel;
use ratatui::{
style::{Color, Style},
text::{Line, Span},
widgets::{Cell, Row},
};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration, Instant};
/// Processed log entry ready for rendering
///
/// Built once on the worker thread so the UI can emit table rows without
/// re-parsing or re-styling log lines every frame.
#[derive(Debug, Clone)]
pub struct ProcessedLogEntry {
// "HH:MM:SS" extracted from the raw line, or a "??:??:??" placeholder.
pub timestamp: String,
// Short severity tag ("ERROR", "WARN", "SUCCESS", "INFO", "TRIG").
pub log_type: String,
// Style applied to the severity cell.
pub log_style: Style,
// Message text pre-split into spans; search hits carry highlight styling.
pub content_spans: Vec<Span<'static>>,
}
impl ProcessedLogEntry {
    /// Build the three-column table row (time, type, message) used by the
    /// logs view. Cheap: only clones pre-computed strings/spans.
    pub fn to_row(&self) -> Row<'static> {
        let time_cell = Cell::from(self.timestamp.clone());
        let type_cell = Cell::from(self.log_type.clone()).style(self.log_style);
        let message_cell = Cell::from(Line::from(self.content_spans.clone()));
        Row::new(vec![time_cell, type_cell, message_cell])
    }
}
/// Request to update log processing parameters
#[derive(Debug, Clone)]
pub struct LogProcessingRequest {
// Current search text; empty string means "no search".
pub search_query: String,
// Active level filter; `None` passes every line.
pub filter_level: Option<LogFilterLevel>,
pub app_logs: Vec<String>, // Complete app logs (snapshot taken on the UI thread)
pub app_logs_count: usize, // To detect changes in app logs
pub system_logs_count: usize, // To detect changes in system logs (worker fetches those itself)
}
/// Response with processed logs
#[derive(Debug, Clone)]
pub struct LogProcessingResponse {
// Entries that survived filter/search, pre-rendered for display.
pub processed_logs: Vec<ProcessedLogEntry>,
// Total number of combined (app + system) logs before filtering.
pub total_log_count: usize,
// Number of entries after filtering (equals processed_logs.len()).
pub filtered_count: usize,
pub search_matches: Vec<usize>, // Indices into `processed_logs` of entries matching the search
}
/// Background log processor
///
/// Owns a worker thread plus the request/response channels used to talk to
/// it. Dropping the processor closes the request channel, which makes the
/// worker loop exit on `Disconnected`.
pub struct LogProcessor {
request_tx: mpsc::Sender<LogProcessingRequest>,
response_rx: mpsc::Receiver<LogProcessingResponse>,
// Held so the thread handle isn't dropped/detached; never joined explicitly.
_worker_handle: thread::JoinHandle<()>,
}
impl LogProcessor {
/// Create a new log processor with a background worker thread
pub fn new() -> Self {
let (request_tx, request_rx) = mpsc::channel::<LogProcessingRequest>();
let (response_tx, response_rx) = mpsc::channel::<LogProcessingResponse>();
let worker_handle = thread::spawn(move || {
Self::worker_loop(request_rx, response_tx);
});
Self {
request_tx,
response_rx,
_worker_handle: worker_handle,
}
}
/// Send a processing request (non-blocking)
pub fn request_update(
&self,
request: LogProcessingRequest,
) -> Result<(), mpsc::SendError<LogProcessingRequest>> {
self.request_tx.send(request)
}
/// Try to get the latest processed logs (non-blocking)
pub fn try_get_update(&self) -> Option<LogProcessingResponse> {
self.response_rx.try_recv().ok()
}
/// Background worker loop
fn worker_loop(
request_rx: mpsc::Receiver<LogProcessingRequest>,
response_tx: mpsc::Sender<LogProcessingResponse>,
) {
let mut last_request: Option<LogProcessingRequest> = None;
let mut last_processed_time = Instant::now();
let mut cached_logs: Vec<String> = Vec::new();
let mut cached_app_logs_count = 0;
let mut cached_system_logs_count = 0;
loop {
// Check for new requests with a timeout to allow periodic processing
let request = match request_rx.recv_timeout(Duration::from_millis(100)) {
Ok(req) => Some(req),
Err(mpsc::RecvTimeoutError::Timeout) => None,
Err(mpsc::RecvTimeoutError::Disconnected) => break,
};
// Update request if we received one
if let Some(req) = request {
last_request = Some(req);
}
// Process if we have a request and enough time has passed since last processing
if let Some(ref req) = last_request {
let should_process = last_processed_time.elapsed() > Duration::from_millis(50)
&& (cached_app_logs_count != req.app_logs_count
|| cached_system_logs_count != req.system_logs_count
|| cached_logs.is_empty());
if should_process {
// Refresh log cache if log counts changed
if cached_app_logs_count != req.app_logs_count
|| cached_system_logs_count != req.system_logs_count
|| cached_logs.is_empty()
{
cached_logs = Self::get_combined_logs(&req.app_logs);
cached_app_logs_count = req.app_logs_count;
cached_system_logs_count = req.system_logs_count;
}
let response = Self::process_logs(&cached_logs, req);
if response_tx.send(response).is_err() {
break; // Receiver disconnected
}
last_processed_time = Instant::now();
}
}
}
}
/// Get combined app and system logs
fn get_combined_logs(app_logs: &[String]) -> Vec<String> {
let mut all_logs = Vec::new();
// Add app logs
for log in app_logs {
all_logs.push(log.clone());
}
// Add system logs
for log in wrkflw_logging::get_logs() {
all_logs.push(log.clone());
}
all_logs
}
/// Process logs according to search and filter criteria
fn process_logs(all_logs: &[String], request: &LogProcessingRequest) -> LogProcessingResponse {
// Filter logs based on search query and filter level
let mut filtered_logs = Vec::new();
let mut search_matches = Vec::new();
for (idx, log) in all_logs.iter().enumerate() {
let passes_filter = match &request.filter_level {
None => true,
Some(level) => level.matches(log),
};
let matches_search = if request.search_query.is_empty() {
true
} else {
log.to_lowercase()
.contains(&request.search_query.to_lowercase())
};
if passes_filter && matches_search {
filtered_logs.push((idx, log));
if matches_search && !request.search_query.is_empty() {
search_matches.push(filtered_logs.len() - 1);
}
}
}
// Process filtered logs into display format
let processed_logs: Vec<ProcessedLogEntry> = filtered_logs
.iter()
.map(|(_, log_line)| Self::process_log_entry(log_line, &request.search_query))
.collect();
LogProcessingResponse {
processed_logs,
total_log_count: all_logs.len(),
filtered_count: filtered_logs.len(),
search_matches,
}
}
/// Process a single log entry into display format
fn process_log_entry(log_line: &str, search_query: &str) -> ProcessedLogEntry {
// Extract timestamp from log format [HH:MM:SS]
let timestamp = if log_line.starts_with('[') && log_line.contains(']') {
let end = log_line.find(']').unwrap_or(0);
if end > 1 {
log_line[1..end].to_string()
} else {
"??:??:??".to_string()
}
} else {
"??:??:??".to_string()
};
// Determine log type and style
let (log_type, log_style) =
if log_line.contains("Error") || log_line.contains("error") || log_line.contains("")
{
("ERROR", Style::default().fg(Color::Red))
} else if log_line.contains("Warning")
|| log_line.contains("warning")
|| log_line.contains("⚠️")
{
("WARN", Style::default().fg(Color::Yellow))
} else if log_line.contains("Success")
|| log_line.contains("success")
|| log_line.contains("")
{
("SUCCESS", Style::default().fg(Color::Green))
} else if log_line.contains("Running")
|| log_line.contains("running")
|| log_line.contains("")
{
("INFO", Style::default().fg(Color::Cyan))
} else if log_line.contains("Triggering") || log_line.contains("triggered") {
("TRIG", Style::default().fg(Color::Magenta))
} else {
("INFO", Style::default().fg(Color::Gray))
};
// Extract content after timestamp
let content = if log_line.starts_with('[') && log_line.contains(']') {
let start = log_line.find(']').unwrap_or(0) + 1;
log_line[start..].trim()
} else {
log_line
};
// Create content spans with search highlighting
let content_spans = if !search_query.is_empty() {
Self::highlight_search_matches(content, search_query)
} else {
vec![Span::raw(content.to_string())]
};
ProcessedLogEntry {
timestamp,
log_type: log_type.to_string(),
log_style,
content_spans,
}
}
/// Highlight search matches in content
fn highlight_search_matches(content: &str, search_query: &str) -> Vec<Span<'static>> {
let mut spans = Vec::new();
let lowercase_content = content.to_lowercase();
let lowercase_query = search_query.to_lowercase();
if lowercase_content.contains(&lowercase_query) {
let mut last_idx = 0;
while let Some(idx) = lowercase_content[last_idx..].find(&lowercase_query) {
let real_idx = last_idx + idx;
// Add text before match
if real_idx > last_idx {
spans.push(Span::raw(content[last_idx..real_idx].to_string()));
}
// Add matched text with highlight
let match_end = real_idx + search_query.len();
spans.push(Span::styled(
content[real_idx..match_end].to_string(),
Style::default().bg(Color::Yellow).fg(Color::Black),
));
last_idx = match_end;
}
// Add remaining text after last match
if last_idx < content.len() {
spans.push(Span::raw(content[last_idx..].to_string()));
}
} else {
spans.push(Span::raw(content.to_string()));
}
spans
}
}
// Default is equivalent to `new()`: it spawns the worker thread immediately.
impl Default for LogProcessor {
fn default() -> Self {
Self::new()
}
}

View File

@@ -50,6 +50,7 @@ pub struct StepExecution {
}
/// Log filter levels
#[derive(Debug, Clone, PartialEq)]
pub enum LogFilterLevel {
Info,
Warning,

View File

@@ -140,45 +140,8 @@ pub fn render_logs_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, a
f.render_widget(search_block, chunks[1]);
}
// Combine application logs with system logs
let mut all_logs = Vec::new();
// Now all logs should have timestamps in the format [HH:MM:SS]
// Process app logs
for log in &app.logs {
all_logs.push(log.clone());
}
// Process system logs
for log in wrkflw_logging::get_logs() {
all_logs.push(log.clone());
}
// Filter logs based on search query and filter level
let filtered_logs = if !app.log_search_query.is_empty() || app.log_filter_level.is_some() {
all_logs
.iter()
.filter(|log| {
let passes_filter = match &app.log_filter_level {
None => true,
Some(level) => level.matches(log),
};
let matches_search = if app.log_search_query.is_empty() {
true
} else {
log.to_lowercase()
.contains(&app.log_search_query.to_lowercase())
};
passes_filter && matches_search
})
.cloned()
.collect::<Vec<String>>()
} else {
all_logs.clone() // Clone to avoid moving all_logs
};
// Use processed logs from background thread instead of processing on every frame
let filtered_logs = &app.processed_logs;
// Create a table for logs for better organization
let header_cells = ["Time", "Type", "Message"]
@@ -189,109 +152,10 @@ pub fn render_logs_tab(f: &mut Frame<CrosstermBackend<io::Stdout>>, app: &App, a
.style(Style::default().add_modifier(Modifier::BOLD))
.height(1);
let rows = filtered_logs.iter().map(|log_line| {
// Parse log line to extract timestamp, type and message
// Extract timestamp from log format [HH:MM:SS]
let timestamp = if log_line.starts_with('[') && log_line.contains(']') {
let end = log_line.find(']').unwrap_or(0);
if end > 1 {
log_line[1..end].to_string()
} else {
"??:??:??".to_string() // Show placeholder for malformed logs
}
} else {
"??:??:??".to_string() // Show placeholder for malformed logs
};
let (log_type, log_style, _) =
if log_line.contains("Error") || log_line.contains("error") || log_line.contains("")
{
("ERROR", Style::default().fg(Color::Red), log_line.as_str())
} else if log_line.contains("Warning")
|| log_line.contains("warning")
|| log_line.contains("⚠️")
{
(
"WARN",
Style::default().fg(Color::Yellow),
log_line.as_str(),
)
} else if log_line.contains("Success")
|| log_line.contains("success")
|| log_line.contains("")
{
(
"SUCCESS",
Style::default().fg(Color::Green),
log_line.as_str(),
)
} else if log_line.contains("Running")
|| log_line.contains("running")
|| log_line.contains("")
{
("INFO", Style::default().fg(Color::Cyan), log_line.as_str())
} else if log_line.contains("Triggering") || log_line.contains("triggered") {
(
"TRIG",
Style::default().fg(Color::Magenta),
log_line.as_str(),
)
} else {
("INFO", Style::default().fg(Color::Gray), log_line.as_str())
};
// Extract content after timestamp
let content = if log_line.starts_with('[') && log_line.contains(']') {
let start = log_line.find(']').unwrap_or(0) + 1;
log_line[start..].trim()
} else {
log_line.as_str()
};
// Highlight search matches in content if search is active
let mut content_spans = Vec::new();
if !app.log_search_query.is_empty() {
let lowercase_content = content.to_lowercase();
let lowercase_query = app.log_search_query.to_lowercase();
if lowercase_content.contains(&lowercase_query) {
let mut last_idx = 0;
while let Some(idx) = lowercase_content[last_idx..].find(&lowercase_query) {
let real_idx = last_idx + idx;
// Add text before match
if real_idx > last_idx {
content_spans.push(Span::raw(content[last_idx..real_idx].to_string()));
}
// Add matched text with highlight
let match_end = real_idx + app.log_search_query.len();
content_spans.push(Span::styled(
content[real_idx..match_end].to_string(),
Style::default().bg(Color::Yellow).fg(Color::Black),
));
last_idx = match_end;
}
// Add remaining text after last match
if last_idx < content.len() {
content_spans.push(Span::raw(content[last_idx..].to_string()));
}
} else {
content_spans.push(Span::raw(content));
}
} else {
content_spans.push(Span::raw(content));
}
Row::new(vec![
Cell::from(timestamp),
Cell::from(log_type).style(log_style),
Cell::from(Line::from(content_spans)),
])
});
// Convert processed logs to table rows - this is now very fast since logs are pre-processed
let rows = filtered_logs
.iter()
.map(|processed_log| processed_log.to_row());
let content_idx = if show_search_bar { 2 } else { 1 };

View File

@@ -26,6 +26,9 @@ wrkflw validate
wrkflw validate .github/workflows/ci.yml
wrkflw validate path/to/workflows
# Validate multiple files and/or directories
wrkflw validate path/to/flow-1.yml path/to/flow-2.yml path/to/workflows
# Run a workflow (Docker by default)
wrkflw run .github/workflows/ci.yml
@@ -40,10 +43,11 @@ wrkflw tui --runtime podman
### Commands
- **validate**: Validate a workflow/pipeline file or directory
- **validate**: Validate workflow/pipeline files and/or directories
- GitHub (default): `.github/workflows/*.yml`
- GitLab: `.gitlab-ci.yml` or files ending with `gitlab-ci.yml`
- Exit code behavior (by default): `1` when validation failures are detected
- Accepts multiple paths in a single invocation
- Exit code behavior (by default): `1` when any validation failure is detected
- Flags: `--gitlab`, `--exit-code`, `--no-exit-code`, `--verbose`
- **run**: Execute a workflow or pipeline locally

View File

@@ -48,8 +48,9 @@ struct Wrkflw {
enum Commands {
/// Validate workflow or pipeline files
Validate {
/// Path to workflow/pipeline file or directory (defaults to .github/workflows)
path: Option<PathBuf>,
/// Path(s) to workflow/pipeline file(s) or directory(ies) (defaults to .github/workflows if none provided)
#[arg(value_name = "path", num_args = 0..)]
paths: Vec<PathBuf>,
/// Explicitly validate as GitLab CI/CD pipeline
#[arg(long)]
@@ -266,6 +267,28 @@ fn is_gitlab_pipeline(path: &Path) -> bool {
#[tokio::main]
async fn main() {
// Gracefully handle Broken pipe (EPIPE) when output is piped (e.g., to `head`)
let default_panic_hook = std::panic::take_hook();
std::panic::set_hook(Box::new(move |info| {
let mut is_broken_pipe = false;
if let Some(s) = info.payload().downcast_ref::<&str>() {
if s.contains("Broken pipe") {
is_broken_pipe = true;
}
}
if let Some(s) = info.payload().downcast_ref::<String>() {
if s.contains("Broken pipe") {
is_broken_pipe = true;
}
}
if is_broken_pipe {
// Treat as a successful, short-circuited exit
std::process::exit(0);
}
// Fallback to the default hook for all other panics
default_panic_hook(info);
}));
let cli = Wrkflw::parse();
let verbose = cli.verbose;
let debug = cli.debug;
@@ -286,65 +309,78 @@ async fn main() {
match &cli.command {
Some(Commands::Validate {
path,
paths,
gitlab,
exit_code,
no_exit_code,
}) => {
// Determine the path to validate
let validate_path = path
.clone()
.unwrap_or_else(|| PathBuf::from(".github/workflows"));
// Check if the path exists
if !validate_path.exists() {
eprintln!("Error: Path does not exist: {}", validate_path.display());
std::process::exit(1);
}
// Determine the paths to validate (default to .github/workflows when none provided)
let validate_paths: Vec<PathBuf> = if paths.is_empty() {
vec![PathBuf::from(".github/workflows")]
} else {
paths.clone()
};
// Determine if we're validating a GitLab pipeline based on the --gitlab flag or file detection
let force_gitlab = *gitlab;
let mut validation_failed = false;
if validate_path.is_dir() {
// Validate all workflow files in the directory
let entries = std::fs::read_dir(&validate_path)
.expect("Failed to read directory")
.filter_map(|entry| entry.ok())
.filter(|entry| {
entry.path().is_file()
&& entry
.path()
.extension()
.is_some_and(|ext| ext == "yml" || ext == "yaml")
})
.collect::<Vec<_>>();
for validate_path in validate_paths {
// Check if the path exists; if not, mark failure but continue
if !validate_path.exists() {
eprintln!("Error: Path does not exist: {}", validate_path.display());
validation_failed = true;
continue;
}
println!("Validating {} workflow file(s)...", entries.len());
if validate_path.is_dir() {
// Validate all workflow files in the directory
let entries = std::fs::read_dir(&validate_path)
.expect("Failed to read directory")
.filter_map(|entry| entry.ok())
.filter(|entry| {
entry.path().is_file()
&& entry
.path()
.extension()
.is_some_and(|ext| ext == "yml" || ext == "yaml")
})
.collect::<Vec<_>>();
for entry in entries {
let path = entry.path();
let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);
println!(
"Validating {} workflow file(s) in {}...",
entries.len(),
validate_path.display()
);
for entry in entries {
let path = entry.path();
let is_gitlab = force_gitlab || is_gitlab_pipeline(&path);
let file_failed = if is_gitlab {
validate_gitlab_pipeline(&path, verbose)
} else {
validate_github_workflow(&path, verbose)
};
if file_failed {
validation_failed = true;
}
}
} else {
// Validate a single workflow file
let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);
let file_failed = if is_gitlab {
validate_gitlab_pipeline(&path, verbose)
validate_gitlab_pipeline(&validate_path, verbose)
} else {
validate_github_workflow(&path, verbose)
validate_github_workflow(&validate_path, verbose)
};
if file_failed {
validation_failed = true;
}
}
} else {
// Validate a single workflow file
let is_gitlab = force_gitlab || is_gitlab_pipeline(&validate_path);
validation_failed = if is_gitlab {
validate_gitlab_pipeline(&validate_path, verbose)
} else {
validate_github_workflow(&validate_path, verbose)
};
}
// Set exit code if validation failed and exit_code flag is true (and no_exit_code is false)