diff --git a/whatsdirty b/whatsdirty
index 625f6c5..e0e1805 100755
--- a/whatsdirty
+++ b/whatsdirty
@@ -172,6 +172,7 @@ get_changed_files_list() {
 analyze_repository() {
     local repo_path="$1"
     local base_dir="$2"
+    local skip_fetch="$3"
     local repo_name
 
     # Calculate relative path from base directory
@@ -187,8 +188,10 @@ analyze_repository() {
 
     cd "$repo_path" || return 1
 
-    # Fetch remote updates first
-    fetch_remote_status
+    # Fetch remote updates first (unless already done in parallel)
+    if [ "$skip_fetch" != "skip_fetch" ]; then
+        fetch_remote_status
+    fi
 
     # Check upstream status
     local upstream_info
@@ -383,20 +386,38 @@ print_clean_repositories() {
 scan_repositories() {
     local scan_dir="$1"
     local temp_file="$2"
-
-    # Use find to recursively locate all .git directories
+
+    # First, collect all repository paths
+    local -a repo_paths=()
     while IFS= read -r git_dir; do
-        # Get the repository path (parent of .git)
         local repo_path
         repo_path=$(dirname "$git_dir")
-
+        repo_paths+=("$repo_path")
+    done < <(find "$scan_dir" -type d -path "*_deps*" -prune -o -type d -name ".git" -print 2>/dev/null | sort)
+
+    # Parallelize git fetch for all repositories
+    local -a fetch_pids=()
+    for repo_path in "${repo_paths[@]}"; do
+        (
+            cd "$repo_path" 2>/dev/null && timeout 5 git fetch --all --quiet 2>/dev/null || true
+        ) &
+        fetch_pids+=($!)
+    done
+
+    # Wait for all fetches to complete
+    for pid in "${fetch_pids[@]}"; do
+        wait "$pid" 2>/dev/null || true
+    done
+
+    # Now analyze all repositories (fetch data is already cached)
+    for repo_path in "${repo_paths[@]}"; do
         local repo_data
-        repo_data=$(analyze_repository "$repo_path" "$scan_dir")
+        repo_data=$(analyze_repository "$repo_path" "$scan_dir" "skip_fetch")
         if [ -n "$repo_data" ]; then
             echo "$repo_data" >> "$temp_file"
         fi
         cd "$scan_dir" || exit 1
-    done < <(find "$scan_dir" -type d -name ".git" 2>/dev/null | sort)
+    done
 }
 
 main() {
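
For reference, a minimal standalone sketch of the two-phase pattern the patch introduces: fetch every repository in background subshells, wait, then analyze against the now-cached remote state. The MAX_JOBS throttle is an assumption added here for illustration only and is not part of whatsdirty (the patch launches one fetch job per repository with no cap), and git status stands in for analyze_repository.

#!/usr/bin/env bash
# Illustrative sketch only; not part of the whatsdirty patch.
set -u

MAX_JOBS=8   # assumed concurrency cap (the patch itself has no cap)

# Phase 0: collect repository paths, mirroring the patch's find | sort.
repo_paths=()
while IFS= read -r git_dir; do
    repo_paths+=("$(dirname "$git_dir")")
done < <(find "$PWD" -type d -name ".git" -print 2>/dev/null | sort)

# Phase 1: fetch in parallel; each fetch runs in a subshell so the cd
# does not leak into the caller, as in the patch.
for repo in "${repo_paths[@]}"; do
    while [ "$(jobs -rp | wc -l)" -ge "$MAX_JOBS" ]; do
        wait -n   # requires bash >= 4.3; reap any one finished job
    done
    (
        cd "$repo" 2>/dev/null && timeout 5 git fetch --all --quiet 2>/dev/null
    ) &
done
wait   # block until every background fetch has finished

# Phase 2: serial analysis; remote refs are already up to date on disk.
for repo in "${repo_paths[@]}"; do
    ( cd "$repo" && git status --short --branch )   # stand-in for analyze_repository
done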