Skip to content

Instantly share code, notes, and snippets.

@AshishKapoor
Last active September 16, 2025 14:17
Show Gist options
  • Save AshishKapoor/30adb9a388d70c9045f9a77e3486dbf8 to your computer and use it in GitHub Desktop.
Save AshishKapoor/30adb9a388d70c9045f9a77e3486dbf8 to your computer and use it in GitHub Desktop.
This script updates all the npm projects with security audits.
#!/usr/bin/env bash
### README
## Make script executable
# chmod +x ./refresh.sh
## Run the script
# ./refresh.sh
## Update packages
# ./refresh.sh --upgrade-deps latest --upgrade-all
set -euo pipefail
# refresh-yarn-locks.sh
# Iterate through subdirectories (recursively or just first level) that contain a package.json
# and (re)generate yarn.lock by leveraging an npm-generated package-lock.json as an audit/fix baseline.
# Steps per project (default order):
# 1. npm install --package-lock-only
# 2. remove yarn.lock (if present)
# 3. npm audit fix (best effort; ignore failures optionally)
# 4. yarn import (creates yarn.lock from package-lock.json)
# 5. remove package-lock.json
#
# Enhancements:
# - Dry-run (-n)
# - Limit depth / only listed dirs (-d / args)
# - Ignore certain globs (-x)
# - Parallel mode (-p) (experimental: controls concurrency with GNU parallel / xargs -P)
# - Continue on error (--keep-going)
# - Skip audit step (--skip-audit)
# - Force yarn classic vs berry detection
# - Colorized output (can disable with NO_COLOR=1)
#
# Usage:
# ./refresh-yarn-locks.sh [options] [dir1 dir2 ...]
# If no dirs are provided, script auto-discovers directories containing package.json (excluding node_modules, .git, etc.)
VERSION="1.0.0"
# Early internal invocation handling (must happen before normal arg parsing)
if [[ ${1:-} == "--internal-one" ]]; then
shift || true
target_dir=${1:-}
if [[ -z "$target_dir" ]]; then
echo "[ERR ] Internal invocation missing directory argument" >&2
exit 2
fi
# Minimal project processing environment: avoid re-discovery & re-parsing flags
# We still need defaults for color etc., so continue script after setting a marker
INTERNAL_ONE=1
INTERNAL_TARGET="$target_dir"
else
INTERNAL_ONE=0
INTERNAL_TARGET=""
fi
# -------- Color Helpers --------
if [[ -t 1 && -z "${NO_COLOR:-}" ]]; then
RED="\033[31m"; GREEN="\033[32m"; YELLOW="\033[33m"; BLUE="\033[34m"; MAGENTA="\033[35m"; CYAN="\033[36m"; BOLD="\033[1m"; RESET="\033[0m"
else
RED=""; GREEN=""; YELLOW=""; BLUE=""; MAGENTA=""; CYAN=""; BOLD=""; RESET=""
fi
log() { printf "%b\n" "$*"; }
info() { log "${BLUE}[INFO]${RESET} $*"; }
success() { log "${GREEN}[OK]${RESET} $*"; }
warn() { log "${YELLOW}[WARN]${RESET} $*"; }
error() { log "${RED}[ERR ]${RESET} $*"; }
# -------- Defaults --------
DRY_RUN=0
KEEP_GOING=0
SKIP_AUDIT=0
MAX_DEPTH=4
PARALLEL=0
PARALLEL_JOBS="4"
PARALLEL_AUTO=0
PREFIX_OUTPUT=0
EXCLUDES=()
YARN_CMD="yarn"
NPM_CMD="npm"
CUSTOM_FIND_DIRS=()
# npm peer dependency conflict handling
LEGACY_PEER_DEPS=0 # if set, always pass --legacy-peer-deps
AUTO_LEGACY_FALLBACK=1 # if install fails with ERESOLVE, retry with --legacy-peer-deps
FORCE_PEER_DEPS=0 # alternative: use --force (less safe)
EXTRA_NPM_FLAGS=() # user-specified arbitrary flags
BUMP_PEERS=0 # if set, attempt to bump peer dependency versions in package.json on ERESOLVE
BUMP_PEERS_WARN=0 # bump when peer warnings appear even if install succeeds
BUMP_PEERS_ALWAYS=0 # always attempt peer bump after install
GIT_ENABLE=1
GIT_BRANCH="main"
GIT_PULL=1
GIT_DIRTY_ALLOW=0
UPGRADE_DEPS_MODE="" # latest|wanted
UPGRADE_INCLUDE_DEV=1
UPGRADE_ONLY_LIST="" # comma-separated list of deps to upgrade (subset)
UPGRADE_ALL=0 # if 1 upgrade all dependencies ignoring ONLY list
GIT_VISITED_REPOS=()
print_help() {
cat <<EOF
refresh-yarn-locks v$VERSION
Usage: $0 [options] [paths...]
Options:
-n, --dry-run Show what would be done without making changes
-k, --keep-going Continue processing other projects if one fails
-a, --skip-audit Skip 'npm audit fix' step
-d, --max-depth N Max search depth for auto-discovery (default: $MAX_DEPTH)
-x, --exclude GLOB Exclude directories matching GLOB (can repeat)
-p, --parallel [N|auto] Run in parallel (optional N = jobs, 'auto' = CPU cores, default: $PARALLEL_JOBS)
--yarn-cmd CMD Use a custom yarn command (default: yarn)
--npm-cmd CMD Use a custom npm command (default: npm)
--legacy-peer-deps Always run npm install with --legacy-peer-deps
--no-auto-legacy Disable automatic retry with --legacy-peer-deps on ERESOLVE
--force-peer Use --force instead of legacy-peer-deps on retry (if auto fallback)
--npm-flags "..." Additional flags passed verbatim to npm install
--bump-peers Attempt to modify package.json to satisfy peer dependencies before fallback
--bump-peers-warn Also bump when peer warnings appear (even if install succeeds)
--bump-peers-always Always attempt peer bump after install
--prefix-output Prefix each log line with project path when in parallel
--no-git Disable git operations
--git-branch NAME Branch to checkout (default: main; fallback master)
--no-git-pull Skip git pull after checkout
--git-dirty-allow Allow processing even with uncommitted changes
--upgrade-deps MODE Update dependency versions (MODE = latest|wanted) before install
--upgrade-only LIST Comma-separated dependency names to upgrade (implies subset)
--upgrade-all Upgrade all dependencies (overrides --upgrade-only)
--no-dev-upgrade Do not upgrade devDependencies
--no-color Disable color output (or set NO_COLOR=1)
-h, --help Show this help
-v, --version Show version
Examples:
$0 # auto-discover projects
$0 -n # dry run
$0 packages/app packages/api # only these paths
$0 -x "**/templates" # exclude templates
$0 -p 6 # parallel with 6 jobs
EOF
}
while [[ $# -gt 0 && $INTERNAL_ONE -eq 0 ]]; do
case "$1" in
-n|--dry-run) DRY_RUN=1; shift ;;
-k|--keep-going) KEEP_GOING=1; shift ;;
-a|--skip-audit) SKIP_AUDIT=1; shift ;;
-d|--max-depth) MAX_DEPTH="$2"; shift 2 ;;
-x|--exclude) EXCLUDES+=("$2"); shift 2 ;;
-p|--parallel)
PARALLEL=1
if [[ ${2:-} == auto ]]; then PARALLEL_AUTO=1; shift 2;
elif [[ ${2:-} =~ ^[0-9]+$ ]]; then PARALLEL_JOBS="$2"; shift 2; else shift; fi ;;
--yarn-cmd) YARN_CMD="$2"; shift 2 ;;
--npm-cmd) NPM_CMD="$2"; shift 2 ;;
--legacy-peer-deps) LEGACY_PEER_DEPS=1; shift ;;
--no-auto-legacy) AUTO_LEGACY_FALLBACK=0; shift ;;
--force-peer) FORCE_PEER_DEPS=1; shift ;;
--npm-flags) EXTRA_NPM_FLAGS+=("$2"); shift 2 ;;
--bump-peers) BUMP_PEERS=1; shift ;;
--bump-peers-warn) BUMP_PEERS_WARN=1; shift ;;
--bump-peers-always) BUMP_PEERS_ALWAYS=1; shift ;;
--prefix-output) PREFIX_OUTPUT=1; shift ;;
--no-git) GIT_ENABLE=0; shift ;;
--git-branch) GIT_BRANCH="$2"; shift 2 ;;
--no-git-pull) GIT_PULL=0; shift ;;
--git-dirty-allow) GIT_DIRTY_ALLOW=1; shift ;;
--upgrade-deps)
if [[ -z ${2:-} ]]; then error "--upgrade-deps requires MODE (latest|wanted)"; exit 1; fi
case "$2" in latest|wanted) UPGRADE_DEPS_MODE="$2" ;; *) error "Invalid upgrade mode: $2"; exit 1 ;; esac
shift 2 ;;
--upgrade-only)
UPGRADE_ONLY_LIST="$2"; shift 2 ;;
--upgrade-all) UPGRADE_ALL=1; shift ;;
--no-dev-upgrade) UPGRADE_INCLUDE_DEV=0; shift ;;
--no-git) GIT_ENABLE=0; shift ;;
--git-branch) GIT_BRANCH="$2"; shift 2 ;;
--no-git-pull) GIT_PULL=0; shift ;;
--git-dirty-allow) GIT_DIRTY_ALLOW=1; shift ;;
--upgrade-deps)
if [[ -z ${2:-} ]]; then error "--upgrade-deps requires MODE (latest|wanted)"; exit 1; fi
case "$2" in latest|wanted) UPGRADE_DEPS_MODE="$2" ;; *) error "Invalid upgrade mode: $2"; exit 1 ;; esac
shift 2 ;;
--no-dev-upgrade) UPGRADE_INCLUDE_DEV=0; shift ;;
--no-color) NO_COLOR=1; shift ;;
-h|--help) print_help; exit 0 ;;
-v|--version) echo "$VERSION"; exit 0 ;;
--) shift; break ;;
-*) error "Unknown option: $1"; exit 1 ;;
*) CUSTOM_FIND_DIRS+=("$1"); shift ;;
esac
done
# Re-disable colors if flag used after variable initialization
if [[ -n "${NO_COLOR:-}" ]]; then
RED=""; GREEN=""; YELLOW=""; BLUE=""; MAGENTA=""; CYAN=""; BOLD=""; RESET=""
fi
# -------- Discover Projects --------
PROJECT_DIRS=()
is_excluded() {
local dir="$1" pattern
# Safely iterate even if EXCLUDES is empty or unset
if [[ ${#EXCLUDES[@]:-0} -gt 0 ]]; then
for pattern in "${EXCLUDES[@]}"; do
if [[ "$dir" == $pattern ]]; then
return 0
fi
done
fi
return 1
}
collect_projects() {
local base="$1"
if [[ -f "$base/package.json" ]]; then
PROJECT_DIRS+=("$base")
return
fi
# find subdirectories containing package.json
while IFS= read -r -d '' pkg; do
local d
d="$(dirname "$pkg")"
if is_excluded "$d"; then continue; fi
PROJECT_DIRS+=("$d")
done < <(find "$base" -maxdepth "$MAX_DEPTH" -type f -name package.json -not -path '*/node_modules/*' -print0 2>/dev/null)
}
if [[ $INTERNAL_ONE -eq 0 ]]; then
if [[ ${#CUSTOM_FIND_DIRS[@]} -gt 0 ]]; then
for p in "${CUSTOM_FIND_DIRS[@]}"; do
collect_projects "$p"
done
else
collect_projects "."
fi
else
PROJECT_DIRS=("$INTERNAL_TARGET")
fi
# De-duplicate (portable, avoids readarray not present in macOS bash 3.2)
if [[ ${#PROJECT_DIRS[@]} -gt 0 ]]; then
DEDUPED_TMP="$(printf '%s\n' "${PROJECT_DIRS[@]}" | awk '!x[$0]++' | sort)"
PROJECT_DIRS=()
while IFS= read -r line; do
[[ -n "$line" ]] && PROJECT_DIRS+=("$line")
done <<< "$DEDUPED_TMP"
fi
if [[ ${#PROJECT_DIRS[@]} -eq 0 ]]; then
warn "No projects with package.json found."
exit 0
fi
info "Discovered ${#PROJECT_DIRS[@]} project(s)."
# --------------- Helper Functions ---------------
detect_cores() {
local cores
if command -v getconf >/dev/null 2>&1; then
cores=$(getconf _NPROCESSORS_ONLN 2>/dev/null || true)
fi
if [[ -z "$cores" || "$cores" -le 0 ]]; then
cores=$(sysctl -n hw.ncpu 2>/dev/null || true)
fi
if [[ -z "$cores" || "$cores" -le 0 ]]; then cores=4; fi
echo "$cores"
}
git_prepare_repo() {
local dir="$1" repo_root="$1"
while [[ "$repo_root" != "/" && ! -d "$repo_root/.git" ]]; do
repo_root="$(dirname "$repo_root")"
done
[[ -d "$repo_root/.git" ]] || return 0
local seen
if [[ ${#GIT_VISITED_REPOS[@]:-0} -gt 0 ]]; then
for seen in "${GIT_VISITED_REPOS[@]}"; do [[ "$seen" == "$repo_root" ]] && return 0; done
fi
GIT_VISITED_REPOS+=("$repo_root")
[[ $GIT_ENABLE -eq 1 ]] || return 0
if [[ $DRY_RUN -eq 1 ]]; then
info "[git] DRY: would checkout $GIT_BRANCH (fallback master) in $repo_root"
return 0
fi
if [[ $GIT_DIRTY_ALLOW -eq 0 ]]; then
(cd "$repo_root" && (git diff --quiet && git diff --cached --quiet)) || { warn "[git] Dirty repo; skip checkout"; return 0; }
fi
local target="$GIT_BRANCH"
(cd "$repo_root" && git rev-parse --verify "$target" >/dev/null 2>&1) || {
if (cd "$repo_root" && git rev-parse --verify master >/dev/null 2>&1); then
warn "[git] Branch $target missing; using master"; target="master"; else warn "[git] No $target or master"; return 0; fi }
(cd "$repo_root" && git checkout "$target" >/dev/null 2>&1) || { warn "[git] checkout failed"; return 0; }
if [[ $GIT_PULL -eq 1 ]]; then (cd "$repo_root" && git pull --ff-only >/dev/null 2>&1) || warn "[git] pull failed"; fi
success "[git] Ready ($target) $repo_root"
}
update_package_json_from_peer_output() {
local text="$1" label="$2"
if [[ $DRY_RUN -eq 1 ]]; then warn "DRY: peer adjust ($label)"; return 0; fi
PEER_OUTPUT="$text" node <<'NODE'
const fs=require('fs');
const txt=process.env.PEER_OUTPUT||'';
if(!fs.existsSync('package.json'))process.exit(0);
let pkg=JSON.parse(fs.readFileSync('package.json','utf8'));
const rxs=[/peer(?:Optional)?\s+(@?[^@\s]+(?:\/[^@\s]+)?)@"([^"\n]+)"/,/peer(?:Optional)?\s+(@?[^@\s]+(?:\/[^@\s]+)?)@([^\s]+)/];
const needed=new Map();
for(const line of txt.split(/\r?\n/)){for(const rx of rxs){const m=line.match(rx);if(m){if(!needed.has(m[1]))needed.set(m[1],m[2]);break;}}}
if(!needed.size){console.log('[BUMP] none');process.exit(0);} let changed=false;
const devBias=n=>/eslint|jest|typescript-eslint|testing-library/.test(n);
for(const [name,range] of needed){const dep=(pkg.dependencies&&name in pkg.dependencies)?'dependencies':(pkg.devDependencies&&name in pkg.devDependencies)?'devDependencies':(devBias(name)?'devDependencies':'devDependencies');
if(!pkg[dep])pkg[dep]={}; if(pkg[dep][name]===range)continue; pkg[dep][name]=range; console.log(`[BUMP] ${name} -> ${range} (${dep})`); changed=true;}
if(changed){fs.writeFileSync('package.json',JSON.stringify(pkg,null,2)+'\n');console.log('[BUMP] package.json updated.');} else console.log('[BUMP] no changes');
NODE
}
upgrade_dependencies() {
local mode="$1"; [[ -n "$mode" ]] || return 0; [[ -f package.json ]] || return 0
local only_list="$UPGRADE_ONLY_LIST"; local all="$UPGRADE_ALL"
if [[ $DRY_RUN -eq 1 ]]; then info "DRY: scan upgrades ($mode)"; fi
UPGRADE_MODE="$mode" ONLY_LIST="$only_list" ALL_UP="$all" INCLUDE_DEV="$UPGRADE_INCLUDE_DEV" DRY="$DRY_RUN" node <<'NODE'
const fs=require('fs'), cp=require('child_process');
const mode=process.env.UPGRADE_MODE; const only=(process.env.ONLY_LIST||'').split(/[,\s]+/).filter(Boolean);
const all=process.env.ALL_UP==='1'; const includeDev=process.env.INCLUDE_DEV==='1'; const dry=process.env.DRY==='1';
if(!fs.existsSync('package.json'))process.exit(0);
const pkg=JSON.parse(fs.readFileSync('package.json','utf8'));
function parseMajor(spec){const v=spec.replace(/^[^0-9]*/,'').split(/[^0-9.]/)[0];const m=v.split('.')[0];return parseInt(m,10);}
function latestVersion(name){try{return cp.execSync(`npm view ${name} version`,{stdio:['ignore','pipe','ignore']}).toString().trim();}catch{return null;}}
function versions(name){try{return JSON.parse(cp.execSync(`npm view ${name} versions --json`,{stdio:['ignore','pipe','ignore']}).toString());}catch{return[];}}
const sections=['dependencies','devDependencies']; let changes=[];
for(const section of sections){ if(section==='devDependencies' && !includeDev) continue; const deps=pkg[section]||{}; for(const name of Object.keys(deps)){
if(!all && only.length && !only.includes(name)) continue; const spec=deps[name]; if(/^file:|^git\+|^ssh:|^http/.test(spec)||spec.includes('#')) continue;
const latest=latestVersion(name); if(!latest) continue; const curMajor=parseMajor(spec); let target=latest;
if(mode==='wanted'){ const latestMajor=parseInt(latest.split('.')[0],10); if(latestMajor!==curMajor){ const arr=versions(name).filter(v=>parseInt(v.split('.')[0],10)===curMajor); if(arr.length){arr.sort((a,b)=>a===b?0: (a<b?1:-1)); target=arr[0]; } else continue; } }
const prefix = spec.startsWith('~')? '~' : '^'; const newSpec=`${prefix}${target}`; if(newSpec!==spec){ deps[name]=newSpec; changes.push({name,from:spec,to:newSpec,section}); }
} }
if(changes.length){ for(const c of changes) console.log(`[UPGRADE${dry?'-DRY':''}] ${c.name}: ${c.from} -> ${c.to} (${c.section})`); if(!dry){ fs.writeFileSync('package.json',JSON.stringify(pkg,null,2)+'\n'); console.log('[UPGRADE] package.json updated'); } else console.log('[UPGRADE-DRY] package.json would be updated'); }
else console.log('[UPGRADE] No changes');
NODE
}
# -------- Per-project processing --------
process_project() {
local dir="$1"
info "Processing: $dir"
pushd "$dir" >/dev/null || { error "Cannot enter $dir"; return 1; }
if [[ ! -f package.json ]]; then
warn "Skipping (no package.json)"
popd >/dev/null; return 0
fi
# Git operations (once per repo root)
git_prepare_repo "$dir" || true
# Dependency upgrades prior to installation
if [[ -n "$UPGRADE_DEPS_MODE" ]]; then
upgrade_dependencies "$UPGRADE_DEPS_MODE"
fi
# Git operations (only once per repo root)
git_prepare_repo "$dir" || { warn "Git prepare failed for $dir"; }
# Optional dependency upgrades prior to install
if [[ -n "$UPGRADE_DEPS_MODE" ]]; then
upgrade_dependencies "$UPGRADE_DEPS_MODE"
fi
# Step 1: npm install --package-lock-only (with optional flags + fallback)
local npm_base=("$NPM_CMD" install --package-lock-only)
# Append user flags
if [[ ${#EXTRA_NPM_FLAGS[@]} -gt 0 ]]; then
npm_base+=("${EXTRA_NPM_FLAGS[@]}")
fi
# Apply always legacy if requested
if [[ $LEGACY_PEER_DEPS -eq 1 ]]; then
npm_base+=(--legacy-peer-deps)
fi
local used_fallback=0
local used_bump=0
if [[ $DRY_RUN -eq 1 ]]; then
log "DRY: ${npm_base[*]}"
else
local install_status=0
local output
output="$(${npm_base[*]} 2>&1)" || install_status=$?
if [[ $install_status -ne 0 ]]; then
if echo "$output" | grep -q 'ERESOLVE'; then
warn "Peer dependency resolution failed (ERESOLVE)."
if [[ $BUMP_PEERS -eq 1 ]]; then
info "Attempting to adjust package.json to satisfy peer requirements (--bump-peers)."
update_package_json_from_peer_output "$output" "ERESOLVE" || true
if $(${npm_base[*]} >/dev/null 2>&1); then
used_bump=1
else
warn "Install still failing after bump attempt; proceeding to fallback logic."
fi
fi
if [[ $used_bump -eq 0 ]]; then
if [[ $AUTO_LEGACY_FALLBACK -eq 1 && $LEGACY_PEER_DEPS -eq 0 ]]; then
if [[ $FORCE_PEER_DEPS -eq 1 ]]; then
warn "Retrying with --force (FORCE_PEER_DEPS active)."
if ! $NPM_CMD install --package-lock-only --force; then
error "npm install retry with --force failed"; popd >/dev/null; return 1; fi
used_fallback=1
else
warn "Retrying with --legacy-peer-deps (--no-auto-legacy to disable)."
if ! $NPM_CMD install --package-lock-only --legacy-peer-deps; then
error "npm install retry with --legacy-peer-deps failed"; popd >/dev/null; return 1; fi
used_fallback=1
fi
else
error "npm install failed (ERESOLVE) and no fallback permitted."
printf '%s\n' "$output" >&2
popd >/dev/null; return 1
fi
fi
else
error "npm install --package-lock-only failed"
printf '%s\n' "$output" >&2
popd >/dev/null; return 1
fi
else
if [[ $BUMP_PEERS_ALWAYS -eq 1 || ( $BUMP_PEERS_WARN -eq 1 && "$output" == *"peer"* ) ]]; then
if [[ $BUMP_PEERS_ALWAYS -eq 1 ]]; then
info "Post-install peer analysis (--bump-peers-always)."
else
info "Peer warnings detected; attempting bump (--bump-peers-warn)."
fi
update_package_json_from_peer_output "$output" "POST" || true
# Re-run to refresh package-lock if changed; unconditional re-run is cheap for lock-only.
$NPM_CMD install --package-lock-only >/dev/null 2>&1 || true
used_bump=1
fi
fi
fi
# Step 2: remove yarn.lock
if [[ -f yarn.lock ]]; then
if [[ $DRY_RUN -eq 1 ]]; then
log "DRY: rm yarn.lock"
else
rm -f yarn.lock
fi
fi
# Step 3: npm audit fix (optional)
# Skip audit automatically if we needed a peer-deps fallback (often noisy / redundant)
if [[ $SKIP_AUDIT -eq 0 && $used_fallback -eq 0 && $used_bump -eq 0 ]]; then
if [[ $DRY_RUN -eq 1 ]]; then
log "DRY: $NPM_CMD audit fix || true"
else
if ! $NPM_CMD audit fix || true; then
warn "npm audit fix encountered issues (ignored)"
fi
fi
elif [[ $SKIP_AUDIT -eq 0 && ( $used_fallback -eq 1 || $used_bump -eq 1 ) ]]; then
warn "Skipping audit after peer resolution adjustments (run manually if needed)."
fi
# Step 4: yarn import
local import_attempted=0
if [[ $DRY_RUN -eq 1 ]]; then
log "DRY: $YARN_CMD import"
else
if ! import_output="$($YARN_CMD import 2>&1)"; then
if echo "$import_output" | grep -qi 'Failed to import.*corrupted'; then
warn "First yarn import failed: attempting to regenerate package-lock.json and retry."
rm -f package-lock.json
if [[ $LEGACY_PEER_DEPS -eq 1 || $used_fallback -eq 1 ]]; then
$NPM_CMD install --package-lock-only --legacy-peer-deps || true
else
$NPM_CMD install --package-lock-only || true
fi
if ! second_output="$($YARN_CMD import 2>&1)"; then
warn "yarn import failed after retry; falling back to 'yarn install' to generate fresh yarn.lock."
rm -f package-lock.json
if ! third_output="$($YARN_CMD install --ignore-scripts 2>&1)"; then
error "Final fallback 'yarn install' failed"
printf '%s\n' "$second_output" >&2
printf '%s\n' "$third_output" >&2
popd >/dev/null; return 1
fi
fi
import_attempted=1
else
warn "yarn import failed (non-corruption). Falling back directly to 'yarn install'."
rm -f package-lock.json
if ! alt_output="$($YARN_CMD install --ignore-scripts 2>&1)"; then
error "Fallback 'yarn install' failed"
printf '%s\n' "$import_output" >&2
printf '%s\n' "$alt_output" >&2
popd >/dev/null; return 1
fi
fi
fi
fi
# Step 5: remove package-lock.json
if [[ $DRY_RUN -eq 1 ]]; then
log "DRY: rm package-lock.json"
else
rm -f package-lock.json
fi
success "Finished: $dir"
popd >/dev/null
}
run_sequential() {
local failures=0
for dir in "${PROJECT_DIRS[@]}"; do
if ! process_project "$dir"; then
((failures++))
if [[ $KEEP_GOING -eq 0 ]]; then
error "Aborting due to failure (use --keep-going to continue)."
return 1
fi
fi
done
if [[ $failures -gt 0 ]]; then
warn "$failures project(s) failed"
return 1
fi
return 0
}
run_parallel() {
local jobs_limit="$PARALLEL_JOBS"
if [[ $PARALLEL_AUTO -eq 1 ]]; then
jobs_limit=$(detect_cores)
info "Auto-detected $jobs_limit cores for parallelism"
fi
info "Starting parallel run (concurrency=$jobs_limit)"
local -a pids=()
local -a names=()
local active=0
local failures=0
run_one() {
local dir="$1"
if [[ $PREFIX_OUTPUT -eq 1 ]]; then
# Capture output and prefix each line
{
if ! "$0" --internal-one "$dir" 2>&1; then
echo "__EXIT_STATUS__:$?" >&2
else
echo "__EXIT_STATUS__:0" >&2
fi
} | sed -e "s#^#[$dir] #"
else
if ! "$0" --internal-one "$dir"; then
echo "__EXIT_STATUS__:$?" >&2
else
echo "__EXIT_STATUS__:0" >&2
fi
fi
}
for dir in "${PROJECT_DIRS[@]}"; do
while [[ $active -ge $jobs_limit ]]; do
# wait for one to finish
for i in "${!pids[@]}"; do
if ! kill -0 "${pids[$i]}" 2>/dev/null; then
wait "${pids[$i]}" || true
unset 'pids[$i]' 'names[$i]'
((active--))
fi
done
sleep 0.1
done
# Launch background process capturing output
{
run_one "$dir"
} >"/tmp/refresh-yarn-locks-$$-$(echo "$dir" | tr '/' '_').log" 2>&1 &
pids+=($!)
names+=("$dir")
((active++))
done
# Wait remaining
for i in "${!pids[@]}"; do
wait "${pids[$i]}" || failures=$((failures+1))
# Stream log after completion (avoids interleaving)
local logf="/tmp/refresh-yarn-locks-$$-$(echo "${names[$i]}" | tr '/' '_').log"
if [[ -f "$logf" ]]; then
cat "$logf"
rm -f "$logf"
fi
if [[ $failures -gt 0 && $KEEP_GOING -eq 0 ]]; then
error "Aborting remaining due to failure (use --keep-going)."
break
fi
done
if [[ $failures -gt 0 ]]; then
warn "$failures project(s) failed"
return 1
fi
return 0
}
if [[ $INTERNAL_ONE -eq 1 ]]; then
process_project "$INTERNAL_TARGET"
exit $?
fi
if [[ $PARALLEL -eq 1 ]]; then
# Export settings so internal --internal-one inherits
export UPGRADE_DEPS_MODE UPGRADE_INCLUDE_DEV UPGRADE_ONLY_LIST UPGRADE_ALL \
GIT_ENABLE GIT_BRANCH GIT_PULL GIT_DIRTY_ALLOW BUMP_PEERS BUMP_PEERS_WARN BUMP_PEERS_ALWAYS \
LEGACY_PEER_DEPS AUTO_LEGACY_FALLBACK FORCE_PEER_DEPS EXTRA_NPM_FLAGS
info "Running in parallel (jobs=$PARALLEL_JOBS)"
run_parallel
else
run_sequential
fi
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment