Skip to content

Instantly share code, notes, and snippets.

@Esl1h
Created December 17, 2025 18:23
Show Gist options
  • Select an option

  • Save Esl1h/505eca9fa78bd0ff11d2ffa8dc60e5da to your computer and use it in GitHub Desktop.

Select an option

Save Esl1h/505eca9fa78bd0ff11d2ffa8dc60e5da to your computer and use it in GitHub Desktop.
GitFlow Analyzer - Analyzes branch structure, PR history, and CI/CD pipelines
#!/bin/bash
#
# gitflow-analyzer.sh
# Analyzes branch structure, PR history, and CI/CD pipelines
#
# Usage: ./gitflow-analyzer.sh [repo_path] [output_file]
#
# Supported CI/CD platforms:
# - Bitbucket Pipelines
# - GitHub Actions
# - GitLab CI
# - Jenkins (Jenkinsfile)
# - Azure DevOps
# - CircleCI
# - Travis CI
# - Drone CI
# - Tekton
# - ArgoCD
# - AWS CodePipeline (buildspec.yml)
#
# NOTE(review): only pipefail is enabled (no -e/-u), so individual commands
# are responsible for handling their own failures.
set -o pipefail
# $1: repository to analyze (defaults to the current directory).
REPO_PATH="${1:-.}"
# $2: report file name (defaults to a timestamped name derived from the repo).
# NOTE(review): OUTPUT_FILE is not referenced in this section — presumably the
# reporting code later in the file writes to it; verify before removing.
OUTPUT_FILE="${2:-gitflow-analysis-$(basename "$REPO_PATH")-$(date +%Y%m%d_%H%M%S).txt}"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
# Symbols
CHECK="✓"
CROSS="✗"
ARROW="→"
BULLET="•"
# Leveled loggers; all write colored prefixes to stdout (not stderr).
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_success() { echo -e "${GREEN}[OK]${NC} $*"; }
# Validate repository
# The rest of the script runs with the repository as the working directory.
cd "$REPO_PATH" 2>/dev/null || { log_error "Directory '$REPO_PATH' not found"; exit 1; }
git rev-parse --git-dir &>/dev/null || { log_error "'$REPO_PATH' is not a Git repository"; exit 1; }
REPO_NAME=$(basename "$(git rev-parse --show-toplevel)")
#===============================================================================
# PIPELINE DETECTION
#===============================================================================
detect_pipelines() {
# Scan the current working directory for known CI/CD pipeline definition
# files. Outputs one "path:Platform name" line per file found; outputs
# nothing when no pipeline files exist.
# Unmatched globs expand to their literal pattern text; the [[ -f ]] guard
# inside each loop filters those literals out.
local pipelines=()
# Bitbucket
[[ -f "bitbucket-pipelines.yml" ]] && pipelines+=("bitbucket-pipelines.yml:Bitbucket Pipelines")
# GitLab
[[ -f ".gitlab-ci.yml" ]] && pipelines+=(".gitlab-ci.yml:GitLab CI")
for inc in .gitlab-ci/*.yml .gitlab/*.yml; do
[[ -f "$inc" ]] && pipelines+=("$inc:GitLab CI (include)")
done
# Jenkins
[[ -f "Jenkinsfile" ]] && pipelines+=("Jenkinsfile:Jenkins")
[[ -f "jenkins/Jenkinsfile" ]] && pipelines+=("jenkins/Jenkinsfile:Jenkins")
for jf in Jenkinsfile.*; do
[[ -f "$jf" ]] && pipelines+=("$jf:Jenkins")
done
# Azure DevOps
[[ -f "azure-pipelines.yml" ]] && pipelines+=("azure-pipelines.yml:Azure DevOps")
for ap in .azure-pipelines/*.yml; do
[[ -f "$ap" ]] && pipelines+=("$ap:Azure DevOps")
done
# CircleCI
[[ -f ".circleci/config.yml" ]] && pipelines+=(".circleci/config.yml:CircleCI")
# GitHub Actions
if [[ -d ".github/workflows" ]]; then
for wf in .github/workflows/*.yml .github/workflows/*.yaml; do
[[ -f "$wf" ]] && pipelines+=("$wf:GitHub Actions")
done
fi
# Travis CI
[[ -f ".travis.yml" ]] && pipelines+=(".travis.yml:Travis CI")
# Drone CI
[[ -f ".drone.yml" ]] && pipelines+=(".drone.yml:Drone CI")
[[ -f ".drone.jsonnet" ]] && pipelines+=(".drone.jsonnet:Drone CI")
# Tekton
for tk in .tekton/*.yaml tekton/*.yaml; do
[[ -f "$tk" ]] && pipelines+=("$tk:Tekton")
done
# ArgoCD
[[ -f "argocd-app.yaml" ]] && pipelines+=("argocd-app.yaml:ArgoCD")
for argo in .argocd/*.yaml argocd/*.yaml; do
[[ -f "$argo" ]] && pipelines+=("$argo:ArgoCD")
done
# AWS CodeBuild/CodePipeline
[[ -f "buildspec.yml" ]] && pipelines+=("buildspec.yml:AWS CodeBuild")
[[ -f "buildspec.yaml" ]] && pipelines+=("buildspec.yaml:AWS CodeBuild")
for bs in buildspec-*.yml buildspec-*.yaml; do
[[ -f "$bs" ]] && pipelines+=("$bs:AWS CodeBuild")
done
# Woodpecker CI
[[ -f ".woodpecker.yml" ]] && pipelines+=(".woodpecker.yml:Woodpecker CI")
[[ -f ".woodpecker/pipeline.yml" ]] && pipelines+=(".woodpecker/pipeline.yml:Woodpecker CI")
# Buildkite
[[ -f ".buildkite/pipeline.yml" ]] && pipelines+=(".buildkite/pipeline.yml:Buildkite")
# Codefresh
[[ -f "codefresh.yml" ]] && pipelines+=("codefresh.yml:Codefresh")
# Semaphore
[[ -f ".semaphore/semaphore.yml" ]] && pipelines+=(".semaphore/semaphore.yml:Semaphore CI")
# Emit nothing when no pipeline files were detected; printf with an empty
# array would otherwise print a single spurious blank line.
((${#pipelines[@]})) && printf '%s\n' "${pipelines[@]}"
return 0
}
#===============================================================================
# COMMON ANALYSIS HELPERS
#===============================================================================
# Extract image/container references
extract_images() {
# List unique image/container references (image:, container:, docker: keys)
# found in a pipeline file. Arguments: $1 - file to inspect.
local file="$1"
[[ -z "$file" || ! -f "$file" ]] && return 0
# Strip only the leading "<key>:" prefix, not everything up to the LAST
# colon: the old greedy 's/.*:\s*//' turned "image: alpine:3.18" into
# "3.18", losing the image name for any tagged reference.
grep -Eo '(image|container|docker):[[:space:]]*[^ ]+' "$file" 2>/dev/null | \
sed -E 's/^[^:]*:[[:space:]]*//' | sort -u
}
# Extract environment variables
extract_env_vars() {
# List the unique environment-variable references ($VAR or ${VAR}, uppercase
# names only) that appear in a file. Arguments: $1 - file to inspect.
# Returns 0 (with no output) when the argument is empty or not a file.
local target=$1
if [[ -z "$target" || ! -f "$target" ]]; then
return 0
fi
grep -Eo '\$\{?[A-Z_][A-Z0-9_]*\}?' "$target" 2>/dev/null | tr -d '${}"' | sort -u
}
# Extract secrets/credentials patterns
extract_secrets_patterns() {
# Show up to 20 lines (as file:lineno:content) that mention secret-like
# keywords, excluding comment lines. Arguments: $1 - file to inspect.
local file="$1"
[[ -z "$file" || ! -f "$file" ]] && return 0
# grep -Hn prefixes each hit with "file:lineno:", so the old comment filter
# anchored at ^ ('^\s*#') could never match; strip lines whose *content*
# part (after the file:lineno: prefix) starts with '#'.
grep -EiHn '(secret|password|token|key|credential|api_key|apikey|auth)' "$file" 2>/dev/null | \
grep -vE '^[^:]*:[0-9]+:[[:space:]]*#' | head -20
}
# Detect security scanning tools
detect_security_tools() {
# Grep a pipeline file for mentions of known security-scanning tools and
# print one tool name per line (nothing when none are found).
# Arguments: $1 - file to inspect.
# NOTE(review): matching is substring-based and case-insensitive, so broad
# patterns like 'safety' can produce false positives.
local file="$1"
[[ -z "$file" || ! -f "$file" ]] && return 0
local tools=()
grep -qi 'tflint' "$file" && tools+=("TFLint")
grep -qi 'tfsec' "$file" && tools+=("TFSec")
grep -qi 'checkov' "$file" && tools+=("Checkov")
grep -qi 'trivy' "$file" && tools+=("Trivy")
grep -qi 'snyk' "$file" && tools+=("Snyk")
grep -qi 'sonarqube\|sonar-scanner' "$file" && tools+=("SonarQube")
grep -qi 'gitleaks' "$file" && tools+=("Gitleaks")
grep -qi 'semgrep' "$file" && tools+=("Semgrep")
grep -qi 'bandit' "$file" && tools+=("Bandit")
grep -qi 'safety' "$file" && tools+=("Safety")
grep -qi 'grype' "$file" && tools+=("Grype")
grep -qi 'syft' "$file" && tools+=("Syft")
grep -qi 'dockle' "$file" && tools+=("Dockle")
grep -qi 'hadolint' "$file" && tools+=("Hadolint")
grep -qi 'kics' "$file" && tools+=("KICS")
grep -qi 'terrascan' "$file" && tools+=("Terrascan")
grep -qi 'prowler' "$file" && tools+=("Prowler")
grep -qi 'cloudsploit' "$file" && tools+=("CloudSploit")
grep -qi 'scout\|docker.*scout' "$file" && tools+=("Docker Scout")
grep -qi 'clair' "$file" && tools+=("Clair")
grep -qi 'anchore' "$file" && tools+=("Anchore")
grep -qi 'falco' "$file" && tools+=("Falco")
grep -qi 'kubesec' "$file" && tools+=("Kubesec")
grep -qi 'polaris' "$file" && tools+=("Polaris")
grep -qi 'kube-bench' "$file" && tools+=("kube-bench")
grep -qi 'kube-hunter' "$file" && tools+=("kube-hunter")
grep -qi 'npm audit\|yarn audit' "$file" && tools+=("NPM/Yarn Audit")
grep -qi 'pip-audit\|safety check' "$file" && tools+=("Python Audit")
grep -qi 'gosec' "$file" && tools+=("GoSec")
grep -qi 'brakeman' "$file" && tools+=("Brakeman")
grep -qi 'dependency.check\|owasp' "$file" && tools+=("OWASP Dependency Check")
# Emit nothing when no tools were detected; printf with an empty array
# would otherwise print a single spurious blank line.
((${#tools[@]})) && printf '%s\n' "${tools[@]}"
return 0
}
# Detect IaC tools
detect_iac_tools() {
# Grep a pipeline file for mentions of infrastructure-as-code tooling and
# print one tool name per line (nothing when none are found).
# Arguments: $1 - file to inspect.
local file="$1"
[[ -z "$file" || ! -f "$file" ]] && return 0
local tools=()
grep -qi 'terraform' "$file" && tools+=("Terraform")
grep -qi 'terragrunt' "$file" && tools+=("Terragrunt")
grep -qi 'pulumi' "$file" && tools+=("Pulumi")
grep -qi 'cloudformation\|cfn' "$file" && tools+=("CloudFormation")
grep -qi 'ansible' "$file" && tools+=("Ansible")
grep -qi 'helm' "$file" && tools+=("Helm")
grep -qi 'kustomize' "$file" && tools+=("Kustomize")
grep -qi 'kubectl' "$file" && tools+=("kubectl")
grep -qi 'cdk\s\|aws-cdk' "$file" && tools+=("AWS CDK")
grep -qi 'cdktf' "$file" && tools+=("CDKTF")
grep -qi 'crossplane' "$file" && tools+=("Crossplane")
# Emit nothing when no tools were detected; printf with an empty array
# would otherwise print a single spurious blank line.
((${#tools[@]})) && printf '%s\n' "${tools[@]}"
return 0
}
# Detect cloud providers
detect_cloud_providers() {
# Grep a pipeline file for hints of cloud/hosting providers and print one
# provider name per line (nothing when none are found).
# Arguments: $1 - file to inspect.
# NOTE(review): patterns are intentionally loose (e.g. 'do_' for
# DigitalOcean) and can produce false positives.
local file="$1"
[[ -z "$file" || ! -f "$file" ]] && return 0
local providers=()
grep -qiE 'aws|amazon|s3:|ecr:|ecs:|eks:|lambda' "$file" && providers+=("AWS")
grep -qiE 'gcp|google|gke|gcr|bigquery' "$file" && providers+=("GCP")
grep -qiE 'azure|az\s|aks|acr' "$file" && providers+=("Azure")
grep -qiE 'digitalocean|do_' "$file" && providers+=("DigitalOcean")
grep -qiE 'linode' "$file" && providers+=("Linode")
grep -qiE 'vultr' "$file" && providers+=("Vultr")
grep -qiE 'heroku' "$file" && providers+=("Heroku")
grep -qiE 'vercel' "$file" && providers+=("Vercel")
grep -qiE 'netlify' "$file" && providers+=("Netlify")
grep -qiE 'cloudflare' "$file" && providers+=("Cloudflare")
# Emit nothing when no providers were detected; printf with an empty array
# would otherwise print a single spurious blank line.
((${#providers[@]})) && printf '%s\n' "${providers[@]}"
return 0
}
#===============================================================================
# BITBUCKET PIPELINES ANALYZER
#===============================================================================
analyze_bitbucket_pipeline() {
# Report the base image, options, branch pipelines, steps, deployments,
# OIDC roles, pipes and resource settings of a bitbucket-pipelines.yml.
# Arguments: $1 - path to the pipeline file. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML and assumes the
# conventional indentation used by Bitbucket examples.
local file="$1"
echo -e "\n--- Bitbucket Pipelines: $file ---"
# Base image
local image
image=$(grep -E '^image:' "$file" | head -1 | awk '{print $2}')
[[ -n "$image" ]] && echo "Base image: $image"
# Global options
if grep -q '^options:' "$file"; then
echo -e "\nGlobal options:"
grep -A5 '^options:' "$file" | grep -E '^\s+\w+:' | sed 's/^/ /'
fi
# Definitions (anchors, caches, services)
if grep -q '^definitions:' "$file"; then
echo -e "\nDefinitions:"
grep -A2 'caches:' "$file" | grep -E '^\s+\w+:' | awk '{print " Cache: " $1}' | sed 's/://'
grep -A2 'services:' "$file" | grep -E '^\s+\w+:' | awk '{print " Service: " $1}' | sed 's/://'
fi
# Branches with pipelines
echo -e "\nBranches with pipelines:"
# Interpolate the shell BULLET into the awk program; the old single-quoted
# '$BULLET' was an uninitialized awk variable, which awk evaluates as $0
# (the whole line), duplicating each branch name in the output.
grep -E '^\s{4}[a-zA-Z0-9_*/"'\''.-]+:\s*$' "$file" 2>/dev/null | \
sed 's/://g; s/["\x27]//g' | awk '{print " '"$BULLET"' " $1}'
# Pipeline steps
echo -e "\nPipeline steps:"
grep -E '^\s+-\s*step:' "$file" -A10 | grep -E 'name:' | \
sed 's/.*name:\s*/ '"$BULLET"' /' | head -30
# Deployment environments
echo -e "\nDeployment environments:"
grep -E '^\s+deployment:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
# Manual triggers
local manual_count
# grep -c already prints "0" when nothing matches (while exiting 1), so the
# old '|| echo 0' fallback produced a second line ("0\n0"); '|| true' keeps
# the count intact and ${manual_count:-0} covers an unreadable file.
manual_count=$(grep -c 'trigger:\s*manual' "$file" 2>/dev/null || true)
echo -e "\nManual triggers: ${manual_count:-0}"
# OIDC configuration
if grep -q 'oidc:\s*true' "$file" 2>/dev/null; then
echo -e "\nOIDC: Enabled"
echo " AWS Roles:"
grep -oE 'arn:aws:iam::[0-9]+:role/[a-zA-Z0-9_/-]+' "$file" | \
sort -u | sed 's/^/ /'
fi
# Pipes (Bitbucket specific)
echo -e "\nBitbucket Pipes:"
grep -oE 'pipe:\s*[a-zA-Z0-9/-]+' "$file" | \
sed 's/pipe:\s*/ '"$BULLET"' /' | sort -u
# Parallel steps
local parallel_count
# Same grep -c fallback fix as manual_count above.
parallel_count=$(grep -c '^\s*-\s*parallel:' "$file" 2>/dev/null || true)
echo "Parallel blocks: ${parallel_count:-0}"
# Artifacts
echo -e "\nArtifacts:"
grep -A10 'artifacts:' "$file" 2>/dev/null | grep -E '^\s+-' | \
awk '{print " " $0}' | sort -u | head -10
# After-script
if grep -q 'after-script:' "$file"; then
echo "After-script: Configured"
fi
# Max-time
local max_time
max_time=$(grep -oE 'max-time:\s*[0-9]+' "$file" | head -1)
# Strip everything up to the last ':' or space so both "max-time: 30" and
# "max-time:30" yield the bare number (the old '#*: ' required the space).
[[ -n "$max_time" ]] && echo "Max execution time: ${max_time##*[: ]} minutes"
# Size (memory)
if grep -q 'size:\s*2x' "$file"; then
echo "Memory: 2x (8GB)"
fi
# Clone settings
if grep -q 'clone:' "$file"; then
echo -e "\nClone settings:"
grep -A5 '^clone:' "$file" | grep -v '^clone:' | grep -E '^\s+\w+:' | sed 's/^/ /'
fi
}
#===============================================================================
# GITHUB ACTIONS ANALYZER
#===============================================================================
analyze_github_actions() {
# Report the name, triggers, filters, jobs, runners, actions, secrets and
# feature usage of a single GitHub Actions workflow file.
# Arguments: $1 - path to the workflow YAML. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML; it assumes the
# conventional 2-space indentation most workflows use, and fixed -A windows
# (e.g. -A30 after 'on:') may truncate or over-read unusual files.
local file="$1"
echo -e "\n--- GitHub Actions: $file ---"
# Workflow name
local wf_name
wf_name=$(grep -E '^name:' "$file" | head -1 | sed 's/name:\s*//')
[[ -n "$wf_name" ]] && echo "Workflow: $wf_name"
# Triggers
echo -e "\nTriggers:"
grep -A30 '^on:' "$file" 2>/dev/null | \
grep -E '^\s{2}[a-z_]+:' | sed 's/://g' | awk '{print " '"$BULLET"' " $1}'
# Branch filters
echo -e "\nBranch filters:"
grep -A5 'branches:' "$file" 2>/dev/null | grep -E '^\s+-' | \
awk '{print " " $0}' | head -10
# Path filters
if grep -q 'paths:' "$file"; then
echo -e "\nPath filters:"
grep -A10 'paths:' "$file" 2>/dev/null | grep -E '^\s+-' | \
awk '{print " " $0}' | head -10
fi
# Scheduled runs (cron)
local cron_expr
cron_expr=$(grep -oE "cron:\s*['\"][^'\"]+['\"]" "$file" | head -1)
[[ -n "$cron_expr" ]] && echo -e "\nSchedule: $cron_expr"
# Concurrency
if grep -q 'concurrency:' "$file"; then
echo -e "\nConcurrency:"
grep -A3 'concurrency:' "$file" | sed 's/^/ /'
fi
# Permissions
if grep -q 'permissions:' "$file"; then
echo -e "\nPermissions:"
grep -A10 'permissions:' "$file" | grep -E '^\s+\w+:' | sed 's/^/ /' | head -10
fi
# Environment variables
if grep -q '^env:' "$file"; then
echo -e "\nGlobal environment variables:"
grep -A10 '^env:' "$file" | grep -E '^\s+[A-Z_]+:' | \
awk -F: '{print " '"$BULLET"' " $1}' | head -10
fi
# Jobs
# Job ids are assumed to sit at the first indentation level under 'jobs:'.
echo -e "\nJobs:"
grep -E '^ [a-zA-Z0-9_-]+:$' "$file" 2>/dev/null | \
sed 's/://g' | awk '{print " '"$BULLET"' " $1}'
# Job dependencies
# NOTE(review): the sed here is a no-op ('needs:' -> 'needs:'); kept as-is.
echo -e "\nJob dependencies:"
grep -E '^\s+needs:' "$file" | sed 's/needs:/needs:/' | \
awk '{print " " $0}' | sort -u
# Runners
echo -e "\nRunners:"
grep -E 'runs-on:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
# Container jobs
if grep -q 'container:' "$file"; then
echo -e "\nContainer jobs:"
grep -B2 -A3 'container:' "$file" | grep -E 'image:' | \
sed 's/.*image:\s*/ '"$BULLET"' /'
fi
# Services
if grep -q 'services:' "$file"; then
echo -e "\nServices:"
grep -A20 'services:' "$file" | grep -E '^\s{6}[a-z]+:' | \
sed 's/://g' | awk '{print " '"$BULLET"' " $1}'
fi
# Matrix strategy
if grep -q 'matrix:' "$file"; then
echo -e "\nMatrix strategy:"
grep -A15 'matrix:' "$file" | grep -E '^\s+\w+:' | sed 's/^/ /' | head -10
fi
# Actions used
echo -e "\nActions used:"
grep -oE 'uses:\s*[a-zA-Z0-9./_@-]+' "$file" | \
sed 's/uses:\s*/ '"$BULLET"' /' | sort -u | head -20
# Secrets referenced
echo -e "\nSecrets referenced:"
grep -oE '\$\{\{\s*secrets\.[A-Z_]+\s*\}\}' "$file" | \
sed 's/.*secrets\./ '"$BULLET"' /; s/\s*}}//' | sort -u
# Outputs
if grep -q 'outputs:' "$file"; then
echo -e "\nJob outputs:"
grep -B1 -A5 'outputs:' "$file" | grep -E '^\s+\w+:' | sed 's/^/ /' | head -10
fi
# Artifacts
if grep -q 'upload-artifact\|download-artifact' "$file"; then
echo "Artifacts: Configured"
fi
# Caching
if grep -q 'actions/cache' "$file"; then
echo "Caching: Enabled"
fi
# OIDC
if grep -q 'id-token:\s*write' "$file"; then
echo "OIDC: Enabled (id-token: write)"
fi
# Reusable workflows
if grep -q 'workflow_call:' "$file"; then
echo "Type: Reusable workflow"
fi
if grep -q 'uses:.*\.github/workflows/' "$file"; then
echo -e "\nReusable workflows called:"
grep -oE 'uses:.*\.github/workflows/[^@]+' "$file" | \
sed 's/uses:\s*/ '"$BULLET"' /' | sort -u
fi
}
#===============================================================================
# GITLAB CI ANALYZER
#===============================================================================
analyze_gitlab_ci() {
# Report stages, variables, jobs, environments and feature usage of a
# .gitlab-ci.yml file.
# Arguments: $1 - path to the file. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML and relies on
# conventional top-level key placement.
local file="$1"
echo -e "\n--- GitLab CI: $file ---"
# Default image
local default_image
default_image=$(grep -E '^image:' "$file" | head -1 | awk '{print $2}')
[[ -n "$default_image" ]] && echo "Default image: $default_image"
# Stages
echo -e "\nStages:"
grep -A30 '^stages:' "$file" 2>/dev/null | grep -E '^\s+-' | \
awk '{print " " $0}'
# Variables
if grep -q '^variables:' "$file"; then
echo -e "\nGlobal variables:"
grep -A20 '^variables:' "$file" | grep -E '^\s+[A-Z_]+:' | \
awk -F: '{print " '"$BULLET"' " $1}' | head -15
fi
# Includes
if grep -q '^include:' "$file"; then
echo -e "\nIncludes:"
grep -A20 '^include:' "$file" | grep -E '(local|remote|project|template):' | \
sed 's/^/ /'
fi
# Jobs
echo -e "\nJobs:"
grep -E '^[a-zA-Z0-9_.-]+:$' "$file" 2>/dev/null | \
grep -vE '^(stages|variables|default|include|cache|services|before_script|after_script|workflow):$' | \
sed 's/://g' | awk '{print " '"$BULLET"' " $1}'
# Hidden jobs (templates)
echo -e "\nHidden jobs (templates):"
grep -E '^\.[a-zA-Z0-9_-]+:' "$file" | sed 's/://g' | \
awk '{print " '"$BULLET"' " $1}'
# Rules
if grep -q 'rules:' "$file"; then
echo -e "\nRules patterns:"
grep -A5 'rules:' "$file" | grep -E 'if:' | \
sed 's/.*if:\s*/ '"$BULLET"' /' | sort -u | head -10
fi
# Only/Except (legacy)
if grep -q 'only:\|except:' "$file"; then
echo "Legacy only/except: Present (consider migrating to rules)"
fi
# Environments
echo -e "\nEnvironments:"
grep -E '^\s+environment:' "$file" -A3 | grep -E 'name:' | \
sed 's/.*name:\s*/ '"$BULLET"' /' | sort -u
# Manual jobs
local manual_jobs
# grep -c already prints "0" when nothing matches (while exiting 1), so the
# old '|| echo 0' fallback produced a second line ("0\n0"); '|| true' keeps
# the count intact and ${manual_jobs:-0} covers an unreadable file.
manual_jobs=$(grep -c 'when:\s*manual' "$file" 2>/dev/null || true)
echo -e "\nManual jobs: ${manual_jobs:-0}"
# Needs (DAG)
if grep -q 'needs:' "$file"; then
echo "DAG mode (needs): Enabled"
fi
# Parallel jobs
if grep -q 'parallel:' "$file"; then
echo -e "\nParallel configurations:"
grep -B2 'parallel:' "$file" | grep -E '^[a-zA-Z]' | sed 's/:$//' | \
awk '{print " '"$BULLET"' " $1}'
fi
# Services
if grep -q '^services:\|^\s+services:' "$file"; then
echo -e "\nServices:"
grep -A10 'services:' "$file" | grep -E '^\s+-\s*(name:|image:)' | \
sed 's/.*:\s*/ '"$BULLET"' /' | sort -u
fi
# Cache
if grep -q 'cache:' "$file"; then
echo -e "\nCache configuration:"
grep -A10 'cache:' "$file" | grep -E '(key|paths|policy):' | sed 's/^/ /'
fi
# Artifacts
if grep -q 'artifacts:' "$file"; then
echo -e "\nArtifacts:"
grep -A10 'artifacts:' "$file" | grep -E '(paths|reports|expire_in):' | sed 's/^/ /'
fi
# Extends
if grep -q 'extends:' "$file"; then
echo -e "\nExtends usage:"
grep 'extends:' "$file" | sed 's/.*extends:\s*/ '"$BULLET"' /' | sort -u
fi
# Trigger (downstream pipelines)
if grep -q 'trigger:' "$file"; then
echo -e "\nDownstream triggers:"
grep -A3 'trigger:' "$file" | grep -E '(project|include):' | sed 's/^/ /'
fi
# Resource group
if grep -q 'resource_group:' "$file"; then
echo -e "\nResource groups:"
grep 'resource_group:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
fi
# Allow failure
local allow_failure
# Same grep -c fallback fix as for manual_jobs above.
allow_failure=$(grep -c 'allow_failure:\s*true' "$file" 2>/dev/null || true)
echo "Jobs with allow_failure: ${allow_failure:-0}"
# Interruptible
if grep -q 'interruptible:' "$file"; then
echo "Interruptible jobs: Configured"
fi
# Workflow rules
if grep -q '^workflow:' "$file"; then
echo -e "\nWorkflow rules: Configured"
fi
}
#===============================================================================
# JENKINS ANALYZER
#===============================================================================
analyze_jenkins() {
# Report the pipeline type, agent, tools, stages, conditions, credentials
# and notification usage of a Jenkinsfile.
# Arguments: $1 - path to the Jenkinsfile. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over Groovy; it assumes the
# common declarative-pipeline layout and fixed -A windows may truncate.
local file="$1"
echo -e "\n--- Jenkins: $file ---"
# Pipeline type
if grep -q 'pipeline\s*{' "$file"; then
echo "Type: Declarative Pipeline"
elif grep -q 'node\s*{' "$file"; then
echo "Type: Scripted Pipeline"
fi
# Agent
echo -e "\nAgent configuration:"
grep -A10 'agent\s*{' "$file" | grep -E '(label|docker|kubernetes|any|none)' | \
sed 's/^/ /'
# Tools
if grep -q 'tools\s*{' "$file"; then
echo -e "\nTools:"
grep -A10 'tools\s*{' "$file" | grep -E '^\s+\w+\s' | \
awk '{print " '"$BULLET"' " $1 ": " $2}'
fi
# Environment
if grep -q 'environment\s*{' "$file"; then
echo -e "\nEnvironment variables:"
grep -A20 'environment\s*{' "$file" | grep -E '^\s+[A-Z_]+\s*=' | \
awk -F= '{print " '"$BULLET"' " $1}' | head -15
fi
# Stages
echo -e "\nStages:"
grep -E "stage\s*\(['\"]" "$file" | \
sed "s/.*stage\s*(['\"]//; s/['\"]).*//; s/^/ $BULLET /"
# Parallel stages
if grep -q 'parallel\s*{' "$file"; then
echo "Parallel stages: Yes"
fi
# When conditions
if grep -q 'when\s*{' "$file"; then
echo -e "\nWhen conditions:"
grep -A5 'when\s*{' "$file" | grep -E '(branch|expression|environment|tag)' | \
sed 's/^/ /' | sort -u
fi
# Post actions
if grep -q 'post\s*{' "$file"; then
echo -e "\nPost actions:"
grep -E '(always|success|failure|unstable|changed|aborted)\s*{' "$file" | \
sed 's/{.*//; s/^/ '"$BULLET"' /'
fi
# Input (manual approval)
if grep -q 'input\s*{' "$file"; then
echo "Manual approval: Configured"
fi
# Options
if grep -q 'options\s*{' "$file"; then
echo -e "\nOptions:"
grep -A15 'options\s*{' "$file" | \
grep -E '(timeout|retry|disableConcurrentBuilds|buildDiscarder|skipDefaultCheckout)' | \
sed 's/^/ /'
fi
# Parameters
if grep -q 'parameters\s*{' "$file"; then
echo -e "\nParameters:"
grep -E '(string|choice|booleanParam|text)\s*\(' "$file" | \
grep -oE "name:\s*['\"][^'\"]+['\"]" | \
sed "s/name:\s*['\"]//; s/['\"]//; s/^/ $BULLET /"
fi
# Shared libraries
if grep -q '@Library' "$file"; then
echo -e "\nShared libraries:"
grep -oE "@Library\(['\"][^'\"]+['\"]" "$file" | \
sed "s/@Library(['\"]//; s/['\"]//; s/^/ $BULLET /"
fi
# Credentials
echo -e "\nCredentials used:"
grep -oE "credentials\(['\"][^'\"]+['\"]" "$file" | \
sed "s/credentials(['\"]//; s/['\"]//; s/^/ $BULLET /" | sort -u
# withCredentials
if grep -q 'withCredentials' "$file"; then
echo "withCredentials blocks: Yes"
fi
# Stash/unstash
if grep -q 'stash\|unstash' "$file"; then
echo "Stash/unstash: Configured"
fi
# Archive artifacts
if grep -q 'archiveArtifacts' "$file"; then
echo "Archive artifacts: Configured"
fi
# Email notifications
# NOTE(review): 'mail' is a broad substring (also matches e.g. 'email',
# 'mailto') — possible false positives; confirm before tightening.
if grep -q 'emailext\|mail' "$file"; then
echo "Email notifications: Configured"
fi
# Slack notifications
if grep -q 'slackSend' "$file"; then
echo "Slack notifications: Configured"
fi
}
#===============================================================================
# AZURE DEVOPS ANALYZER
#===============================================================================
analyze_azure_devops() {
# Report triggers, pools, variables, stages/jobs, environments, templates
# and task usage of an Azure DevOps pipeline YAML file.
# Arguments: $1 - path to the pipeline file. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML with fixed -A
# windows; unusual formatting may be missed or over-read.
local file="$1"
echo -e "\n--- Azure DevOps: $file ---"
# Pipeline name
local pipeline_name
pipeline_name=$(grep -E '^name:' "$file" | head -1 | sed 's/name:\s*//')
[[ -n "$pipeline_name" ]] && echo "Pipeline: $pipeline_name"
# Trigger
echo -e "\nTriggers:"
if grep -q '^trigger:' "$file"; then
grep -A10 '^trigger:' "$file" | grep -E '(branches|paths|tags)' -A3 | \
grep -E '^\s+-' | sed 's/^/ /'
fi
# PR trigger
if grep -q '^pr:' "$file"; then
echo "PR trigger: Configured"
fi
# Schedules
if grep -q 'schedules:' "$file"; then
echo -e "\nSchedules:"
grep -A5 'cron:' "$file" | grep 'cron:' | sed 's/.*cron:\s*/ '"$BULLET"' /'
fi
# Pool
echo -e "\nAgent pool:"
grep -A5 'pool:' "$file" | grep -E '(vmImage|name):' | sed 's/^/ /'
# Variables
if grep -q '^variables:' "$file"; then
echo -e "\nVariables:"
grep -A20 '^variables:' "$file" | grep -E '^\s*-?\s*name:' | \
sed 's/.*name:\s*/ '"$BULLET"' /' | head -15
fi
# Variable groups
if grep -q 'group:' "$file"; then
echo -e "\nVariable groups:"
grep 'group:' "$file" | sed 's/.*group:\s*/ '"$BULLET"' /' | sort -u
fi
# Stages
if grep -q '^stages:' "$file"; then
echo -e "\nStages:"
grep -E '^\s+-\s*stage:' "$file" | sed 's/.*stage:\s*/ '"$BULLET"' /'
fi
# Jobs
echo -e "\nJobs:"
grep -E '^\s+-\s*job:' "$file" | sed 's/.*job:\s*/ '"$BULLET"' /'
# Deployment jobs
if grep -q 'deployment:' "$file"; then
echo -e "\nDeployment jobs:"
grep -E '^\s+-\s*deployment:' "$file" | sed 's/.*deployment:\s*/ '"$BULLET"' /'
fi
# Environments
if grep -q 'environment:' "$file"; then
echo -e "\nEnvironments:"
grep 'environment:' "$file" | sed 's/.*environment:\s*/ '"$BULLET"' /' | sort -u
fi
# Strategy
if grep -q 'strategy:' "$file"; then
echo -e "\nDeployment strategies:"
grep -A5 'strategy:' "$file" | grep -E '(runOnce|rolling|canary):' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Templates
if grep -q 'template:' "$file"; then
echo -e "\nTemplates used:"
grep 'template:' "$file" | sed 's/.*template:\s*/ '"$BULLET"' /' | sort -u
fi
# Extends
if grep -q '^extends:' "$file"; then
echo "Extends template: Yes"
fi
# Steps
echo -e "\nTask types used:"
grep -oE 'task:\s*[a-zA-Z]+@[0-9]+' "$file" | \
sed 's/task:\s*/ '"$BULLET"' /' | sort | uniq -c | sort -rn | head -10
# Script types
# Each grep -c prints a single count (0 when no match) that xargs formats.
echo -e "\nScript types:"
grep -cE '^\s+-\s*script:' "$file" | xargs -I{} echo " script: {}"
grep -cE '^\s+-\s*bash:' "$file" | xargs -I{} echo " bash: {}"
grep -cE '^\s+-\s*powershell:' "$file" | xargs -I{} echo " powershell: {}"
grep -cE '^\s+-\s*pwsh:' "$file" | xargs -I{} echo " pwsh: {}"
# Conditions
if grep -q 'condition:' "$file"; then
echo -e "\nConditions:"
grep 'condition:' "$file" | sed 's/.*condition:\s*/ /' | sort -u | head -5
fi
# Dependencies
if grep -q 'dependsOn:' "$file"; then
echo "Job dependencies: Configured"
fi
# Service connections
echo -e "\nService connections:"
grep -oE '(azureSubscription|serviceConnection|dockerRegistryServiceConnection):\s*[^ ]+' "$file" | \
sed 's/^/ '"$BULLET"' /' | sort -u
# Artifacts
# NOTE(review): 'publish\|download' is a broad substring check — may match
# unrelated step names; confirm before tightening.
if grep -q 'publish\|download' "$file"; then
echo "Artifacts: Configured"
fi
# Container jobs
if grep -q 'container:' "$file"; then
echo "Container jobs: Yes"
fi
}
#===============================================================================
# CIRCLECI ANALYZER
#===============================================================================
analyze_circleci() {
# Report version, orbs, executors, jobs, workflows and feature usage of a
# CircleCI config file.
# Arguments: $1 - path to .circleci/config.yml. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML; the 2-space-indent
# assumptions for job/workflow names depend on conventional formatting.
local file="$1"
echo -e "\n--- CircleCI: $file ---"
# Version
local version
version=$(grep -E '^version:' "$file" | head -1 | awk '{print $2}')
echo "Config version: $version"
# Setup
if grep -q '^setup:\s*true' "$file"; then
echo "Dynamic config: Enabled"
fi
# Orbs
if grep -q '^orbs:' "$file"; then
echo -e "\nOrbs:"
grep -A20 '^orbs:' "$file" | grep -E '^\s+[a-z]' | \
sed 's/^/ '"$BULLET"' /' | head -10
fi
# Executors
if grep -q '^executors:' "$file"; then
echo -e "\nExecutors:"
grep -A30 '^executors:' "$file" | grep -E '^\s{2}[a-z]' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Commands (reusable)
if grep -q '^commands:' "$file"; then
echo -e "\nReusable commands:"
grep -A50 '^commands:' "$file" | grep -E '^\s{2}[a-z_-]+:$' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Jobs
# NOTE(review): this matches any 2-space-indented key, so it can pick up
# non-job keys in other top-level sections despite the exclusion list.
echo -e "\nJobs:"
grep -E '^\s{2}[a-z_-]+:$' "$file" | \
grep -v -E '(docker|machine|macos|executor|steps|environment|working_directory):' | \
sed 's/://; s/^/ '"$BULLET"' /'
# Executors used in jobs
echo -e "\nExecution environments:"
grep -E '(docker|machine|macos):\s*$' "$file" -A3 | grep -E '(image|-\s)' | \
sed 's/^/ /' | sort -u | head -10
# Resource class
echo -e "\nResource classes:"
grep -E 'resource_class:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
# Workflows
echo -e "\nWorkflows:"
grep -A100 '^workflows:' "$file" | grep -E '^\s{2}[a-z_-]+:$' | \
sed 's/://; s/^/ '"$BULLET"' /'
# Workflow filters
if grep -q 'filters:' "$file"; then
echo -e "\nBranch/tag filters:"
grep -A10 'filters:' "$file" | grep -E '(branches|tags):' -A3 | \
grep -E '(only|ignore):' | sed 's/^/ /' | head -10
fi
# Requires (dependencies)
if grep -q 'requires:' "$file"; then
echo "Job dependencies: Configured"
fi
# Approval jobs
if grep -q 'type:\s*approval' "$file"; then
echo "Manual approval: Configured"
fi
# Scheduled workflows
if grep -q 'triggers:' "$file"; then
echo -e "\nScheduled triggers:"
grep -A10 'triggers:' "$file" | grep 'cron:' | \
sed 's/.*cron:\s*/ '"$BULLET"' /'
fi
# Context
if grep -q 'context:' "$file"; then
echo -e "\nContexts:"
grep 'context:' "$file" | sed 's/.*context:\s*/ '"$BULLET"' /' | sort -u
fi
# Parameters
if grep -q '^parameters:' "$file"; then
echo -e "\nPipeline parameters:"
grep -A30 '^parameters:' "$file" | grep -E '^\s{2}[a-z]' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Persist/attach workspace
if grep -q 'persist_to_workspace\|attach_workspace' "$file"; then
echo "Workspace sharing: Configured"
fi
# Store artifacts
if grep -q 'store_artifacts' "$file"; then
echo "Artifacts: Configured"
fi
# Store test results
if grep -q 'store_test_results' "$file"; then
echo "Test results: Configured"
fi
# Cache
if grep -q 'save_cache\|restore_cache' "$file"; then
echo "Caching: Configured"
fi
# Matrix
if grep -q 'matrix:' "$file"; then
echo "Matrix builds: Configured"
fi
}
#===============================================================================
# TRAVIS CI ANALYZER
#===============================================================================
analyze_travis_ci() {
# Report language, OS, services, build matrix, lifecycle hooks and deploy
# providers of a .travis.yml file.
# Arguments: $1 - path to the file. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep/sed over YAML with fixed -A
# windows; unusual formatting may be missed or over-read.
local file="$1"
echo -e "\n--- Travis CI: $file ---"
# Language
local language
language=$(grep -E '^language:' "$file" | awk '{print $2}')
echo "Language: ${language:-not specified}"
# OS
echo -e "\nOS:"
grep -E '^os:' "$file" -A5 | grep -E '^\s*-' | sed 's/^/ /'
# Dist
local dist
dist=$(grep -E '^dist:' "$file" | awk '{print $2}')
[[ -n "$dist" ]] && echo "Distribution: $dist"
# Services
if grep -q '^services:' "$file"; then
echo -e "\nServices:"
grep -A10 '^services:' "$file" | grep -E '^\s*-' | sed 's/^/ /'
fi
# Language versions
echo -e "\nLanguage versions:"
grep -E '^(node_js|python|ruby|go|php|java|scala|perl|rust):' "$file" -A10 | \
grep -E '^\s*-' | sed 's/^/ /'
# Matrix/jobs
if grep -q '^jobs:\|^matrix:' "$file"; then
echo -e "\nBuild matrix:"
grep -A30 '^jobs:\|^matrix:' "$file" | grep -E '(include|exclude|allow_failures):' -A5 | \
head -15 | sed 's/^/ /'
fi
# Stages
if grep -q '^stages:' "$file"; then
echo -e "\nStages:"
grep -A10 '^stages:' "$file" | grep -E '^\s*-' | sed 's/^/ /'
fi
# Branches
if grep -q '^branches:' "$file"; then
echo -e "\nBranch configuration:"
grep -A10 '^branches:' "$file" | grep -E '(only|except):' -A3 | sed 's/^/ /'
fi
# Env
if grep -q '^env:' "$file"; then
echo -e "\nEnvironment:"
grep -A10 '^env:' "$file" | grep -E '(global|matrix|jobs):' -A3 | \
head -10 | sed 's/^/ /'
fi
# Cache
if grep -q '^cache:' "$file"; then
echo -e "\nCache:"
grep -A5 '^cache:' "$file" | sed 's/^/ /'
fi
# Lifecycle hooks
# Probe each well-known Travis phase key and mark the ones present.
echo -e "\nLifecycle hooks:"
for hook in before_install install before_script script after_success after_failure before_deploy deploy after_deploy; do
grep -q "^${hook}:" "$file" && echo " $CHECK $hook"
done
# Notifications
if grep -q '^notifications:' "$file"; then
echo -e "\nNotifications:"
grep -A10 '^notifications:' "$file" | grep -E '^\s{2}[a-z]+:' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Addons
if grep -q '^addons:' "$file"; then
echo -e "\nAddons:"
grep -A15 '^addons:' "$file" | grep -E '^\s{2}[a-z]+:' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Deploy
if grep -q '^deploy:' "$file"; then
echo -e "\nDeploy providers:"
grep -A20 '^deploy:' "$file" | grep 'provider:' | \
sed 's/.*provider:\s*/ '"$BULLET"' /' | sort -u
fi
}
#===============================================================================
# DRONE CI ANALYZER
#===============================================================================
analyze_drone_ci() {
# Report kind/type, platform, triggers, steps, images, services and secret
# usage of a .drone.yml file.
# Arguments: $1 - path to the file. Outputs the report to stdout.
# NOTE(review): only the first document in a multi-document .drone.yml is
# reflected in the kind/type/name fields (head -1); confirm whether
# multi-pipeline files need per-document reporting.
local file="$1"
echo -e "\n--- Drone CI: $file ---"
# Kind
local kind
kind=$(grep -E '^kind:' "$file" | head -1 | awk '{print $2}')
echo "Kind: ${kind:-pipeline}"
# Type
local type
type=$(grep -E '^type:' "$file" | head -1 | awk '{print $2}')
echo "Type: ${type:-docker}"
# Name
local name
name=$(grep -E '^name:' "$file" | head -1 | sed 's/name:\s*//')
[[ -n "$name" ]] && echo "Pipeline: $name"
# Platform
if grep -q '^platform:' "$file"; then
echo -e "\nPlatform:"
grep -A5 '^platform:' "$file" | grep -E '(os|arch):' | sed 's/^/ /'
fi
# Trigger
if grep -q '^trigger:' "$file"; then
echo -e "\nTrigger:"
grep -A15 '^trigger:' "$file" | grep -E '(branch|event|ref|status):' -A3 | \
head -10 | sed 's/^/ /'
fi
# Clone
if grep -q '^clone:' "$file"; then
echo -e "\nClone settings:"
grep -A5 '^clone:' "$file" | grep -v '^clone:' | sed 's/^/ /'
fi
# Steps
echo -e "\nSteps:"
grep -E '^\s*-\s*name:' "$file" | sed 's/.*name:\s*/ '"$BULLET"' /'
# Images
echo -e "\nImages:"
grep -E '^\s+image:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
# Services
if grep -q '^services:' "$file"; then
echo -e "\nServices:"
grep -A30 '^services:' "$file" | grep -E '^\s*-\s*name:' | \
sed 's/.*name:\s*/ '"$BULLET"' /'
fi
# Volumes
if grep -q 'volumes:' "$file"; then
echo "Volumes: Configured"
fi
# Secrets
if grep -q 'from_secret:' "$file"; then
echo -e "\nSecrets:"
grep 'from_secret:' "$file" | awk '{print " '"$BULLET"' " $2}' | sort -u
fi
# Depends on
if grep -q 'depends_on:' "$file"; then
echo "Pipeline dependencies: Configured"
fi
# When conditions
if grep -q 'when:' "$file"; then
echo "Conditional steps: Yes"
fi
# Promotions
if grep -q 'promotion' "$file"; then
echo "Promotions: Configured"
fi
}
#===============================================================================
# AWS CODEBUILD ANALYZER
#===============================================================================
analyze_aws_codebuild() {
# Report spec version, environment, build phases (with command counts),
# batch settings, reports, artifacts and cache of a CodeBuild buildspec.
# Arguments: $1 - path to buildspec.yml. Outputs the report to stdout.
# NOTE(review): parsing is heuristic grep over YAML with fixed -A windows.
local file="$1"
echo -e "\n--- AWS CodeBuild: $file ---"
# Version
local version
version=$(grep -E '^version:' "$file" | awk '{print $2}')
echo "Spec version: ${version:-0.2}"
# Run-as
local run_as
run_as=$(grep -E '^run-as:' "$file" | awk '{print $2}')
[[ -n "$run_as" ]] && echo "Run as: $run_as"
# Environment variables
if grep -q '^env:' "$file"; then
echo -e "\nEnvironment:"
# Shell
grep -A2 'shell:' "$file" | grep 'shell:' | sed 's/.*shell:\s*/ Shell: /'
# Variables
echo " Variables:"
grep -A20 'variables:' "$file" | grep -E '^\s{4}[A-Z_]+:' | \
awk -F: '{print " '"$BULLET"' " $1}' | head -10
# Parameter store
if grep -q 'parameter-store:' "$file"; then
echo " Parameter Store: Configured"
fi
# Secrets Manager
if grep -q 'secrets-manager:' "$file"; then
echo " Secrets Manager: Configured"
fi
# Exported variables
if grep -q 'exported-variables:' "$file"; then
echo " Exported variables:"
grep -A10 'exported-variables:' "$file" | grep -E '^\s+-' | \
sed 's/^/ /'
fi
fi
# Phases
echo -e "\nBuild phases:"
for phase in install pre_build build post_build; do
if grep -q "^phases:" "$file" && grep -q "${phase}:" "$file"; then
local commands
# The old pattern '^\s+-' was a BRE, where '+' is a literal plus, so the
# command count was always 0; -E makes '+' a quantifier. grep -c prints
# "0" itself when nothing matches, so the old '|| echo 0' fallback also
# produced a duplicate "0" line; '|| true' just absorbs the exit status.
commands=$(grep -A30 "${phase}:" "$file" | grep -Ec '^[[:space:]]+-' || true)
echo " $CHECK $phase (${commands:-0} commands)"
# Runtime versions
if [[ "$phase" == "install" ]]; then
grep -A15 'install:' "$file" | grep -A10 'runtime-versions:' | \
grep -E '^\s+\w+:' | sed 's/^/ /'
fi
fi
done
# Finally block
if grep -q 'finally:' "$file"; then
echo " $CHECK finally (cleanup)"
fi
# Proxy
if grep -q '^proxy:' "$file"; then
echo -e "\nProxy: Configured"
fi
# Batch
if grep -q '^batch:' "$file"; then
echo -e "\nBatch build: Configured"
grep -A20 '^batch:' "$file" | grep -E '(fast-fail|build-graph|build-list|build-matrix):' | \
sed 's/^/ /'
fi
# Reports
if grep -q '^reports:' "$file"; then
echo -e "\nReports:"
grep -A30 '^reports:' "$file" | grep -E '^\s{2}[a-zA-Z_-]+:' | \
sed 's/://; s/^/ '"$BULLET"' /'
fi
# Artifacts
if grep -q '^artifacts:' "$file"; then
echo -e "\nArtifacts:"
grep -A15 '^artifacts:' "$file" | grep -E '(files|name|discard-paths|base-directory):' | \
sed 's/^/ /'
# Secondary artifacts
if grep -q 'secondary-artifacts:' "$file"; then
echo " Secondary artifacts: Configured"
fi
fi
# Cache
if grep -q '^cache:' "$file"; then
echo -e "\nCache:"
grep -A10 '^cache:' "$file" | grep -E '(paths|type):' | sed 's/^/ /'
fi
}
#===============================================================================
# TEKTON ANALYZER
#===============================================================================
#######################################
# Analyze a Tekton resource manifest (Task, Pipeline, *Run, Trigger*).
# Arguments: $1 - path to the Tekton YAML file
# Outputs:   human-readable summary to stdout
#######################################
analyze_tekton() {
    local file="$1"
    echo -e "\n--- Tekton: $file ---"
    # Resource kind drives which sections are reported below
    local kind
    kind=$(grep -E '^kind:' "$file" | head -1 | awk '{print $2}')
    echo "Kind: ${kind:-unknown}"
    # API version
    local api_version
    api_version=$(grep -E '^apiVersion:' "$file" | head -1 | awk '{print $2}')
    echo "API Version: $api_version"
    # Name (first indented "name:" field, i.e. metadata.name)
    local name
    name=$(grep -E '^\s+name:' "$file" | head -1 | awk '{print $2}')
    [[ -n "$name" ]] && echo "Name: $name"
    case "$kind" in
        "Task")
            echo -e "\nTask steps:"
            # FIX: '+' is a literal in BRE, so this grep needed -E (like the
            # sibling greps below) -- steps were never listed otherwise.
            grep -A3 -E '^\s+-\s*name:' "$file" | grep 'name:' | \
                sed 's/.*name:\s*/ '"$BULLET"' /'
            # Params
            if grep -q 'params:' "$file"; then
                echo -e "\nParameters:"
                grep -A5 'params:' "$file" | grep -E '^\s+-\s*name:' | \
                    sed 's/.*name:\s*/ '"$BULLET"' /'
            fi
            # Workspaces
            if grep -q 'workspaces:' "$file"; then
                echo -e "\nWorkspaces:"
                grep -A20 'workspaces:' "$file" | grep -E '^\s+-\s*name:' | \
                    sed 's/.*name:\s*/ '"$BULLET"' /'
            fi
            # Results
            if grep -q 'results:' "$file"; then
                echo -e "\nResults:"
                grep -A10 'results:' "$file" | grep -E '^\s+-\s*name:' | \
                    sed 's/.*name:\s*/ '"$BULLET"' /'
            fi
            ;;
        "Pipeline")
            echo -e "\nPipeline tasks:"
            grep -E '^\s+-\s*name:' "$file" | sed 's/.*name:\s*/ '"$BULLET"' /'
            # Task refs
            if grep -q 'taskRef:' "$file"; then
                echo -e "\nTask references:"
                grep -A2 'taskRef:' "$file" | grep 'name:' | \
                    sed 's/.*name:\s*/ '"$BULLET"' /' | sort -u
            fi
            # RunAfter
            if grep -q 'runAfter:' "$file"; then
                echo "Task dependencies: Configured"
            fi
            # When expressions
            if grep -q 'when:' "$file"; then
                echo "Conditional execution: Configured"
            fi
            # Finally
            if grep -q 'finally:' "$file"; then
                echo "Finally tasks: Configured"
            fi
            ;;
        "PipelineRun"|"TaskRun")
            echo "Run configuration"
            # Service account
            grep 'serviceAccountName:' "$file" | \
                sed 's/.*serviceAccountName:\s*/ Service Account: /'
            # Timeout
            grep 'timeout:' "$file" | sed 's/.*timeout:\s*/ Timeout: /'
            ;;
        "TriggerTemplate"|"TriggerBinding"|"EventListener")
            echo "Triggers configuration"
            ;;
    esac
}
#===============================================================================
# ARGOCD ANALYZER
#===============================================================================
#######################################
# Analyze an ArgoCD Application manifest.
# Arguments: $1 - path to the manifest file
# Outputs:   human-readable summary to stdout
#######################################
analyze_argocd() {
    local manifest="$1"
    echo -e "\n--- ArgoCD: $manifest ---"

    # Resource kind (ArgoCD manifests default to Application when omitted)
    local kind
    kind=$(awk '/^kind:/ {print $2; exit}' "$manifest")
    echo "Kind: ${kind:-Application}"

    # Resource name: first indented "name:" field (metadata.name)
    local name
    name=$(awk '/^[[:space:]]+name:/ {print $2; exit}' "$manifest")
    [[ -n "$name" ]] && echo "Name: $name"

    # ArgoCD project
    local project
    project=$(awk '/project:/ {print $2; exit}' "$manifest")
    [[ -n "$project" ]] && echo "Project: $project"

    # Source repository / chart
    if grep -q 'source:' "$manifest"; then
        printf '\nSource:\n'
        grep -A10 'source:' "$manifest" | grep -E '(repoURL|path|targetRevision|chart):' | \
            sed 's/^/  /'
    fi

    # Target cluster / namespace
    if grep -q 'destination:' "$manifest"; then
        printf '\nDestination:\n'
        grep -A5 'destination:' "$manifest" | grep -E '(server|namespace):' | \
            sed 's/^/  /'
    fi

    # Sync behaviour (automation, options, retry)
    if grep -q 'syncPolicy:' "$manifest"; then
        printf '\nSync policy:\n'
        grep -A15 'syncPolicy:' "$manifest" | grep -E '(automated|syncOptions|retry):' -A3 | \
            head -10 | sed 's/^/  /'
    fi

    # One-line feature flags
    grep -q 'health:' "$manifest" && echo "Custom health check: Configured"
    grep -q 'ignoreDifferences:' "$manifest" && echo "Ignore differences: Configured"

    # Helm-specific settings
    if grep -q 'helm:' "$manifest"; then
        printf '\nHelm:\n'
        grep -A10 'helm:' "$manifest" | grep -E '(valueFiles|values|releaseName):' | \
            sed 's/^/  /'
    fi

    grep -q 'kustomize:' "$manifest" && echo "Kustomize: Configured"
    return 0
}
#===============================================================================
# BUILDKITE ANALYZER
#===============================================================================
#######################################
# Analyze a Buildkite pipeline file.
# Arguments: $1 - path to the pipeline YAML
# Outputs:   human-readable summary to stdout
#######################################
analyze_buildkite() {
    local file="$1"
    echo -e "\n--- Buildkite: $file ---"
    # Agents
    if grep -q 'agents:' "$file"; then
        echo "Agent targeting:"
        grep -A5 'agents:' "$file" | grep -E '^\s+\w+:' | sed 's/^/ /'
    fi
    # Steps
    echo -e "\nSteps:"
    grep -E '^\s+-\s*label:' "$file" | sed 's/.*label:\s*/ '"$BULLET"' /'
    # Commands
    # FIX: needs -E ('+' is a literal in BRE, so the old pattern never
    # matched), and `|| echo 0` appended a second "0" to grep -c's own
    # "0" on no match, yielding a garbled count.
    local command_steps
    command_steps=$(grep -cE '^\s+command:' "$file" 2>/dev/null) || command_steps=0
    echo "Command steps: $command_steps"
    # Plugins
    if grep -q 'plugins:' "$file"; then
        echo -e "\nPlugins:"
        grep -A20 'plugins:' "$file" | grep -E '^\s+-\s*[a-z]' | \
            sed 's/#.*//; s/^/ /' | head -10
    fi
    # Wait steps
    if grep -q '^\s*-\s*wait' "$file"; then
        echo "Wait steps: Yes"
    fi
    # Block steps (FIX: -E added -- BRE '+' is literal)
    if grep -qE '^\s+-\s*block:' "$file"; then
        echo "Block (manual) steps: Yes"
    fi
    # Trigger steps (FIX: -E added)
    if grep -qE '^\s+-\s*trigger:' "$file"; then
        echo -e "\nTrigger pipelines:"
        grep -A2 'trigger:' "$file" | grep 'trigger:' | \
            sed 's/.*trigger:\s*/ '"$BULLET"' /'
    fi
    # Groups (FIX: -E added)
    if grep -qE '^\s+-\s*group:' "$file"; then
        echo -e "\nGroups:"
        grep 'group:' "$file" | sed 's/.*group:\s*/ '"$BULLET"' /'
    fi
    # Artifact paths
    if grep -q 'artifact_paths:' "$file"; then
        echo "Artifacts: Configured"
    fi
    # Retry
    if grep -q 'retry:' "$file"; then
        echo "Retry: Configured"
    fi
    # Concurrency
    if grep -q 'concurrency:' "$file"; then
        echo "Concurrency limits: Configured"
    fi
    # Matrix
    if grep -q 'matrix:' "$file"; then
        echo "Matrix builds: Configured"
    fi
}
#===============================================================================
# GENERIC ANALYZER (fallback)
#===============================================================================
#######################################
# Fallback analyzer for CI platforms without a dedicated parser.
# Arguments: $1 - config file path, $2 - platform label
# Outputs:   basic stats plus keyword-based pattern detection
#######################################
analyze_generic() {
    local cfg="$1"
    local platform_name="$2"
    echo -e "\n--- $platform_name: $cfg ---"
    if [[ ! -f "$cfg" ]]; then
        echo "(File not found)"
        return 0
    fi
    echo "(Detailed parser not implemented)"
    # Basic stats
    local line_count
    line_count=$(wc -l < "$cfg" 2>/dev/null || echo 0)
    echo "File size: $line_count lines"
    # Keyword sweep: each entry is "<case-insensitive grep pattern>:<label>"
    echo -e "\nDetected patterns:"
    local entry
    for entry in \
        'stage\|job\|step:Stages/Jobs/Steps' \
        'deploy:Deployment' \
        'test:Testing' \
        'build:Build' \
        'docker\|container:Containers' \
        'secret\|credential:Secrets' \
        'cache:Caching' \
        'artifact:Artifacts' \
        'notify\|slack\|email:Notifications'; do
        grep -qi "${entry%%:*}" "$cfg" 2>/dev/null && echo " $BULLET ${entry#*:}"
    done
}
#===============================================================================
# MAIN ANALYSIS ROUTER
#===============================================================================
#######################################
# Route a pipeline file to its platform-specific analyzer, then run the
# cross-platform checks (security tooling, IaC tooling, cloud providers).
# Arguments: $1 - pipeline file path, $2 - platform label
#######################################
analyze_pipeline() {
    local cfg="$1"
    local platform_label="$2"

    # Require a platform label and a non-empty, existing file
    # (-s implies existence, so one guard covers both checks).
    if [[ -z "$cfg" || -z "$platform_label" || ! -s "$cfg" ]]; then
        return 0
    fi

    case "$platform_label" in
        "Bitbucket Pipelines")              analyze_bitbucket_pipeline "$cfg" ;;
        "GitHub Actions")                   analyze_github_actions "$cfg" ;;
        "GitLab CI"|"GitLab CI (include)")  analyze_gitlab_ci "$cfg" ;;
        "Jenkins")                          analyze_jenkins "$cfg" ;;
        "Azure DevOps")                     analyze_azure_devops "$cfg" ;;
        "CircleCI")                         analyze_circleci "$cfg" ;;
        "Travis CI")                        analyze_travis_ci "$cfg" ;;
        "Drone CI")                         analyze_drone_ci "$cfg" ;;
        "AWS CodeBuild")                    analyze_aws_codebuild "$cfg" ;;
        "Tekton")                           analyze_tekton "$cfg" ;;
        "ArgoCD")                           analyze_argocd "$cfg" ;;
        "Buildkite")                        analyze_buildkite "$cfg" ;;
        *)                                  analyze_generic "$cfg" "$platform_label" ;;
    esac

    # Cross-platform analysis, shared by every CI system
    echo -e "\n --- Common Analysis ---"
    local -a findings=()
    mapfile -t findings < <(detect_security_tools "$cfg")
    if [[ ${#findings[@]} -gt 0 && -n "${findings[0]}" ]]; then
        echo " Security tools:"
        printf ' %s\n' "${findings[@]}"
    fi
    mapfile -t findings < <(detect_iac_tools "$cfg")
    if [[ ${#findings[@]} -gt 0 && -n "${findings[0]}" ]]; then
        echo " IaC tools:"
        printf ' %s\n' "${findings[@]}"
    fi
    mapfile -t findings < <(detect_cloud_providers "$cfg")
    if [[ ${#findings[@]} -gt 0 && -n "${findings[0]}" ]]; then
        echo " Cloud providers:"
        printf ' %s\n' "${findings[@]}"
    fi
}
#===============================================================================
# GIT ANALYSIS FUNCTIONS
#===============================================================================
#######################################
# Print the full git-structure report: branches, contributors, merge
# history, long-lived/stale branches, and an inferred protection policy.
# Globals:  BULLET, CHECK (output symbols)
# Outputs:  report to stdout (git errors suppressed where expected)
#######################################
analyze_git_structure() {
    echo "==============================================================================="
    echo "BRANCH STRUCTURE"
    echo "==============================================================================="

    # Detect the primary branch once, up front.
    # FIX: the "commits behind" calculation previously hardcoded origin/main,
    # which always produced N/A on master-based repositories.
    local main_branch="main"
    git show-ref --verify --quiet refs/remotes/origin/master 2>/dev/null && main_branch="master"

    echo -e "\n=== LOCAL AND REMOTE BRANCHES ==="
    git branch -a
    echo -e "\n=== BRANCH TYPE COUNT ==="
    git branch -r | grep -v HEAD | sed 's/.*\///' | cut -d'-' -f1 | cut -d'/' -f1 | sort | uniq -c | sort -rn
    echo -e "\n=== BRANCH PREFIXES ==="
    git branch -r | grep -v HEAD | sed 's/.*origin\///' | grep '/' | cut -d'/' -f1 | sort | uniq -c | sort -rn
    echo -e "\n=== TICKETS BY PROJECT ==="
    git branch -r | grep -oE '[A-Z]+-[0-9]+' | cut -d'-' -f1 | sort | uniq -c | sort -rn
    echo -e "\n=== TOP 10 CONTRIBUTORS ==="
    git shortlog -sn --all | head -10
    echo -e "\n=== GITFLOW MACRO (last 30) ==="
    git log --all --graph --simplify-by-decoration --pretty=format:'%h %d %s' --abbrev-commit -30
    echo -e "\n\n=== RECENT TIMELINE (last 30) ==="
    git log --all --graph --pretty=format:'%h | %d | %s | %cr | %an' --abbrev-commit --date=relative --simplify-by-decoration -30
    echo -e "\n\n=== LAST 20 MERGES ==="
    git log --merges --oneline --graph -20

    echo -e "\n=== LONG-LIVED BRANCHES ==="
    echo "Detected long-lived branches:"
    local branch last_update commits_behind
    for branch in main master develop staging stg dev production prod release; do
        if git show-ref --verify --quiet "refs/remotes/origin/$branch" 2>/dev/null; then
            last_update=$(git log -1 --format="%cr" "origin/$branch" 2>/dev/null)
            commits_behind=$(git rev-list --count "origin/$branch..origin/$main_branch" 2>/dev/null || echo "N/A")
            echo " $BULLET $branch (last update: $last_update, commits behind $main_branch: $commits_behind)"
        fi
    done

    echo -e "\n=== ACTIVITY BY MONTH (last 12 months) ==="
    git log --since="12 months ago" --format="%ai" | cut -d'-' -f1,2 | sort | uniq -c

    echo -e "\n=== STALE BRANCHES (>90 days) ==="
    local stale_count=0
    local branch_name last_commit last_epoch now_epoch days_old
    now_epoch=$(date +%s)   # hoisted: constant for the whole scan
    while IFS= read -r branch; do
        branch_name=$(echo "$branch" | sed 's/.*origin\///')
        last_commit=$(git log -1 --format="%ci" "origin/$branch_name" 2>/dev/null)
        if [[ -n "$last_commit" ]]; then
            # GNU date -d; falls back to epoch 0 (reported stale) elsewhere
            last_epoch=$(date -d "$last_commit" +%s 2>/dev/null || echo 0)
            days_old=$(( (now_epoch - last_epoch) / 86400 ))
            if [[ $days_old -gt 90 ]]; then
                echo " $BULLET $branch_name ($days_old days old)"
                ((stale_count++))
            fi
        fi
    done < <(git branch -r | grep -v HEAD | head -50)
    [[ $stale_count -eq 0 ]] && echo " None found (checked first 50 branches)"

    echo -e "\n=== BRANCH PROTECTION (inferred) ==="
    # Heuristic: a mostly-merge first-parent history suggests the branch only
    # receives reviewed PRs, i.e. is protected.
    local direct_commits merge_commits
    direct_commits=$(git log --first-parent --no-merges "origin/$main_branch" --oneline 2>/dev/null | head -20 | wc -l)
    merge_commits=$(git log --first-parent --merges "origin/$main_branch" --oneline 2>/dev/null | head -20 | wc -l)
    echo " Main branch: $main_branch"
    echo " Recent direct commits: $direct_commits"
    echo " Recent merge commits: $merge_commits"
    if [[ $merge_commits -gt $direct_commits ]]; then
        echo " Pattern: Likely protected (mostly merges)"
    else
        echo " Pattern: Likely unprotected (direct commits allowed)"
    fi
}
#######################################
# Infer the branching model (GitFlow variant / trunk-based) from which
# long-lived remote branches exist, then report feature-branch naming
# conventions and environment branches.
# Globals:  ARROW, CHECK (output symbols)
# Outputs:  report to stdout
#######################################
detect_gitflow_pattern() {
    echo -e "\n=== GITFLOW CONSISTENCY ==="

    # Probe for the well-known long-lived branches on origin.
    local flow="Unknown"
    local dev_exists=false develop_exists=false staging_exists=false main_exists=false release_exists=false
    git show-ref --verify --quiet refs/remotes/origin/dev 2>/dev/null && dev_exists=true
    git show-ref --verify --quiet refs/remotes/origin/develop 2>/dev/null && develop_exists=true
    git show-ref --verify --quiet refs/remotes/origin/staging 2>/dev/null && staging_exists=true
    git show-ref --verify --quiet refs/remotes/origin/main 2>/dev/null && main_exists=true
    git show-ref --verify --quiet refs/remotes/origin/master 2>/dev/null && main_exists=true
    git branch -r 2>/dev/null | grep -q 'origin/release/' && release_exists=true

    # Classification order matters: most specific combination first.
    if $dev_exists && $staging_exists && $main_exists; then
        flow="dev $ARROW staging $ARROW main (Traditional GitFlow)"
    elif $develop_exists && $main_exists && $release_exists; then
        flow="develop $ARROW release/* $ARROW main (GitFlow with release branches)"
    elif $develop_exists && $main_exists; then
        flow="develop $ARROW main (Simplified GitFlow)"
    elif $main_exists && ! $develop_exists && ! $dev_exists; then
        flow="Trunk-based (main only)"
    elif $main_exists && $staging_exists; then
        flow="main $ARROW staging (Two-branch)"
    fi
    echo "Pattern: $flow"

    # Short-lived branch prefixes (top 5 by frequency)
    echo -e "\nFeature branch conventions:"
    local conventions
    conventions=$(git branch -r 2>/dev/null | grep -v HEAD | sed 's/.*origin\///' | grep -E '^(feature|feat|fix|bugfix|hotfix|release|chore)/' | \
        cut -d'/' -f1 | sort | uniq -c | sort -rn | head -5)
    if [[ -n "$conventions" ]]; then
        sed 's/^/ /' <<<"$conventions"
    else
        echo " (none detected)"
    fi

    # Environment branches present on origin
    echo -e "\nEnvironment verification:"
    local detected=false env
    for env in dev develop staging stg prod production; do
        git show-ref --verify --quiet "refs/remotes/origin/$env" 2>/dev/null || continue
        printf " [%s] %s\n" "$CHECK" "$env"
        detected=true
    done
    $detected || echo " (no environment branches detected)"
}
#===============================================================================
# MAIN EXECUTION
#===============================================================================
{
    # NOTE: this section runs at top level, NOT inside a function, so `local`
    # must not be used here. The previous version declared pfile/pplat/file/
    # platform with `local`, which fails ("local: can only be used in a
    # function") and leaves the variables unset -- every pipeline entry was
    # discarded and branch correlation never matched.
    echo "==============================================================================="
    echo "GITFLOW ANALYSIS: $REPO_NAME"
    echo "Date: $(date '+%Y-%m-%d %H:%M:%S')"
    echo "Analyzer version: 3.0"
    echo "==============================================================================="

    # --- Summary ---------------------------------------------------------------
    echo -e "\n=== SUMMARY ==="
    FIRST_COMMIT=$(git log --reverse --format="%cr|%ci" 2>/dev/null | head -1)
    LAST_COMMIT=$(git log -1 --format="%cr|%ci" 2>/dev/null)
    TOTAL_COMMITS=$(git rev-list --count --all 2>/dev/null || echo 0)
    MERGE_COMMITS=$(git log --merges --oneline 2>/dev/null | wc -l)
    # PRs estimated from merge-commit wording used by GitHub/Bitbucket/GitLab
    PR_COUNT=$(git log --oneline --grep="pull request\|Merge pull\|merge request" 2>/dev/null | wc -l)
    CONTRIBUTORS=$(git shortlog -sn --all 2>/dev/null | wc -l)
    TAGS=$(git tag 2>/dev/null | wc -l)
    echo "Repository: $REPO_NAME"
    echo "First commit: $FIRST_COMMIT"
    echo "Last commit: $LAST_COMMIT"
    echo "Total commits: $TOTAL_COMMITS"
    echo "Merge commits: $MERGE_COMMITS"
    echo "PRs (estimated): $PR_COUNT"
    echo "Contributors: $CONTRIBUTORS"
    echo "Tags: $TAGS"

    # --- CI/CD analysis ----------------------------------------------------------
    echo -e "\n==============================================================================="
    echo "CI/CD PIPELINE ANALYSIS"
    echo "==============================================================================="
    mapfile -t PIPELINES < <(detect_pipelines)
    # Filter empty entries and validate files exist.
    # Entry format is "<file>:<platform>"; the platform label never contains
    # a colon, so split on the LAST colon.
    declare -a VALID_PIPELINES=()
    for p in "${PIPELINES[@]}"; do
        [[ -z "$p" ]] && continue
        pfile="${p%:*}"
        pplat="${p##*:}"
        [[ -z "$pfile" || -z "$pplat" ]] && continue
        [[ ! -f "$pfile" ]] && continue
        VALID_PIPELINES+=("$p")
    done
    if [[ ${#VALID_PIPELINES[@]} -eq 0 ]]; then
        echo -e "\n[!] No pipeline files detected"
    else
        echo -e "\nPipelines found: ${#VALID_PIPELINES[@]}"
        # List all detected pipelines
        for pipeline in "${VALID_PIPELINES[@]}"; do
            echo "  $CHECK ${pipeline%:*} (${pipeline##*:})"
        done
        # Detailed analysis per platform
        for pipeline in "${VALID_PIPELINES[@]}"; do
            analyze_pipeline "${pipeline%:*}" "${pipeline##*:}"
        done
        # CORRELATION: Pipeline x Git Branches
        echo -e "\n==============================================================================="
        echo "CORRELATION: PIPELINES x GIT BRANCHES"
        echo "==============================================================================="
        echo -e "\nRemote branches coverage:"
        # The while body runs in a pipeline subshell: fine here, it only prints.
        git branch -r 2>/dev/null | grep -v HEAD | sed 's/.*origin\///' | sort -u | while read -r branch; do
            [[ -z "$branch" ]] && continue
            has_pipeline="$CROSS"
            matched_file=""
            for pipeline in "${VALID_PIPELINES[@]}"; do
                pfile="${pipeline%:*}"
                [[ ! -f "$pfile" ]] && continue
                # Check various branch reference patterns
                if grep -qE "^\s{2,4}${branch}:\s*$" "$pfile" 2>/dev/null || \
                   grep -qE "^\s{2,4}'${branch}':\s*$" "$pfile" 2>/dev/null || \
                   grep -qE "^\s{2,4}\"${branch}\":\s*$" "$pfile" 2>/dev/null || \
                   grep -qE "branch.*${branch}" "$pfile" 2>/dev/null || \
                   grep -qE "refs/heads/${branch}" "$pfile" 2>/dev/null; then
                    has_pipeline="$CHECK"
                    matched_file="$pfile"
                    break
                fi
            done
            if [[ "$has_pipeline" == "$CHECK" ]]; then
                printf "  [%s] %-30s (%s)\n" "$has_pipeline" "$branch" "$matched_file"
            else
                printf "  [%s] %s\n" "$has_pipeline" "$branch"
            fi
        done
    fi
    # Branching-model detection is independent of CI files, so run it
    # unconditionally (it previously only ran when pipelines were found).
    detect_gitflow_pattern

    # --- Git structure analysis --------------------------------------------------
    analyze_git_structure

    echo -e "\n==============================================================================="
    echo "END OF ANALYSIS"
    echo "==============================================================================="
} > "$OUTPUT_FILE"
log_success "Analysis saved to: $OUTPUT_FILE"
echo ""
head -60 "$OUTPUT_FILE"
echo "..."
echo "(use 'cat $OUTPUT_FILE' to view complete output)"
# (removed: GitHub Gist page footer accidentally pasted into the script)