import argparse
import json
import os
import sys
from collections import defaultdict
from typing import Any, Dict, List, Optional, Union

import yaml


def find_value_by_path(data: Union[Dict, List], path_parts: List[str]) -> Optional[Any]:
    """
    Recursively finds a value in a nested dictionary or list given a path.

    Args:
        data: The dictionary or list to search within.
        path_parts: A list of strings representing the path to the desired value.

    Returns:
        The value found at the specified path, or None if the path does not exist.
        If the path traverses a list, a flattened list of all matching values is returned.
    """
    current_data = data
    for idx, part in enumerate(path_parts):
        if isinstance(current_data, dict):
            if part in current_data:
                current_data = current_data[part]
            else:
                return None
        elif isinstance(current_data, list):
            # Fan out over the list items, applying the remaining path to each element.
            results = []
            for item in current_data:
                result = find_value_by_path(item, path_parts[idx:])
                if result is not None:
                    if isinstance(result, list):
                        results.extend(result)
                    else:
                        results.append(result)
            return results if results else None
        else:
            return None
    return current_data


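# Illustrative usage of find_value_by_path (a sketch with a hypothetical, minimal document,
# not a real manifest):
#
#   doc = {'spec': {'containers': [{'name': 'app'}, {'name': 'sidecar'}]}}
#   find_value_by_path(doc, ['spec', 'containers', 'name'])  # -> ['app', 'sidecar']
#   find_value_by_path(doc, ['spec', 'missing'])             # -> None
#
# When a list is reached, the lookup fans out over its items and flattens the results.

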
def count_containers(doc: Dict[str, Any]) -> Dict[str, int]:
    """
    Count containers and initContainers for a given document.

    Args:
        doc: The Kubernetes resource document to analyze

    Returns:
        Dict with 'containers' and 'initContainers' counts
    """
    kind = doc.get('kind')
    containers_count = 0
    init_containers_count = 0

    # Define paths for different resource types
    container_paths = {
        'Deployment': 'spec.template.spec.containers',
        'StatefulSet': 'spec.template.spec.containers',
        'DaemonSet': 'spec.template.spec.containers',
        'Job': 'spec.template.spec.containers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.containers',
        'Pod': 'spec.containers'
    }

    init_container_paths = {
        'Deployment': 'spec.template.spec.initContainers',
        'StatefulSet': 'spec.template.spec.initContainers',
        'DaemonSet': 'spec.template.spec.initContainers',
        'Job': 'spec.template.spec.initContainers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.initContainers',
        'Pod': 'spec.initContainers'
    }

    if kind in container_paths:
        containers = find_value_by_path(doc, container_paths[kind].split('.'))
        if isinstance(containers, list):
            containers_count = len(containers)

    if kind in init_container_paths:
        init_containers = find_value_by_path(doc, init_container_paths[kind].split('.'))
        if isinstance(init_containers, list):
            init_containers_count = len(init_containers)

    return {
        'containers': containers_count,
        'initContainers': init_containers_count
    }


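# Illustrative example for count_containers (a hypothetical Deployment trimmed to the
# fields used here):
#
#   doc = {
#       'kind': 'Deployment',
#       'spec': {'template': {'spec': {
#           'containers': [{'name': 'app'}, {'name': 'sidecar'}],
#           'initContainers': [{'name': 'migrate'}],
#       }}},
#   }
#   count_containers(doc)  # -> {'containers': 2, 'initContainers': 1}

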
def check_security_context(doc: Dict[str, Any]) -> Dict[str, Any]:
    """
    Check existing securityContext configurations.

    Args:
        doc: The Kubernetes resource document to analyze

    Returns:
        Dict with security context analysis results
    """
    kind = doc.get('kind')
    security_info = {
        'has_pod_security_context': False,
        'has_seccomp_profile': False,
        'seccomp_type': None,
        'containers_with_security_context': 0,
        'containers_with_readonly_fs': 0,
        'init_containers_with_security_context': 0,
        'init_containers_with_readonly_fs': 0,
        'needs_pod_seccomp_patch': False,
        'containers_needing_readonly_fs': [],
        'init_containers_needing_readonly_fs': []
    }

    # Define paths for pod-level security context
    pod_security_paths = {
        'Deployment': 'spec.template.spec.securityContext',
        'StatefulSet': 'spec.template.spec.securityContext',
        'DaemonSet': 'spec.template.spec.securityContext',
        'Job': 'spec.template.spec.securityContext',
        'CronJob': 'spec.jobTemplate.spec.template.spec.securityContext',
        'Pod': 'spec.securityContext'
    }

    if kind in pod_security_paths:
        pod_sc = find_value_by_path(doc, pod_security_paths[kind].split('.'))
        if pod_sc and isinstance(pod_sc, dict):
            security_info['has_pod_security_context'] = True
            seccomp_profile = pod_sc.get('seccompProfile', {})
            if seccomp_profile and isinstance(seccomp_profile, dict):
                security_info['has_seccomp_profile'] = True
                security_info['seccomp_type'] = seccomp_profile.get('type')

        # Check if a pod-level seccompProfile patch is needed
        if not security_info['has_seccomp_profile'] or security_info['seccomp_type'] != 'RuntimeDefault':
            security_info['needs_pod_seccomp_patch'] = True

    # Define base paths for containers
    container_base_paths = {
        'Deployment': 'spec.template.spec.containers',
        'StatefulSet': 'spec.template.spec.containers',
        'DaemonSet': 'spec.template.spec.containers',
        'Job': 'spec.template.spec.containers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.containers',
        'Pod': 'spec.containers'
    }

    init_container_base_paths = {
        'Deployment': 'spec.template.spec.initContainers',
        'StatefulSet': 'spec.template.spec.initContainers',
        'DaemonSet': 'spec.template.spec.initContainers',
        'Job': 'spec.template.spec.initContainers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.initContainers',
        'Pod': 'spec.initContainers'
    }

    # Check main containers
    if kind in container_base_paths:
        containers = find_value_by_path(doc, container_base_paths[kind].split('.'))
        if isinstance(containers, list):
            for i, container in enumerate(containers):
                if container.get('securityContext') and isinstance(container['securityContext'], dict):
                    security_info['containers_with_security_context'] += 1
                    if container['securityContext'].get('readOnlyRootFilesystem') is True:
                        security_info['containers_with_readonly_fs'] += 1
                    else:
                        security_info['containers_needing_readonly_fs'].append(i)
                else:
                    security_info['containers_needing_readonly_fs'].append(i)

    # Check init containers
    if kind in init_container_base_paths:
        init_containers = find_value_by_path(doc, init_container_base_paths[kind].split('.'))
        if isinstance(init_containers, list):
            for i, init_container in enumerate(init_containers):
                if init_container.get('securityContext') and isinstance(init_container['securityContext'], dict):
                    security_info['init_containers_with_security_context'] += 1
                    if init_container['securityContext'].get('readOnlyRootFilesystem') is True:
                        security_info['init_containers_with_readonly_fs'] += 1
                    else:
                        security_info['init_containers_needing_readonly_fs'].append(i)
                else:
                    security_info['init_containers_needing_readonly_fs'].append(i)

    return security_info


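# Illustrative example for check_security_context (a hypothetical Pod, for clarity only):
# a Pod with a single container and no securityContext anywhere comes back with
#
#   needs_pod_seccomp_patch == True
#   containers_needing_readonly_fs == [0]
#
# i.e. it needs both a pod-level seccompProfile patch and a container-level
# readOnlyRootFilesystem patch.

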
def get_container_names(doc: Dict[str, Any]) -> Dict[str, List[str]]:
    """
    Get container and initContainer names for a given document.

    Args:
        doc: The Kubernetes resource document to analyze

    Returns:
        Dict with 'containers' and 'initContainers' name lists
    """
    kind = doc.get('kind')
    container_names = []
    init_container_names = []

    # Define paths for different resource types
    container_paths = {
        'Deployment': 'spec.template.spec.containers',
        'StatefulSet': 'spec.template.spec.containers',
        'DaemonSet': 'spec.template.spec.containers',
        'Job': 'spec.template.spec.containers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.containers',
        'Pod': 'spec.containers'
    }

    init_container_paths = {
        'Deployment': 'spec.template.spec.initContainers',
        'StatefulSet': 'spec.template.spec.initContainers',
        'DaemonSet': 'spec.template.spec.initContainers',
        'Job': 'spec.template.spec.initContainers',
        'CronJob': 'spec.jobTemplate.spec.template.spec.initContainers',
        'Pod': 'spec.initContainers'
    }

    if kind in container_paths:
        containers = find_value_by_path(doc, container_paths[kind].split('.'))
        if isinstance(containers, list):
            container_names = [c.get('name', f'unnamed-{i}') for i, c in enumerate(containers)]

    if kind in init_container_paths:
        init_containers = find_value_by_path(doc, init_container_paths[kind].split('.'))
        if isinstance(init_containers, list):
            init_container_names = [c.get('name', f'unnamed-init-{i}') for i, c in enumerate(init_containers)]

    return {
        'containers': container_names,
        'initContainers': init_container_names
    }


def get_default_search_rules() -> Dict[str, List[str]]:
    """Returns a dictionary of built-in search rules for common K8s resources."""
    return {
        'Deployment': [
            'metadata.name',
            'metadata.namespace',
            'spec.replicas',
            'spec.template.spec.containers.name',
            'spec.template.spec.initContainers.name'
        ],
        'StatefulSet': [
            'metadata.name',
            'metadata.namespace',
            'spec.replicas',
            'spec.template.spec.containers.name',
            'spec.template.spec.initContainers.name'
        ],
        'CronJob': [
            'metadata.name',
            'metadata.namespace',
            'spec.schedule',
            'spec.jobTemplate.spec.template.spec.containers.name',
            'spec.jobTemplate.spec.template.spec.initContainers.name'
        ],
        'Service': [
            'metadata.name',
            'metadata.namespace',
            'spec.type',
            'spec.ports.port'
        ],
        'Pod': [
            'metadata.name',
            'metadata.namespace',
            'spec.containers.name',
            'spec.initContainers.name'
        ],
        'Job': [
            'metadata.name',
            'metadata.namespace',
            'spec.template.spec.containers.name',
            'spec.template.spec.initContainers.name'
        ],
        'DaemonSet': [
            'metadata.name',
            'metadata.namespace',
            'spec.template.spec.containers.name',
            'spec.template.spec.initContainers.name'
        ],
        'ConfigMap': [
            'metadata.name',
            'metadata.namespace',
            'data'
        ],
        'Secret': [
            'metadata.name',
            'metadata.namespace',
            'type'
        ]
    }


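# A --config file passed to analyze_yaml() can extend or override these rules. A minimal
# sketch of the expected shape (the kind names and paths below are only an example):
#
#   # custom_rules.yaml
#   Ingress:
#     - metadata.name
#     - spec.rules.host
#   Deployment:
#     - metadata.labels
#
# An entry for a kind that already exists replaces the built-in list for that kind,
# because analyze_yaml() merges custom rules with dict.update().

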
def analyze_yaml(file_path: str, kind_filters: Optional[List[str]], config_file: Optional[str]) -> List[Dict[str, Any]]:
    """
    Analyzes a multi-document YAML file to provide insights for kustomization creation.

    Args:
        file_path: The path to the multi-document YAML file
        kind_filters: A list of K8s kinds to filter the analysis by
        config_file: The path to a custom YAML configuration file

    Returns:
        A list of dictionaries containing analysis results for each document

    Raises:
        FileNotFoundError: If the specified file_path or config_file does not exist
        ValueError: If the YAML file or config file is malformed
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"The file '{file_path}' was not found.")

    search_rules = get_default_search_rules()
    if config_file:
        if not os.path.exists(config_file):
            raise FileNotFoundError(f"The configuration file '{config_file}' was not found.")
        try:
            with open(config_file, 'r', encoding='utf-8') as f:
                custom_rules = yaml.safe_load(f)
                if isinstance(custom_rules, dict):
                    search_rules.update(custom_rules)
                else:
                    raise ValueError("Configuration file must be a dictionary.")
        except yaml.YAMLError as e:
            raise ValueError(f"Error parsing configuration file: {e}") from e

    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            parsed_docs = yaml.safe_load_all(file)

            extracted_data = []
            for i, doc in enumerate(parsed_docs):
                if not doc or not isinstance(doc, dict):
                    continue

                kind = doc.get('kind')
                if kind_filters and kind not in kind_filters:
                    continue

                name = doc.get('metadata', {}).get('name')
                namespace = doc.get('metadata', {}).get('namespace')

                document_data: Dict[str, Any] = {
                    'document_index': i,
                    'kind': kind,
                    'name': name,
                    'namespace': namespace
                }

                # Add container analysis
                container_counts = count_containers(doc)
                container_names = get_container_names(doc)
                document_data['container_analysis'] = {
                    'counts': container_counts,
                    'names': container_names
                }

                # Add security context analysis
                security_analysis = check_security_context(doc)
                document_data['security_analysis'] = security_analysis

                # Original path-based analysis
                if kind in search_rules:
                    for path in search_rules[kind]:
                        if path not in ['metadata.name', 'metadata.namespace']:  # Skip fields already captured above
                            path_parts = path.split('.')
                            value = find_value_by_path(doc, path_parts)
                            if value is not None:
                                document_data[path] = value

                extracted_data.append(document_data)

            return extracted_data

    except yaml.YAMLError as e:
        raise ValueError(f"Error parsing YAML file: {e}") from e


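# analyze_yaml() can also be called programmatically; a minimal sketch (the file name
# 'manifests.yaml' is hypothetical):
#
#   results = analyze_yaml('manifests.yaml', kind_filters=['Deployment'], config_file=None)
#   for item in results:
#       print(item['kind'], item['name'],
#             item['security_analysis']['needs_pod_seccomp_patch'])

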
def generate_kustomization_guidance(data: List[Dict[str, Any]]) -> str:
    """
    Generate guidance for creating kustomization.yaml based on analysis results.

    Args:
        data: List of analyzed documents

    Returns:
        String containing formatted guidance for kustomization creation
    """
    guidance = []

    # Summary statistics
    kind_stats = defaultdict(int)
    total_containers = 0
    total_init_containers = 0
    resources_needing_patches = []

    for doc in data:
        kind = doc.get('kind', 'Unknown')
        kind_stats[kind] += 1

        container_counts = doc.get('container_analysis', {}).get('counts', {})
        total_containers += container_counts.get('containers', 0)
        total_init_containers += container_counts.get('initContainers', 0)

        security = doc.get('security_analysis', {})
        if (security.get('needs_pod_seccomp_patch') or
                security.get('containers_needing_readonly_fs') or
                security.get('init_containers_needing_readonly_fs')):
            resources_needing_patches.append({
                'kind': kind,
                'name': doc.get('name'),
                'containers': container_counts.get('containers', 0),
                'initContainers': container_counts.get('initContainers', 0),
                'containers_needing_readonly': len(security.get('containers_needing_readonly_fs', [])),
                'init_containers_needing_readonly': len(security.get('init_containers_needing_readonly_fs', []))
            })

    guidance.append("=== KUSTOMIZATION.YAML CREATION GUIDANCE ===\n")

    # Resource summary
    guidance.append("📊 Resource Summary:")
    for kind, count in sorted(kind_stats.items()):
        guidance.append(f" - {kind}: {count} resource(s)")
    guidance.append(f" - Total Containers: {total_containers}")
    guidance.append(f" - Total Init Containers: {total_init_containers}")
    guidance.append("")

    # Universal patches needed
    guidance.append("🔧 Universal Patches Needed:")
    workload_kinds = set(kind_stats.keys()) & {'Deployment', 'StatefulSet', 'Job', 'CronJob', 'DaemonSet', 'Pod'}
    if workload_kinds:
        guidance.append(" Pod-level seccompProfile patches apply to:")
        for kind in sorted(workload_kinds):
            guidance.append(f" - {kind} ({kind_stats[kind]} resource(s))")
    else:
        guidance.append(" ✅ No workload resources found requiring universal patches")
    guidance.append("")

    # Specific patches needed
    guidance.append("🎯 Specific Patches Needed:")
    if resources_needing_patches:
        guidance.append(" The following resources need container-level readOnlyRootFilesystem patches:")
        for resource in resources_needing_patches:
            guidance.append(f" - {resource['kind']}/{resource['name']}:")
            if resource['containers_needing_readonly'] > 0:
                guidance.append(f"   * Needs {resource['containers_needing_readonly']} container patch(es)")
            if resource['init_containers_needing_readonly'] > 0:
                guidance.append(f"   * Needs {resource['init_containers_needing_readonly']} initContainer patch(es)")
    else:
        guidance.append(" ✅ All resources have proper readOnlyRootFilesystem configuration")
    guidance.append("")

    # Patch strategy recommendation
    guidance.append("💡 Recommended Patch Strategy:")
    if len(resources_needing_patches) <= 5:
        guidance.append(" - Few resources detected, recommend using JSON Patch for precise control")
    else:
        guidance.append(" - Many resources detected, recommend combining Strategic Merge and JSON Patch")

    if total_init_containers > 0:
        guidance.append(" - InitContainers detected: create separate patches for each initContainer")

    guidance.append(" - Organize universal and specific patches separately for maintainability")
    guidance.append(" - Use target selectors (kind + name) for specific resource patches")
    guidance.append("")

    # Example patch structure
    guidance.append("📝 Patch Structure Example:")
    guidance.append(" 1. Universal patches (strategic merge) for pod-level securityContext")
    guidance.append(" 2. Specific patches (JSON patch) for container-level readOnlyRootFilesystem")
    guidance.append(" 3. Separate patches for each initContainer that needs modification")
    guidance.append("")

    return "\n".join(guidance)


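# For reference, a minimal sketch of the patch structure that generate_kustomization_guidance()
# describes, assuming kustomize v4+ inline-patch syntax; the resource name "my-app" is
# hypothetical and only illustrates the shape of the patches:
#
#   # kustomization.yaml (excerpt)
#   patches:
#     # 1. Strategic-merge patch for the pod-level securityContext
#     - patch: |-
#         apiVersion: apps/v1
#         kind: Deployment
#         metadata:
#           name: my-app
#         spec:
#           template:
#             spec:
#               securityContext:
#                 seccompProfile:
#                   type: RuntimeDefault
#     # 2. JSON patch for a container-level readOnlyRootFilesystem
#     - target:
#         kind: Deployment
#         name: my-app
#       patch: |-
#         - op: add
#           path: /spec/template/spec/containers/0/securityContext/readOnlyRootFilesystem
#           value: true

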
def print_detailed_analysis(data: List[Dict[str, Any]]):
    """Print detailed analysis for each resource."""
    print("=== DETAILED RESOURCE ANALYSIS ===\n")

    for doc in data:
        print(f"📄 Document {doc['document_index']}: {doc.get('kind', 'Unknown')}/{doc.get('name', 'unnamed')}")
        if doc.get('namespace'):
            print(f" Namespace: {doc['namespace']}")

        # Container information
        container_analysis = doc.get('container_analysis', {})
        counts = container_analysis.get('counts', {})
        names = container_analysis.get('names', {})

        print(f" Containers: {counts.get('containers', 0)}")
        if names.get('containers'):
            print(f"   Names: {', '.join(names['containers'])}")

        print(f" Init Containers: {counts.get('initContainers', 0)}")
        if names.get('initContainers'):
            print(f"   Names: {', '.join(names['initContainers'])}")

        # Security analysis
        security = doc.get('security_analysis', {})
        print(" Security Configuration:")
        print(f"   Pod seccompProfile: {'✅ Configured' if not security.get('needs_pod_seccomp_patch') else '❌ Needs patch'}")

        containers_readonly = counts.get('containers', 0) - len(security.get('containers_needing_readonly_fs', []))
        total_containers = counts.get('containers', 0)
        print(f"   Container readOnlyRootFilesystem: {containers_readonly}/{total_containers} ✅")

        init_containers_readonly = counts.get('initContainers', 0) - len(security.get('init_containers_needing_readonly_fs', []))
        total_init_containers = counts.get('initContainers', 0)
        print(f"   InitContainer readOnlyRootFilesystem: {init_containers_readonly}/{total_init_containers} ✅")

        # Show what patches are needed
        patches_needed = []
        if security.get('needs_pod_seccomp_patch'):
            patches_needed.append("Pod seccompProfile")
        if security.get('containers_needing_readonly_fs'):
            patches_needed.append(f"Container readOnlyRootFilesystem (indices: {security['containers_needing_readonly_fs']})")
        if security.get('init_containers_needing_readonly_fs'):
            patches_needed.append(f"InitContainer readOnlyRootFilesystem (indices: {security['init_containers_needing_readonly_fs']})")

        if patches_needed:
            print(f" 🔧 Patches Needed: {', '.join(patches_needed)}")
        else:
            print(" ✅ No additional patches required")

        print("-" * 80)


def print_summary_analysis(data: List[Dict[str, Any]]):
    """Print a concise summary of the analysis."""
    if not data:
        print("No resources found to analyze.")
        return

    kind_stats = defaultdict(int)
    total_containers = 0
    total_init_containers = 0
    resources_needing_pod_patches = 0
    resources_needing_container_patches = 0

    for doc in data:
        kind = doc.get('kind', 'Unknown')
        kind_stats[kind] += 1

        container_counts = doc.get('container_analysis', {}).get('counts', {})
        total_containers += container_counts.get('containers', 0)
        total_init_containers += container_counts.get('initContainers', 0)

        security = doc.get('security_analysis', {})
        if security.get('needs_pod_seccomp_patch'):
            resources_needing_pod_patches += 1
        if (security.get('containers_needing_readonly_fs') or
                security.get('init_containers_needing_readonly_fs')):
            resources_needing_container_patches += 1

    print("=== ANALYSIS SUMMARY ===")
    print(f"Total Resources: {len(data)}")
    print(f"Resource Types: {', '.join(sorted(kind_stats.keys()))}")
    print(f"Total Containers: {total_containers}")
    print(f"Total Init Containers: {total_init_containers}")
    print(f"Resources needing pod-level patches: {resources_needing_pod_patches}")
    print(f"Resources needing container-level patches: {resources_needing_container_patches}")
    print()


def main():
    """Main function to parse arguments and run the analysis."""
    parser = argparse.ArgumentParser(
        description="Analyze K8s multi-YAML files to guide kustomization.yaml creation",
        formatter_class=argparse.RawTextHelpFormatter,
        epilog="""
Usage Examples:
  # Analyze all resources and get creation guidance
  python k8s_analyzer.py manifests.yaml

  # Analyze specific resource types
  python k8s_analyzer.py manifests.yaml --kind Deployment CronJob

  # Get detailed analysis of each resource
  python k8s_analyzer.py manifests.yaml --detailed

  # Get guidance for creating kustomization.yaml
  python k8s_analyzer.py manifests.yaml --guidance

  # Output JSON format for further processing
  python k8s_analyzer.py manifests.yaml --json

  # Use custom search rules
  python k8s_analyzer.py manifests.yaml --config custom_rules.yaml
"""
    )
    parser.add_argument(
        'file_path',
        metavar='FILE',
        help="Path to the multi-YAML file"
    )
    parser.add_argument(
        '--json',
        action='store_true',
        help="Output results in JSON format"
    )
    parser.add_argument(
        '--kind',
        nargs='+',
        metavar='KIND',
        help="Filter analysis to specific resource kinds (e.g., 'Deployment', 'CronJob')"
    )
    parser.add_argument(
        '--config',
        metavar='CONFIG_FILE',
        help="Path to custom YAML configuration file with search rules"
    )
    parser.add_argument(
        '--detailed',
        action='store_true',
        help="Show detailed analysis for each resource"
    )
    parser.add_argument(
        '--guidance',
        action='store_true',
        help="Show guidance for creating kustomization.yaml"
    )
    parser.add_argument(
        '--summary',
        action='store_true',
        help="Show only a concise summary of the analysis"
    )

    args = parser.parse_args()

    try:
        results = analyze_yaml(args.file_path, args.kind, args.config)

        if args.json:
            print(json.dumps(results, indent=2, ensure_ascii=False))
        else:
            if args.summary:
                print_summary_analysis(results)
            elif args.guidance:
                guidance = generate_kustomization_guidance(results)
                print(guidance)
            elif args.detailed:
                print_detailed_analysis(results)
            else:
                # Default behavior: show guidance
                guidance = generate_kustomization_guidance(results)
                print(guidance)
                print("Use --detailed for detailed resource analysis")
                print("Use --summary for a concise summary")
                print("Use --guidance to see only creation guidance")

    except (FileNotFoundError, ValueError) as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()