nvhi-atsila-microservice/Infrastructure/foundation/Jenkinsfile
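// Declarative Jenkins pipeline for the foundation (network + Terraform backend) layer.
// Supported actions: plan (review only), apply (bootstrap backend + deploy), destroy (remove
// managed infrastructure), cleanup (remove the bootstrap S3 bucket / DynamoDB table).
// AWS access keys and the target region are both resolved from the Jenkins credential store
// (AWS_CREDENTIALS_ID and AWS_REGION_ID) rather than from plain-text job parameters.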
pipeline {
agent any
parameters {
choice(
name: 'ACTION',
choices: ['plan', 'apply', 'destroy', 'cleanup'],
description: 'Action to perform: plan (review), apply (deploy), destroy (remove infra), cleanup (remove bootstrap)'
)
booleanParam(
name: 'AUTO_APPROVE',
defaultValue: false,
description: 'Auto-approve terraform apply (use with caution)'
)
booleanParam(
name: 'SKIP_SONAR',
defaultValue: false,
description: 'Skip SonarQube analysis (not recommended)'
)
booleanParam(
name: 'SKIP_BOOTSTRAP',
defaultValue: false,
description: 'Skip bootstrap phase (S3/DynamoDB already exist)'
)
string(
name: 'PROJECT_NAME',
defaultValue: 'nvhi-atsila-microservice',
description: 'Project name for resource naming'
)
string(
name: 'AWS_CREDENTIALS_ID',
defaultValue: 'aws-ci',
description: 'AWS credentials stored in Jenkins'
)
string(
name: 'AWS_REGION_ID',
defaultValue: 'AWS_REGION',
description: 'ID of the Jenkins secret text credential that stores the AWS region'
)
choice(
name: 'ENVIRONMENT',
choices: ['dev', 'staging', 'prod'],
description: 'Environment to deploy'
)
}
environment {
// Terraform configuration
TF_VERSION = '1.5.7'
TF_IN_AUTOMATION = 'true'
TF_INPUT = 'false'
TF_CLI_ARGS = '-no-color'
// Working directory
TF_WORKING_DIR = 'infrastructure/foundation'
// Project configuration (AWS_REGION will be injected from Jenkins credentials)
PROJECT_NAME = "${params.PROJECT_NAME}"
ENVIRONMENT = "${params.ENVIRONMENT}"
// SonarQube configuration
SONAR_PROJECT_KEY = "${params.PROJECT_NAME}-foundation"
SONAR_PROJECT_NAME = "${params.PROJECT_NAME} Foundation Layer"
SONAR_PROJECT_VERSION = "${BUILD_NUMBER}"
}
stages {
stage('🔍 Checkout & Validation') {
steps {
echo "=== Enterprise CI/CD Foundation Layer Pipeline ==="
echo "Action: ${params.ACTION}"
echo "Environment: ${params.ENVIRONMENT}"
echo "Project: ${params.PROJECT_NAME}"
echo "AWS Credentials: ${params.AWS_CREDENTIALS_ID}"
echo "AWS Region Credential: ${params.AWS_REGION_ID}"
echo "Authentication: Jenkins Credential Store (Enterprise Standard)"
echo "Build: #${BUILD_NUMBER}"
echo "Working Directory: ${env.TF_WORKING_DIR}"
// Clean workspace and checkout latest code
deleteDir()
checkout scm
// Verify repository structure
script {
sh '''
echo "Repository structure validation:"
# Check for required directories
if [ ! -d "${TF_WORKING_DIR}" ]; then
echo "❌ Missing foundation directory: ${TF_WORKING_DIR}"
exit 1
fi
# Check for required files
cd "${TF_WORKING_DIR}"
for file in main.tf variables.tf outputs.tf versions.tf bootstrap.sh cleanup.sh; do
if [ ! -f "$file" ]; then
echo "❌ Missing required file: $file"
exit 1
fi
echo "✅ Found: $file"
done
# Make scripts executable
chmod +x bootstrap.sh cleanup.sh
echo "✅ Repository structure validated"
'''
}
}
}
stage('🔧 Setup Tools') {
steps {
script {
// Install Terraform if not available
sh '''
if ! command -v terraform > /dev/null 2>&1; then
echo "Installing Terraform ${TF_VERSION}..."
wget -q https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip
unzip -o terraform_${TF_VERSION}_linux_amd64.zip
chmod +x terraform
# Prefer a system-wide install; fall back to /tmp when sudo is unavailable
sudo mv terraform /usr/local/bin/ || mv terraform /tmp/
# Note: this PATH change only persists within the current sh step
export PATH="/tmp:$PATH"
fi
echo "Terraform version:"
terraform version
'''
// Verify AWS credentials and permissions via Jenkins credential store
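// The aws(...) binding assumes the Jenkins AWS Credentials plugin is installed; it exports
// AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY, while the region comes from a separate secret text credential.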
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
sh '''
echo "AWS CLI version:"
aws --version
echo "Verifying Jenkins stored AWS credentials..."
echo "AWS Region: ${AWS_REGION}"
aws sts get-caller-identity
echo "Testing AWS permissions..."
aws ec2 describe-vpcs --max-items 1 --region ${AWS_REGION} > /dev/null && echo "✅ EC2 permissions OK" || echo "⚠️ EC2 permissions limited"
aws s3 ls > /dev/null 2>&1 && echo "✅ S3 permissions OK" || echo "⚠️ S3 permissions limited"
aws dynamodb list-tables --region ${AWS_REGION} > /dev/null 2>&1 && echo "✅ DynamoDB permissions OK" || echo "⚠️ DynamoDB permissions limited"
echo "✅ Jenkins credential store authentication verified"
'''
}
}
}
}
stage('🔍 SonarQube Analysis') {
when {
allOf {
expression { !params.SKIP_SONAR }
expression { params.ACTION != 'cleanup' }
}
}
steps {
dir("${env.TF_WORKING_DIR}") {
script {
// Create comprehensive SonarQube configuration
writeFile file: 'sonar-project.properties', text: """
sonar.projectKey=${env.SONAR_PROJECT_KEY}
sonar.projectName=${env.SONAR_PROJECT_NAME}
sonar.projectVersion=${env.SONAR_PROJECT_VERSION}
sonar.sources=.
sonar.sourceEncoding=UTF-8
# Terraform-specific configuration
sonar.terraform.file.suffixes=.tf
sonar.exclusions=**/*.tfstate,**/*.tfstate.backup,**/.terraform/**,**/*.tfplan
# Include scripts in analysis
sonar.inclusions=**/*.tf,**/*.sh
# Quality gate settings
sonar.qualitygate.wait=true
# Coverage and duplications
sonar.cpd.exclusions=**/*.tf
# Custom properties for enterprise analysis
sonar.tags=terraform,infrastructure,enterprise-cicd
"""
// Run SonarQube analysis
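// Assumes the sonar-scanner CLI is already available on the agent's PATH;
// withSonarQubeEnv only injects the server URL and authentication token.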
withSonarQubeEnv('SonarQube') {
sh '''
echo "🔍 Running SonarQube analysis on Terraform infrastructure..."
sonar-scanner
'''
}
}
}
}
}
stage('🎯 Quality Gate') {
when {
allOf {
expression { !params.SKIP_SONAR }
expression { params.ACTION != 'cleanup' }
}
}
steps {
script {
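// waitForQualityGate relies on a SonarQube webhook calling back to Jenkins;
// without that webhook this step will simply time out after 5 minutes.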
timeout(time: 5, unit: 'MINUTES') {
def qg = waitForQualityGate()
if (qg.status != 'OK') {
echo "❌ SonarQube Quality Gate failed: ${qg.status}"
echo "Quality gate details: ${qg}"
if (params.ACTION == 'apply' && !params.AUTO_APPROVE) {
def proceed = input(
message: 'SonarQube Quality Gate failed. How do you want to proceed?',
parameters: [
choice(
name: 'DECISION',
choices: ['Abort', 'Proceed anyway'],
description: 'Quality gate failed - your decision'
)
]
)
if (proceed == 'Abort') {
error "Deployment aborted due to quality gate failure"
}
} else if (params.ACTION == 'apply' && params.AUTO_APPROVE) {
echo "⚠️ Quality gate failed but AUTO_APPROVE is enabled, proceeding..."
} else {
error "Quality gate failed and action is ${params.ACTION}"
}
} else {
echo "✅ SonarQube Quality Gate passed"
}
}
}
}
}
stage('🚀 Bootstrap Backend') {
when {
allOf {
expression { params.ACTION == 'apply' }
expression { !params.SKIP_BOOTSTRAP }
}
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
echo "=== Bootstrapping Terraform Backend ==="
sh '''
# Set environment variables for bootstrap script
export PROJECT_NAME="${PROJECT_NAME}"
export ENVIRONMENT="${ENVIRONMENT}"
export AWS_REGION="${AWS_REGION}"
# Run bootstrap script (uses Jenkins credentials)
./bootstrap.sh
# Verify backend configuration was created
if [ ! -f backend.tf ]; then
echo "❌ Bootstrap failed - backend.tf not created"
exit 1
fi
echo "✅ Backend bootstrap completed"
echo "Generated backend.tf:"
cat backend.tf
'''
}
}
}
}
}
stage('🔄 Terraform Init & Validate') {
when {
expression { params.ACTION != 'cleanup' }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
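// Note: the bootstrap stage only runs for ACTION == 'apply', so a standalone 'plan'
// expects backend.tf to already exist (otherwise terraform init falls back to local state).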
sh '''
echo "=== Terraform Initialization ==="
# Create terraform.tfvars if not exists
if [ ! -f terraform.tfvars ]; then
echo "Creating terraform.tfvars..."
cat > terraform.tfvars << EOF
# Generated by Jenkins Pipeline Build #${BUILD_NUMBER}
project_name = "${PROJECT_NAME}"
environment = "${ENVIRONMENT}"
aws_region = "${AWS_REGION}"
# Free tier optimized settings
enable_private_subnets = false
enable_vpc_endpoints = false
enable_nat_gateway = false
single_nat_gateway = true
cost_optimization_mode = true
# Jenkins-managed tags
common_tags = {
Terraform = "true"
Project = "${PROJECT_NAME}"
Environment = "${ENVIRONMENT}"
ManagedBy = "jenkins"
Pipeline = "foundation-layer"
BuildNumber = "${BUILD_NUMBER}"
GitCommit = "${GIT_COMMIT}"
}
EOF
fi
echo "Current terraform.tfvars:"
cat terraform.tfvars
# Initialize Terraform (uses Jenkins credentials)
terraform init -upgrade
# Validate configuration
terraform validate
# Format check
terraform fmt -check=true || {
echo "⚠️ Terraform files need formatting"
terraform fmt -diff=true
}
echo "✅ Terraform initialized and validated"
'''
}
}
}
}
}
stage('📊 Terraform Plan') {
when {
expression { params.ACTION in ['plan', 'apply'] }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
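// -detailed-exitcode returns 0 (no changes), 1 (error), or 2 (changes pending);
// the || capture below keeps the shell's errexit from aborting on exit code 2.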
sh '''
echo "=== Terraform Plan ==="
terraform plan \
-var="project_name=${PROJECT_NAME}" \
-var="environment=${ENVIRONMENT}" \
-var="aws_region=${AWS_REGION}" \
-out=tfplan \
-detailed-exitcode || PLAN_EXIT_CODE=$?
# Handle plan exit codes
case ${PLAN_EXIT_CODE:-0} in
0)
echo "✅ No changes needed - infrastructure is up to date"
;;
1)
echo "❌ Terraform plan failed"
exit 1
;;
2)
echo "📝 Changes detected - plan saved to tfplan"
# Show plan summary
echo "=== Plan Summary ==="
terraform show -no-color tfplan | grep -E "(Plan:|No changes|Error:)" || echo "Plan generated successfully"
;;
esac
'''
// Archive the plan for audit
archiveArtifacts artifacts: 'tfplan', allowEmptyArchive: true
}
}
}
}
}
stage('🚦 Deployment Approval') {
when {
allOf {
expression { params.ACTION == 'apply' }
expression { !params.AUTO_APPROVE }
}
}
steps {
script {
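// input() runs here inside the node context, so the executor stays occupied while waiting for approval.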
def planSummary = ""
dir("${env.TF_WORKING_DIR}") {
planSummary = sh(
script: 'terraform show -no-color tfplan | grep "Plan:" || echo "No plan summary available"',
returnStdout: true
).trim()
}
echo "=== Manual Approval Required ==="
echo "Environment: ${params.ENVIRONMENT}"
echo "Region: ${params.AWS_REGION}"
echo "Plan Summary: ${planSummary}"
def approvalData = input(
id: 'ProceedApply',
message: """
🔍 Review the Terraform plan output above carefully.
Environment: ${params.ENVIRONMENT}
Region credential: ${params.AWS_REGION_ID}
Plan: ${planSummary}
Proceed with deployment?
""",
parameters: [
choice(
name: 'PROCEED',
choices: ['No', 'Yes, deploy infrastructure'],
description: 'Deployment decision'
),
string(
name: 'APPROVER',
defaultValue: env.BUILD_USER ?: 'jenkins-user',
description: 'Your name for audit trail'
)
]
)
if (approvalData.PROCEED != 'Yes, deploy infrastructure') {
error "Deployment cancelled by ${approvalData.APPROVER}"
}
echo "✅ Deployment approved by: ${approvalData.APPROVER}"
env.DEPLOYMENT_APPROVER = approvalData.APPROVER
}
}
}
stage('🚀 Terraform Apply') {
when {
expression { params.ACTION == 'apply' }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
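// Applying the saved tfplan guarantees that exactly the changes reviewed at approval time are executed.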
echo "=== Terraform Apply ==="
if (env.DEPLOYMENT_APPROVER) {
echo "✅ Approved by: ${env.DEPLOYMENT_APPROVER}"
}
sh '''
terraform apply -auto-approve tfplan
echo "=== Deployment Outputs ==="
terraform output
# Save outputs for other stages/jobs
terraform output -json > terraform-outputs.json
terraform output > terraform-outputs.txt
'''
// Archive outputs
archiveArtifacts artifacts: 'terraform-outputs.json,terraform-outputs.txt', allowEmptyArchive: true
}
}
}
}
}
stage('💥 Terraform Destroy') {
when {
expression { params.ACTION == 'destroy' }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
def destroyApproval = input(
id: 'ProceedDestroy',
message: """
⚠️ DESTRUCTIVE ACTION WARNING ⚠️
This will permanently delete ALL infrastructure in:
• Environment: ${params.ENVIRONMENT}
• Project: ${params.PROJECT_NAME}
This action CANNOT be undone!
Type 'DESTROY' exactly to confirm:
""",
parameters: [
string(
name: 'CONFIRMATION',
defaultValue: '',
description: 'Type DESTROY to confirm deletion'
),
string(
name: 'DESTROYER',
defaultValue: env.BUILD_USER ?: 'jenkins-user',
description: 'Your name for audit trail'
)
]
)
if (destroyApproval.CONFIRMATION != 'DESTROY') {
error "Destroy cancelled - confirmation text did not match 'DESTROY'"
}
echo "💀 DESTROY operation confirmed by: ${destroyApproval.DESTROYER}"
echo "💀 Destroying infrastructure in 10 seconds..."
echo "💀 Last chance to cancel with Ctrl+C..."
sleep(10)
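// No saved plan is used for destroy, so the same -var values passed at plan time are repeated here.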
sh '''
terraform destroy -auto-approve \
-var="project_name=${PROJECT_NAME}" \
-var="environment=${ENVIRONMENT}" \
-var="aws_region=${AWS_REGION}"
'''
echo "💀 Infrastructure destroyed by: ${destroyApproval.DESTROYER}"
echo "💀 Next step: Run with ACTION=cleanup to remove bootstrap resources"
}
}
}
}
}
stage('🧹 Cleanup Bootstrap') {
when {
expression { params.ACTION == 'cleanup' }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
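// cleanup.sh is assumed to delete the bootstrap resources (state bucket and lock table);
// run this only after ACTION=destroy has removed the Terraform-managed infrastructure.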
echo "=== Cleanup Bootstrap Resources ==="
sh '''
# Set environment variables for cleanup script
export PROJECT_NAME="${PROJECT_NAME}"
export ENVIRONMENT="${ENVIRONMENT}"
export AWS_REGION="${AWS_REGION}"
# Run cleanup script (uses Jenkins credentials)
./cleanup.sh
echo "✅ Bootstrap cleanup completed"
'''
}
}
}
}
}
stage('📈 Post-Deployment Validation') {
when {
expression { params.ACTION == 'apply' }
}
steps {
dir("${env.TF_WORKING_DIR}") {
withCredentials([
aws(credentialsId: "${params.AWS_CREDENTIALS_ID}"),
string(credentialsId: "${params.AWS_REGION_ID}", variable: 'AWS_REGION')
]) {
script {
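// Output names used below (vpc_id, terraform_state_bucket_name, terraform_locks_table_name)
// are assumed to be defined in outputs.tf.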
sh '''
echo "=== Post-Deployment Validation ==="
# Validate VPC
VPC_ID=$(terraform output -raw vpc_id 2>/dev/null || echo "")
if [ -n "$VPC_ID" ] && [ "$VPC_ID" != "null" ]; then
echo "✅ VPC created successfully: $VPC_ID"
# Get VPC details
aws ec2 describe-vpcs --vpc-ids $VPC_ID --region ${AWS_REGION} \
--query 'Vpcs[0].{VpcId:VpcId,State:State,CidrBlock:CidrBlock}' \
--output table
# Count resources
SUBNET_COUNT=$(aws ec2 describe-subnets --filters "Name=vpc-id,Values=$VPC_ID" \
--query 'length(Subnets)' --output text --region ${AWS_REGION})
echo "✅ Subnets created: $SUBNET_COUNT"
SG_COUNT=$(aws ec2 describe-security-groups --filters "Name=vpc-id,Values=$VPC_ID" \
--query 'length(SecurityGroups)' --output text --region ${AWS_REGION})
echo "✅ Security groups: $SG_COUNT"
else
echo "❌ VPC validation failed"
exit 1
fi
# Validate backend resources
BUCKET_NAME=$(terraform output -raw terraform_state_bucket_name 2>/dev/null || echo "")
TABLE_NAME=$(terraform output -raw terraform_locks_table_name 2>/dev/null || echo "")
if [ -n "$BUCKET_NAME" ] && [ "$BUCKET_NAME" != "null" ]; then
echo "✅ S3 backend bucket: $BUCKET_NAME"
aws s3 ls s3://$BUCKET_NAME --region ${AWS_REGION}
fi
if [ -n "$TABLE_NAME" ] && [ "$TABLE_NAME" != "null" ]; then
echo "✅ DynamoDB locks table: $TABLE_NAME"
aws dynamodb describe-table --table-name $TABLE_NAME --region ${AWS_REGION} \
--query 'Table.{TableName:TableName,Status:TableStatus}' --output table
fi
# Cost analysis
echo "=== Cost Analysis ==="
echo "✅ Current configuration: ~$0/month (free tier optimized)"
echo "✅ No NAT Gateways (saves ~$32/month)"
echo "✅ No VPC Endpoints (saves ~$14/month)"
echo "✅ Using public subnets only for cost optimization"
echo "✅ Using Jenkins credential store (enterprise standard)"
'''
}
}
}
}
}
}
post {
always {
script {
echo "=== Pipeline Execution Summary ==="
echo "🔹 Build: #${BUILD_NUMBER}"
echo "🔹 Action: ${params.ACTION}"
echo "🔹 Environment: ${params.ENVIRONMENT}"
echo "🔹 Duration: ${currentBuild.durationString}"
echo "🔹 Result: ${currentBuild.result ?: 'SUCCESS'}"
// Archive all important artifacts
dir("${env.TF_WORKING_DIR}") {
archiveArtifacts artifacts: '*.tf,terraform.tfvars,*.tfplan,terraform-outputs.*,sonar-project.properties,.backend-config', allowEmptyArchive: true
}
}
}
success {
script {
echo "✅ Foundation Layer pipeline completed successfully!"
if (params.ACTION == 'apply') {
def message = """
🎉 Foundation Layer Deployment Complete!
📊 Deployment Details:
• Environment: ${params.ENVIRONMENT}
• Region credential: ${params.AWS_REGION_ID}
• Project: ${params.PROJECT_NAME}
• Build: #${BUILD_NUMBER}
• Duration: ${currentBuild.durationString}
${env.DEPLOYMENT_APPROVER ? "• Approved by: ${env.DEPLOYMENT_APPROVER}" : ""}
🏗️ Infrastructure Created:
• VPC with multi-AZ public subnets
• Security groups for ALB and ECS
• S3 bucket for Terraform state
• DynamoDB table for state locking
• Internet Gateway and routing
💰 Cost: ~$0/month (free tier optimized)
🚀 Next Steps:
• Phase 2: Deploy Shared Services (ECR, ALB, IAM)
• Phase 3: Deploy Application Layer (ECS Fargate)
• Phase 4: Setup application CI/CD pipeline
📋 Outputs: Check archived artifacts for resource details
"""
echo message
}
}
}
failure {
script {
echo "❌ Foundation Layer pipeline failed!"
// Archive debug information
dir("${env.TF_WORKING_DIR}") {
sh '''
echo "=== Debug Information ===" > debug-info.txt
echo "Build: ${BUILD_NUMBER}" >> debug-info.txt
echo "Action: ${ACTION}" >> debug-info.txt
echo "Environment: ${ENVIRONMENT}" >> debug-info.txt
echo "Region: ${AWS_REGION}" >> debug-info.txt
echo "" >> debug-info.txt
echo "Terraform version:" >> debug-info.txt
terraform version >> debug-info.txt 2>&1 || echo "Terraform not available" >> debug-info.txt
echo "" >> debug-info.txt
echo "AWS CLI version:" >> debug-info.txt
aws --version >> debug-info.txt 2>&1 || echo "AWS CLI not available" >> debug-info.txt
echo "" >> debug-info.txt
echo "Working directory:" >> debug-info.txt
pwd >> debug-info.txt
ls -la >> debug-info.txt 2>&1
echo "" >> debug-info.txt
echo "Terraform state:" >> debug-info.txt
terraform state list >> debug-info.txt 2>&1 || echo "No state available" >> debug-info.txt
'''
archiveArtifacts artifacts: 'debug-info.txt', allowEmptyArchive: true
}
}
}
cleanup {
// Remove local Terraform working files (provider cache and lock file); archived artifacts and remote state are unaffected
dir("${env.TF_WORKING_DIR}") {
sh '''
rm -f .terraform.lock.hcl 2>/dev/null || true
rm -rf .terraform/ 2>/dev/null || true
'''
}
}
}
}