From d70565d9218f1dc46815cff696d2803db1c2f0f6 Mon Sep 17 00:00:00 2001
From: lenape
Date: Sat, 12 Jul 2025 09:58:15 +0000
Subject: [PATCH] automated terminal push

---
 Jenkinsfile | 66 +++++++++++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 62 insertions(+), 4 deletions(-)

diff --git a/Jenkinsfile b/Jenkinsfile
index d038a86..74a5974 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -5,6 +5,7 @@ pipeline {
     GITEA_CREDS = '52ee0829-6e65-4951-925b-4186254c3f21'
     SONAR_HOST = 'https://sonar.jacquesingram.online'
     SONAR_TOKEN = credentials('sonar-token')
+    AWS_CRED_ID = 'aws-ci'
     AWS_ACCOUNT_ID = credentials('AWS_ACCOUNT_ID')
     AWS_REGION = 'us-east-2'
 
@@ -13,6 +14,7 @@
     TF_BACKEND_BUCKET = 'nvhi-atsila-tf-state'
     TF_BACKEND_PREFIX = 'ecs/terraform.tfstate'
     TF_DDB_TABLE = 'nvhi-atsila-locks'
+    SSH_CRED_ID = 'jenkins-ssh'
 
     TF_VAR_cluster_name = 'nvhi-atsila-cluster'
 
@@ -25,12 +27,14 @@
     IMAGE_NAME = "${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPO}"
     IMAGE_TAG = "v1.0.${BUILD_NUMBER}"
   }
+
   stages {
     stage('Checkout') {
       steps {
        checkout scm
       }
     }
+
     stage('SonarQube Scan') {
       steps {
         script {
@@ -45,6 +49,7 @@
         }
       }
     }
+
     stage('Login to ECR') {
       steps {
         withCredentials([[
@@ -51,15 +56,14 @@
           $class: 'AmazonWebServicesCredentialsBinding',
           credentialsId: env.AWS_CRED_ID
         ]]) {
           sh '''
-            aws ecr get-login-password --region ${AWS_REGION} \
-              | docker login \
-              --username AWS \
-              --password-stdin ${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com
+            aws ecr get-login-password --region $AWS_REGION \
+              | docker login --username AWS --password-stdin $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com
           '''
         }
       }
     }
+
     stage('Build & Push Docker Image') {
       steps {
         script {
@@ -68,6 +72,57 @@
         }
       }
     }
+
+    stage('Bootstrap Remote State') {
+      steps {
+        withCredentials([[
+          $class: 'AmazonWebServicesCredentialsBinding',
+          credentialsId: env.AWS_CRED_ID
+        ]]) {
+          sh '''
+            set -e
+
+            # 1) Ensure S3 bucket exists
+            if ! aws s3api head-bucket --bucket $TF_BACKEND_BUCKET 2>/dev/null; then
+              echo "Creating S3 bucket $TF_BACKEND_BUCKET..."
+              aws s3api create-bucket \
+                --bucket $TF_BACKEND_BUCKET \
+                --region $AWS_REGION \
+                --create-bucket-configuration LocationConstraint=$AWS_REGION
+
+              echo "Enabling encryption & versioning..."
+              aws s3api put-bucket-encryption \
+                --bucket $TF_BACKEND_BUCKET \
+                --server-side-encryption-configuration '{"Rules":[{"ApplyServerSideEncryptionByDefault":{"SSEAlgorithm":"AES256"}}]}'
+
+              aws s3api put-bucket-versioning \
+                --bucket $TF_BACKEND_BUCKET \
+                --versioning-configuration Status=Enabled
+            else
+              echo "S3 bucket $TF_BACKEND_BUCKET already exists."
+            fi
+
+            # 2) Ensure DynamoDB table exists
+            if ! aws dynamodb describe-table --table-name $TF_DDB_TABLE 2>/dev/null; then
+              echo "Creating DynamoDB table $TF_DDB_TABLE..."
+              aws dynamodb create-table \
+                --table-name $TF_DDB_TABLE \
+                --attribute-definitions AttributeName=LockID,AttributeType=S \
+                --key-schema AttributeName=LockID,KeyType=HASH \
+                --billing-mode PAY_PER_REQUEST
+
+              echo "Enabling point-in-time recovery..."
+              aws dynamodb update-continuous-backups \
+                --table-name $TF_DDB_TABLE \
+                --point-in-time-recovery-specification Enabled=true
+            else
+              echo "DynamoDB table $TF_DDB_TABLE already exists."
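+              # NOTE: the else-branch leaves an existing table untouched, so
+              # point-in-time recovery is only enabled on the create path above.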
+            fi
+          '''
+        }
+      }
+    }
+
     stage('Terraform Init & Apply') {
       steps {
         withCredentials([[
@@ -81,6 +136,7 @@
               -backend-config="key=${TF_BACKEND_PREFIX}" \
               -backend-config="region=${AWS_REGION}" \
               -backend-config="dynamodb_table=${TF_DDB_TABLE}"
+
             terraform apply -auto-approve \
               -var="cluster_name=${TF_VAR_cluster_name}" \
               -var="vpc_cidr=${TF_VAR_vpc_cidr}" \
@@ -93,6 +149,7 @@
         }
       }
     }
+
     stage('Configure EC2 with Ansible') {
       steps {
         script {
@@ -109,6 +166,7 @@
           )
         }
       }
+
     stage('Register & Deploy to ECS') {
       steps {
         withCredentials([[