Skip to content

Instantly share code, notes, and snippets.

View artem-hatchenko's full-sized avatar

Artem Hatchenko artem-hatchenko

View GitHub Profile
# Block for the modules
check_modules() {
# Run the "tflint" command and write the result to the $MODULES_OUTPUT file
# NOTE(review): `cd` is unchecked and all expansions are unquoted — paths with
# spaces would break this; consider `cd ... || return` and quoting. TODO confirm.
cd $TF_DIR/$MODULE_DIR/$1
# Tag the output section with the name of the module being checked
echo -e "\nMODULE: $1" | tee -a $MODULES_OUTPUT
# Only run `terraform init` when this module itself calls other modules,
# so nested module sources are available before linting
if grep -qP '^module' *.tf
then echo "Found module call. Running terraform init..."
terraform init &>/dev/null
# NOTE(review): "$TFLIT" looks like a typo for "$TFLINT" — verify against the
# variable's definition, which is not visible in this excerpt
$TFLIT --config $MODULES_CONF_FILE | tee -a $MODULES_OUTPUT
# Remove the .terraform directory and lock files created by `terraform init`
rm -rf .terraform*
# NOTE(review): the enclosing `if` (no `fi`) and the function (no `}`) are
# truncated in this excerpt; the lines below appear to belong to the
# top-level driver script, not to check_modules().
# Get the list of ALL modules in the form: "./rds/common", "./rds/instance" etc.
MODULES="$(cd $TF_DIR/$MODULE_DIR; find . -type f -name '*.tf' -exec dirname {} \;)"
# Convert the space-separated list to one path per line, then de-duplicate
SORTED="$(echo $MODULES | sed -e $'s/ /\\\n/g' | sort | uniq)"
# Get the list of used modules from the INPUT_MODULE_LIST variable (i.e. "ecr,rds,eks", "s3")
# Split the comma-separated string into a bash array (reusing the same name)
IFS=',' read -ra INPUT_MODULE_LIST <<<"$INPUT_MODULE_LIST"
# If the modules used (INPUT_MODULE_LIST variable) are in the list of all modules (SORTED variable),
# then we call the "check_modules()" function and use the "tflint" command for the module being checked
if [ -n "${INPUT_MODULE_LIST}" ]; then
---
# List of Terraform modules the pipeline should lint (configuration fragment)
modules:
- acm
- s3
- cloudfront
- wafv2-fortinet
// Map of Jenkins agent labels to the per-environment deployment settings used
// on that worker (AWS account id, environment name, assume-role ARN, VPC CIDR).
// The "\\/" escaping of "/" suggests these values are later substituted into a
// sed/regex replacement — TODO confirm against the consumer of this map.
NODES_CONFIG = [
"tf-ci-worker-1": [
AWS_ACCOUNT: "111111111111",
ENV_NAME: "prod1",
DEPLOY_ROLE_ARN: "arn:aws:iam::111111111111:role\\/terraform",
PROD_VPC_CIDR: "10.210.0.0\\/16"
],
// NOTE(review): this entry and the closing "]" of NODES_CONFIG are truncated
// in this excerpt.
"tf-ci-worker-2": [
AWS_ACCOUNT: "222222222222",
ENV_NAME: "prod2",
options { // Default build options
// Serialize builds on the shared "tf-ci" agents; the name of the acquired
// node is exposed to the pipeline via the LOCKED_NODE variable
lock label: "tf-ci", variable: "LOCKED_NODE", quantity: 1
// Keep only the last 10 builds to bound disk usage
buildDiscarder(logRotator(numToKeepStr: '10'))
// Abort any run that exceeds 4 hours
timeout(time: 240, unit:'MINUTES')
// Prefix every console-log line with a timestamp
timestamps()
}
// Parse the Bitbucket webhook payload (json_payload env var) and export the
// pull-request attributes as environment variables for later stages.
def payload_json = "${env.json_payload}".substring(1, "${env.json_payload}".length() - 1); // removing extra curly braces from json payload
// Re-wrap in braces and parse; JsonSlurper is presumably imported elsewhere
// in this Jenkinsfile — not visible in this excerpt.
def parsed_json = new JsonSlurper().parseText("{$payload_json}");
env.REPO_FULL_NAME = parsed_json.pullrequest.source.repository.full_name // Get full repo name (consists of: <workspace>/<repo_slug>)
env.COMMIT_HASH = parsed_json.pullrequest.source.commit.hash // Get pull request commit hash
env.PULL_REQUEST_ID = parsed_json.pullrequest.id // Get pull request id
env.PULL_REQUEST_TITLE = parsed_json.pullrequest.title // Get pull request title
env.PULL_REQUEST_URL = parsed_json.pullrequest.links.html.href // Get pull request URL
env.PULL_REQUEST_COMMENT_COUNT = parsed_json.pullrequest.comment_count // Get number of comments in pull request
env.BRANCH = parsed_json.pullrequest.source.branch.name // Get pull request branch name
env.AUTHOR = parsed_json.pullrequest.author.nickname // Get username of author
// Derive the author's corporate e-mail from the Bitbucket nickname:
// whitespace becomes ".", lowercased, then the company domain is appended
env.AUTHOR_EMAIL = "${env.AUTHOR}".replaceAll("\\s",".").toLowerCase() + "@automat-it.com"
// Look up the PR author's Slack user id by e-mail and, on the first build of a
// pull request, post a welcome comment back to Bitbucket.
stage("Send notification") {
steps {
script {
withCredentials([string(credentialsId: 'slack-app', variable: 'slack_token')]) {
// Query the Slack API for the user id matching the derived e-mail.
// NOTE(review): the token is interpolated onto the curl command line, where
// it can leak via `ps` or shell tracing — consider passing it via a file or
// environment variable instead. TODO confirm.
env.SLACK_USER_ID = sh (script: """
SLACK_USER_ID="\$(curl -d \"token=${slack_token}\" -d \"email=${env.AUTHOR_EMAIL}\" https://slack.com/api/users.lookupByEmail | jq -r '.user.id')"
echo \$SLACK_USER_ID""", returnStdout: true).trim()
}
// Only greet once: comment_count == 0 means the PR has no comments yet
if ("${env.PULL_REQUEST_COMMENT_COUNT}" == "0") {
sendBitbucketComment ("${env.REPO_FULL_NAME}", "${env.PULL_REQUEST_ID}", "${env.WELCOME_COMMENT}")
// NOTE(review): the stage is truncated here — its closing braces are not
// visible in this excerpt.
import sys
import boto3
 
def cleanup():
    """Enumerate the top-level "directories" (common prefixes) of an S3 bucket.

    NOTE(review): truncated in this excerpt — the loop body appears to continue
    past the visible lines, and ``bucket`` is presumably a module-level name
    defined elsewhere in the script; verify against the full source.
    """
    # Sort-key helper: an S3 object's LastModified datetime as a Unix timestamp
    get_last_modified = lambda obj: int(obj['LastModified'].strftime('%s'))

    s3 = boto3.client('s3')
    # Delimiter='/' groups keys by their first path segment, so the bucket's
    # top-level "folders" come back under 'CommonPrefixes'
    result = s3.list_objects(Bucket=bucket, Delimiter='/')
    for dir in result.get('CommonPrefixes'):
        print('Directory: ' + str(dir['Prefix']))
AWSTemplateFormatVersion: "2010-09-09"
Description: 'Template for Lambda Function for stopping AWS resources (EC2, RDS, ASG)'
### OUTPUT ###
# NOTE(review): the YAML indentation of this fragment appears to have been
# lost when the page was scraped, and the template is truncated below.
Outputs:
LambdaRoleARN:
Description: Role for Lambda execution.
Value:
# NOTE(review): Fn::GetAtt normally takes [LogicalId, Attribute]; the
# "- Arn" attribute line appears to be cut off in this excerpt.
Fn::GetAtt:
- LambdaRole