Code snippet toolbox

System Description Code snippet
AWS get current AWS Account ID using boto3
session.client("sts").get_caller_identity().get("Account")
Docker Docker file for building image for AWS lambda
# AWS-provided base image for Python 3.11 container-image Lambdas;
# it defines LAMBDA_TASK_ROOT as the directory for the function code.
FROM public.ecr.aws/lambda/python:3.11
# NOTE(review): tar/gzip presumably needed by the function at runtime — confirm.
RUN yum install -y tar gzip
RUN pip3 install --upgrade pip
# Copy the function code into the Lambda task root.
COPY ./lambda ${LAMBDA_TASK_ROOT}
# Install the function's dependencies into the image.
RUN pip3 install -r ${LAMBDA_TASK_ROOT}/requirements.txt
# Handler: function "lambda_handler" in module "appname".
CMD ["appname.lambda_handler"]
Docker Python script "build_docker_image.py" for building a Docker image, triggered by Terraform
import glob
import json
import os
import subprocess
import sys
import tempfile

def run_command(command):
    """Run *command* through the shell without printing its output.

    shell=True is required because callers pass pipelines
    (e.g. "aws ecr get-login-password ... | docker login ...").

    Raises:
        subprocess.CalledProcessError: if the command exits non-zero.
            The command's stdout/stderr are captured and attached to the
            exception (``e.stdout`` / ``e.stderr``) so failures are
            debuggable instead of silently discarded.
    """
    subprocess.run(command, shell=True, capture_output=True, check=True)

def main() -> None:
    """Build, tag, and push the Docker image to ECR, then report the image URI.

    Invoked by Terraform's "external" data source: the query arrives as a
    single JSON object on stdin and the result must be a JSON object of
    string keys/values printed to stdout.
    """
    # BUG FIX: the original never assigned repository_url and would raise
    # NameError; it is supplied by Terraform in the stdin query.
    tf_input = json.loads(sys.stdin.read())
    repository_url = tf_input["repository_url"]  # <acct>.dkr.ecr.<region>.amazonaws.com/<repo>
    image_name = "docker_image_name"
    # The registry host is everything before the first "/" of the repo URL.
    registry = repository_url.split("/")[0]
    run_command(f"aws ecr get-login-password --region eu-west-1 | docker login --username AWS --password-stdin {registry}")
    run_command(f"docker build -t {image_name} .")
    run_command(f"docker tag {image_name}:latest {repository_url}:latest")
    run_command(f"docker push {repository_url}:latest")

    # Terraform reads this JSON object as the data source's "result" map.
    print(json.dumps({"image_uri": f"{repository_url}"}))


if __name__ == "__main__":
    main()
Git Git Cheat sheet (PDF)
https://education.github.com/git-cheat-sheet-education.pdf
Linux (Re-)install certificates
sudo yum check-update ca-certificates; (($?==100)) && sudo yum update ca-certificates || sudo yum -y reinstall ca-certificates
Linux Copy file from local to server with SCP
scp -i "~/.ssh/privatekey.pem" filename ec2-user@10.x.x.x:filename
Linux Install and setup a cronjob using crontab on a AWS EC2
sudo yum install cronie
crontab -e
# in the editor, set up the command, e.g. an hourly Python script run
0 * * * * /usr/bin/python3 /home/<username>/script.py > /home/<username>/log.txt 2>&1
Linux Install pip using a specific Python version
sudo dnf install python3.11-pip -y
Linux SSH Connection with PEM File
ssh -i "~/.ssh/privatekey.pem"  ec2-user@10.x.x.x
Linux Start a new session using screen. Can be used to run a scheduler script in an infinite loop.
screen -S new_session_name
Linux Update certificates
sudo yum check-update ca-certificates; (($?==100)) && sudo yum update ca-certificates || sudo yum -y reinstall ca-certificates
Linux yum update
sudo yum update -y
MySQL MySQL does not start with error message: MySQL Shutdown Unexpectedly / MySQL wurde unerwartet beendet
1. Rename folder mysql/data to mysql/data_old
2. Make a copy of mysql/backup folder and name it as mysql/data
3. Copy all your database folders from mysql/data_old into mysql/data (except mysql, performance_schema, and phpmyadmin folders)
4. Copy mysql/data_old/ibdata1 file into mysql/data folder
5. Start MySQL from XAMPP control panel
Python convert image from svg to png
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPM
# Parse the SVG file into a ReportLab drawing object, then rasterize
# that drawing to a PNG file.
drawing = svg2rlg("image.svg")
renderPM.drawToFile(drawing, "image.png", fmt="PNG")
Python CSV to PARQUET
import pandas as pd
# Load the CSV into a DataFrame and write it back out in Parquet format.
# NOTE: to_parquet requires a Parquet engine (pyarrow or fastparquet).
df = pd.read_csv('example.csv')
df.to_parquet('output.parquet')
Python Get password / key pair from a .p8 file or AWS Secrets Manager.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives import serialization
import json

# Load a key pair's private key from a local .p8/PEM file.
def get_private_key_from_file(path, password):
    """Read the PEM private key at *path* and return it as unencrypted
    DER-encoded PKCS#8 bytes."""
    with open(path, "rb") as key_file:
        pem_data = key_file.read()
    # An empty/None password means the PEM file is not encrypted.
    passphrase = password.encode() if password else None
    key = serialization.load_pem_private_key(
        pem_data,
        password=passphrase,
        backend=default_backend(),
    )
    return key.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )

# Load a key pair's private key from AWS Secrets Manager.
# Setup: store the PEM private key in the "Plaintext" section of the
# secret value (not as a key/value pair).
def get_private_key_from_secretmanager(session, region, secret_name):
    """Fetch the PEM key stored in *secret_name* and return it as
    unencrypted DER-encoded PKCS#8 bytes."""
    sm_client = session.client(
        service_name='secretsmanager',
        region_name=region
    )
    # Retrieve the secret's details from AWS.
    secret_value = sm_client.get_secret_value(SecretId=secret_name)
    pem_text = secret_value['SecretString']
    key = serialization.load_pem_private_key(
        pem_text.encode('utf-8'),
        password=None,
        backend=default_backend(),
    )
    return key.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )

# Load a single key/value pair from AWS Secrets Manager.
# Setup: store the pair in the "Key/value" section of the secret value
# (not in the "Plaintext" section).
def get_key_value_from_secretmanager(session, region, secret_name):
    """Return the first (key, value) pair of the JSON-encoded secret."""
    sm_client = session.client(
        service_name='secretsmanager',
        region_name=region
    )
    # Retrieve the secret's details from AWS and decode the JSON payload.
    secret_value = sm_client.get_secret_value(SecretId=secret_name)
    payload = json.loads(secret_value['SecretString'])
    # The secret is expected to contain exactly one entry.
    key, value = next(iter(payload.items()))
    return key, value

Python Install / Upgrade PIP
python.exe -m pip install --upgrade pip
Python Python Package for AWS lambda layer
pip install --platform=manylinux2014_x86_64 --target=package --implementation cp --python-version 311 --only-binary=:all: --upgrade -r requirements.txt
Python Scheduler script for running commands every 60 minutes. To be used on Linux in a separate process (see screen -S)
import time
import subprocess
import schedule

def job():
    """Run the target script once, redirecting its output to a log file.

    BUG FIX: the original line had a stray parenthesis that closed the
    call before ``shell=True`` (a SyntaxError), and used the relative
    interpreter path ``usr/bin/python3``.
    """
    # shell=True is required for the > / 2>&1 output redirection.
    subprocess.call("/usr/bin/python3 /home/script.py > /home/log.txt 2>&1", shell=True)

# Register job() to run once every 60 minutes.
schedule.every(60).minutes.do(job)

# Infinite polling loop by design — run it inside `screen` (see the
# "screen -S" snippet) so it survives the SSH session. run_pending()
# only fires jobs that are due; the 1 s sleep keeps CPU usage negligible.
while True:
    schedule.run_pending()
    time.sleep(1)
Python Virtual Environment
python -m venv <virtualenvname>

source <virtualenvname>/bin/activate    #LINUX
<virtualenvname>/Scripts/activate       #WIN
terraform Build Docker image by executing python script "build_docker_image.py" and upload the image to AWS ECR Repository
# timestamp() changes on every plan, so this resource is replaced on each
# apply; resources that reference it via replace_triggered_by are rebuilt too.
resource "null_resource" "always_run" {
    triggers = {
        always_run = "${timestamp()}"
    }
}

# ECR repository that stores the Lambda container image.
resource "aws_ecr_repository" "docker_repository" {
    name = "docker_repository"
}

# Repository policy granting pull (GetDownloadUrlForLayer / BatchGetImage)
# and push (PutImage / *LayerUpload) actions.
# NOTE(review): Principal "*" lets ANY AWS principal push and pull —
# scope this down to the intended account/roles before production use.
resource "aws_ecr_repository_policy" "repository_policy" {
    repository = aws_ecr_repository.docker_repository.name
    policy = <<EOF
        {
          "Version": "2008-10-17",
          "Statement": [
            {
              "Sid": "AllowPushPull",
              "Effect": "Allow",
              "Principal": "*",
              "Action": [
                "ecr:GetDownloadUrlForLayer",
                "ecr:BatchGetImage",
                "ecr:BatchCheckLayerAvailability",
                "ecr:PutImage",
                "ecr:InitiateLayerUpload",
                "ecr:UploadLayerPart",
                "ecr:CompleteLayerUpload"
              ]
            }
          ]
        }
    EOF
}

# Runs build_docker_image.py during plan/apply. Terraform sends the query
# map as one JSON object on the script's stdin; the script must print a
# JSON object of string values (here: image_uri) on stdout, which becomes
# this data source's "result" attribute.
data "external" "build_lambda" {
    program = ["python", "../build_docker_image.py"]
    query = {
        "repository_url": aws_ecr_repository.docker_repository.repository_url
    }
}

# Execution role the Lambda service assumes when running the function.
resource "aws_iam_role" "lambda_role" {
    name = "lambda_role"
    assume_role_policy = <<EOF
        {
          "Version": "2012-10-17",
          "Statement": [
            {
              "Effect": "Allow",
              "Principal": {
                "Service": "lambda.amazonaws.com"
              },
              "Action": "sts:AssumeRole"
            }
          ]
        }
    EOF
}

# Permissions for the function itself: write logs to CloudWatch Logs.
resource "aws_iam_policy" "lambda_policy" {
    name = "lambda_policy"
    policy = <<EOF
        {
          "Version": "2012-10-17",
          "Statement": [
            {
              "Effect": "Allow",
              "Action": [
                "logs:CreateLogGroup",
                "logs:CreateLogStream",
                "logs:PutLogEvents"
              ],
              "Resource": "arn:aws:logs:*:*:*"
            }
          ]
        }
    EOF
}

# Attach the logging policy to the Lambda execution role.
resource "aws_iam_role_policy_attachment" "lambda_policy_attachment" {
    role = aws_iam_role.lambda_role.name
    policy_arn = aws_iam_policy.lambda_policy.arn
}

# Container-image Lambda using the image built and pushed by
# build_docker_image.py (via the external data source above).
resource "aws_lambda_function" "lambda_function" {
    # Ensure the image exists in ECR before (re)creating the function.
    depends_on = [data.external.build_lambda]
    lifecycle {
        # Rebuild the function on every apply (null_resource.always_run
        # is replaced each run via its timestamp() trigger).
        replace_triggered_by = [null_resource.always_run]
    }
    function_name = "lambda_function"
    # BUG FIX: was "data.external.build_lambda.ryesesult[...]" — the
    # external data source exposes its output map as "result".
    image_uri     = data.external.build_lambda.result["image_uri"]
    package_type  = "Image"
    role          = aws_iam_role.lambda_role.arn
    timeout       = 300
    memory_size   = 256
    environment {
        variables = {}
    }
}
Terraform Graph infrastructure to svg file
terraform graph | dot -Tsvg > graph.svg
Terraform Initial setup of AWS resources for storing the terraform state.
####################################### Terraform State ####################################################
# Step1: comment all but resource "aws_s3_bucket" "tfstatebucket" and resource "aws_dynamodb_table" "tfstatedynamotable"
# Step2: terraform apply
# Step3: de-comment Step1
# Step4: terraform apply 

terraform {
  required_version = ">= 0.12"
}

# Default AWS provider; uses the local "default" credentials profile.
provider "aws" {
  region  = "eu-west-1"
  profile = "default"
}

# Remote-state backend. Only enable this block AFTER the bucket and
# DynamoDB table below exist (see the bootstrap steps above) — a
# configuration may declare at most one backend.
terraform {
  backend "s3" {
    bucket         = "tfstate"
    key            = "terraform.state"
    region         = "eu-west-1"
    encrypt        = true
    dynamodb_table = "tfstate"
  }
}

# Versioned, encrypted S3 bucket that stores the Terraform state file.
# NOTE(review): inline acl/versioning/server_side_encryption_configuration
# arguments were deprecated in AWS provider v4+ in favour of separate
# resources — fine for the provider versions this snippet targets.
resource "aws_s3_bucket" "tfstatebucket" {
  bucket = "tfstate-${var.vpc}"
  acl    = "private"
  versioning {
    enabled = true
  }
  server_side_encryption_configuration {
    rule {
      apply_server_side_encryption_by_default {
        sse_algorithm = "AES256"
      }
    }
  }
  tags = {
    Name        = "Terraform state bucket for ${var.applicationname}-${var.environment}"
    Environment = "${var.environment}"
  }
}

# DynamoDB table used by the S3 backend for state locking; the backend
# requires the hash key to be named exactly "LockID".
resource "aws_dynamodb_table" "tfstatedynamotable" {
  name           = "tfstate-${var.vpc}"
  hash_key       = "LockID"
  read_capacity  = 5
  write_capacity = 5
  attribute {
    name = "LockID"
    type = "S"
  }
  tags = {
    Name        = "Dynamodb for terraform state consistency for ${var.applicationname}-${var.environment}"
    Environment = "${var.environment}"
  }
}

# usage: terraform init --backend-config="access_key=XXX" --backend-config="secret_key=XXX"
terraform {
  backend "s3" {
    bucket         = "tfstate-vpc-XXX"
    key            = "terraform.state"
    # BUG FIX: region was the garbled value "eu-cenwesttral-1"; every other
    # snippet in this file targets eu-west-1.
    region         = "eu-west-1"
    encrypt        = true
    # State-locking table. The invalid "lock_table" argument (the pre-0.9
    # name, superseded by dynamodb_table) was removed.
    dynamodb_table = "tfstate-vpc-XXX"
  }
}
####################################### end of Terraform State ####################################################
Windows Activate God Mode. Create a folder on the desktop and rename it to the code snippet.
GodMode.{ED7BA470-8E54-465E-825C-99712043E01C}
Windows add ssh key pair. -->Add Path System Variable: C:\Windows\System32\OpenSSH\ -->start powershell ISE as Administrator
Set-Service ssh-agent -StartupType Manual
Start-Service ssh-agent
ssh-add C:\Users\<username>\.ssh\id_rsa
Windows Create SSH Key pair. --> C:\Users\<Username>\.ssh and look for the key-pairs id_rsa and id_rsa.pub
ssh-keygen -t rsa -C "myemail@example.com"
Windows Overview of port usage
netstat -a -n
Windows Set user environment variable
SETX <var>=<value>
Windows Show all environments variables
Get-ChildItem Env: