Python Code Templates

AWS / Boto3 Functions and Lambda Functions

Module header functions

Prerequisite functions defined early in a module:

import boto3
import traceback
import os
import json
import logging
from datetime import datetime
import sys
from inspect import getframeinfo, stack
# Other imports here...

def get_logger(level=logging.INFO):
    logger = logging.getLogger()
    # Remove any pre-attached handlers (for example the AWS Lambda runtime's default handler) to avoid duplicate log lines
    for h in logger.handlers:
        logger.removeHandler(h)
    formatter = logging.Formatter('%(funcName)s:%(lineno)d -  %(levelname)s - %(message)s')
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(level)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    logger.setLevel(level)
    return logger

def get_client(client_name: str, region: str='eu-central-1', boto3_clazz=boto3):
    # boto3_clazz is injectable so that unit tests can pass in a mock instead of the real boto3 module
    return boto3_clazz.client(client_name, region_name=region)
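
A brief usage sketch of the two helpers above; the service name, region and log level are examples only:

logger = get_logger(level=logging.DEBUG)      # root logger writing to stdout
s3_client = get_client(client_name='s3')      # boto3 S3 client in the default region (eu-central-1)
logger.info('Client created for region {}'.format(s3_client.meta.region_name))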

Function returning a dict with an optional next token

Code:

def function_name(
    some_var_i_need,
    client=get_client(client_name="elbv2"), 
    next_token: str=None,
    logger=get_logger(level=logging.INFO)
)->dict:
    result = dict()
    try:
        if next_token is not None:
            response = client.some_boto3_function("parameters as required...", Marker=next_token)
        else:
            response = client.some_boto3_function("parameters as required...")
        logger.debug('response={}'.format(json.dumps(response, default=str)))
        for something in response['WhatEverTheContractIs']:
            # Process... populate result
            pass
        if 'NextMarker' in response:  # Search for the specific next token as per the API contract
            result = {**result, **function_name(some_var_i_need=some_var_i_need, client=client, logger=logger, next_token=response['NextMarker'])}
    except:
        logger.error('EXCEPTION: {}'.format(traceback.format_exc()))
    logger.debug('result={}'.format(json.dumps(result, default=str)))
    return result
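
As a concrete illustration of the pattern (not part of the original template), the sketch below applies it to the elbv2 describe_load_balancers call, which pages with Marker / NextMarker; the function name and the mapping of load balancer names to ARNs are assumptions made for this example:

def get_load_balancers(
    client=get_client(client_name='elbv2'),
    next_token: str=None,
    logger=get_logger(level=logging.INFO)
)->dict:
    result = dict()
    try:
        if next_token is not None:
            response = client.describe_load_balancers(Marker=next_token)
        else:
            response = client.describe_load_balancers()
        logger.debug('response={}'.format(json.dumps(response, default=str)))
        for lb in response['LoadBalancers']:
            result[lb['LoadBalancerName']] = lb['LoadBalancerArn']   # map name to ARN
        if 'NextMarker' in response:
            result = {**result, **get_load_balancers(client=client, logger=logger, next_token=response['NextMarker'])}
    except:
        logger.error('EXCEPTION: {}'.format(traceback.format_exc()))
    logger.debug('result={}'.format(json.dumps(result, default=str)))
    return result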

Function returning a list with an optional next token

Code:

def function_name(
    some_var_i_need,
    client=get_client(client_name="elbv2"), 
    next_token: str=None,
    logger=get_logger(level=logging.INFO)
)->list:
    result = list()
    try:
        if next_token is not None:
            response = client.some_boto3_function("parameters as required...", Marker=next_token)
        else:
            response = client.some_boto3_function("parameters as required...")
        logger.debug('response={}'.format(json.dumps(response, default=str)))
        # Process response here and populate result...
        if 'NextMarker' in response:
            result += function_name(
                some_var_i_need=some_var_i_need,
                client=client, 
                logger=logger,
                next_token=response['NextMarker']
            )
    except:
        logger.error('EXCEPTION: {}'.format(traceback.format_exc()))
    logger.debug('result={}'.format(result))
    return result
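
A similar concrete sketch for the list variant, this time assuming the elbv2 describe_target_groups call (also paged with Marker / NextMarker); it simply collects the target group ARNs:

def get_target_group_arns(
    client=get_client(client_name='elbv2'),
    next_token: str=None,
    logger=get_logger(level=logging.INFO)
)->list:
    result = list()
    try:
        if next_token is not None:
            response = client.describe_target_groups(Marker=next_token)
        else:
            response = client.describe_target_groups()
        logger.debug('response={}'.format(json.dumps(response, default=str)))
        for target_group in response['TargetGroups']:
            result.append(target_group['TargetGroupArn'])
        if 'NextMarker' in response:
            result += get_target_group_arns(client=client, logger=logger, next_token=response['NextMarker'])
    except:
        logger.error('EXCEPTION: {}'.format(traceback.format_exc()))
    logger.debug('result={}'.format(result))
    return result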

Lambda Skeleton

Template:

# ADD the header as per section ``Module header functions``

CACHE_TTL_DEFAULT = 600
cache = dict()

def get_utc_timestamp(with_decimal: bool = False):
    # Seconds since the Unix epoch (UTC); float when with_decimal is True, otherwise int
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    now = datetime.utcnow()
    timestamp = (now - epoch).total_seconds()
    if with_decimal:
        return timestamp
    return int(timestamp)
    
    
def get_debug()->bool:
    # The DEBUG environment variable enables debug logging when set to a non-zero integer (e.g. '1')
    try:
        return bool(int(os.getenv('DEBUG', '0')))
    except:
        pass
    return False
    

def get_cache_ttl(logger=get_logger())->int:
    try:
        return int(os.getenv('CACHE_TTL', '{}'.format(CACHE_TTL_DEFAULT)))
    except:
        logger.error('EXCEPTION: {}'.format(traceback.format_exc()))
    return CACHE_TTL_DEFAULT


def refresh_environment_cache(logger=get_logger()):
    # Re-read environment variables only once the cached copy has expired (controlled by CACHE_TTL)
    global cache
    now = get_utc_timestamp(with_decimal=False)
    if 'Environment' in cache:
        if cache['Environment']['Expiry'] > now:
            return
    cache['Environment'] = {
        'Expiry': get_utc_timestamp() + get_cache_ttl(logger=logger),
        'Data': {
            'CACHE_TTL': get_cache_ttl(logger=logger),
            'DEBUG': get_debug(),
            # Other ENVIRONMENT variables can be added here... The environment will be re-read after the CACHE_TTL 
        }
    }
    logger.debug('cache: {}'.format((json.dumps(cache))))


def debug_log(message: str, variables_as_dict: dict=dict(), variable_as_list: list=list(), logger=get_logger(level=logging.INFO)):
    """
        See:
            https://docs.python.org/3/library/stdtypes.html#str.format
            https://docs.python.org/3/library/string.html#formatstrings

        For this function, the `message` is expected to contain keyword placeholders, and `variables_as_dict` must hold the values matched to those keywords. Alternatively, positional `{}` placeholders in `message` are filled from `variable_as_list`.

        Example:

            >>> d = {'one': 1, 'number-two': 'two', 'SomeBool': True}
            >>> message = 'one = {one} and the number {number-two}. Yes, it is {SomeBool}'
            >>> message.format(**d)
            'one = 1 and the number two. Yes, it is True'

            >>> l = ('one', 2, True)
            >>> message = '{} and {}'
            >>> message.format(*l)
            'one and 2'

    """
    if cache['Environment']['Data']['DEBUG'] is True:
        try:
            caller = getframeinfo(stack()[1][0])
            caller_str = '{}():{}'.format(caller.function, caller.lineno)
            message = '[{}]  {}'.format(caller_str, message)
            if len(variables_as_dict) > 0:
                logger.debug(message.format(**variables_as_dict))
            else:
                logger.debug(message.format(*variable_as_list))
        except:
            pass


###############################################################################
###                                                                         ###
###                         M A I N    H A N D L E R                        ###
###                                                                         ###
###############################################################################

    
def handler(
    event,
    context,
    logger=get_logger(level=logging.INFO),
    boto3_clazz=boto3,
    run_from_main: bool=False
):
    refresh_environment_cache(logger=logger)
    if cache['Environment']['Data']['DEBUG'] is True and run_from_main is False:
        logger  = get_logger(level=logging.DEBUG)
    
    debug_log('event={}', variable_as_list=[event], logger=logger)
    
    return {"Result": "Ok", "Message": None}    # Adapt to suite the use case....


###############################################################################
###                                                                         ###
###                        M A I N    F U N C T I O N                       ###
###                                                                         ###
###############################################################################


if __name__ == '__main__':
    logger = logging.getLogger("my_lambda")
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(funcName)s:%(lineno)d -  %(levelname)s - %(message)s')

    ch = logging.StreamHandler()
    if get_debug() is True:
        ch.setLevel(logging.DEBUG)    
    else:
        ch.setLevel(logging.INFO)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    
    if get_debug() is True:
        logger.setLevel(logging.DEBUG)
    else:    
        logger.setLevel(logging.INFO)
    handler(event={}, context=None, logger=logger, run_from_main=True)
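
To exercise the skeleton locally, the DEBUG environment variable (read by get_debug()) toggles verbose logging. A minimal local-test sketch, assuming the skeleton above was saved as my_lambda.py (the filename and the test event are assumptions):

import os
os.environ['DEBUG'] = '1'        # picked up by get_debug() and cached by refresh_environment_cache()
os.environ['CACHE_TTL'] = '60'   # optional: shorter environment cache TTL while testing

import logging
from my_lambda import handler, get_logger

result = handler(
    event={'Source': 'local-test'},   # any test event payload
    context=None,
    logger=get_logger(level=logging.DEBUG),
    run_from_main=True
)
print(result)                         # expected: {'Result': 'Ok', 'Message': None}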