# Install build prerequisites (autoconf toolchain, protobuf-style deps).
sudo apt-get install autoconf automake libtool curl make g++ unzip -y
cd ~

# `update-function-configuration` OVERWRITES the existing set of envars.
# In order to *ADD* variables we need to read the existing envars and add to that.
# This command uses `jq` to read and transform the json result to an envar,
# then update the lambda configuration.

# create the updated envar set
export YOUR_FUNCTION_NAME={populate this}
# NOTE: fixed typo ENVIRONMNET -> ENVIRONMENT; update any later references accordingly.
export UPDATED_ENVIRONMENT_VARIABLES=$(aws lambda get-function-configuration --function-name "${YOUR_FUNCTION_NAME}" | \
    jq --compact-output ".Environment + {\"Variables\": (.Environment.Variables + {\"NEW_ENVAR_NAME\": \"NEW_ENVAR_VALUE\"})}")
# check
import json

import boto3

# Secrets Manager client pointed at a local (localstack-style) endpoint
# rather than real AWS — port 4584 is the local secretsmanager service.
secretsmgr_endpoint_url = 'http://localhost:4584'
SM_CLIENT = boto3.client('secretsmanager', endpoint_url=secretsmgr_endpoint_url)

# Set Secret
# Sample secret id and a JSON-encoded payload used for the set-secret call.
sample_secret_id = 'some:id:2'
secret_data = json.dumps({'key1': 'value1', 'key2': 'value2'})
import os

# Region used to build the default AWS service endpoints; falls back to
# Tokyo (ap-northeast-1) when AWS_DEFAULT_REGION is not set.
AWS_REGION = os.getenv('AWS_DEFAULT_REGION', 'ap-northeast-1')

DEFAULT_S3_SERVICE_ENDPOINT = f'https://s3.{AWS_REGION}.amazonaws.com'
DEFAULT_SQS_SERVICE_ENDPOINT = f'https://sqs.{AWS_REGION}.amazonaws.com'
DEFAULT_SNS_SERVICE_ENDPOINT = f'https://sns.{AWS_REGION}.amazonaws.com'

# Per-service endpoint map; each entry can be overridden via an envar
# (e.g. to point clients at localstack) and falls back to the real AWS
# endpoint otherwise.
AWS_SERVICE_ENDPOINTS = {
    's3': os.getenv('S3_SERVICE_ENDPOINT', DEFAULT_S3_SERVICE_ENDPOINT),
    # NOTE(review): the source dict was truncated after the 's3' entry;
    # the sqs/sns entries below are reconstructed from the DEFAULT_*
    # constants above — confirm against the original file.
    'sqs': os.getenv('SQS_SERVICE_ENDPOINT', DEFAULT_SQS_SERVICE_ENDPOINT),
    'sns': os.getenv('SNS_SERVICE_ENDPOINT', DEFAULT_SNS_SERVICE_ENDPOINT),
}
import json
import subprocess
from xml.etree import ElementTree

import boto3

from . import settings

# Module-level S3 client bound to the endpoint configured in settings,
# so the target service (real AWS vs. a local stand-in) is decided by
# configuration rather than hard-coded here.
S3 = boto3.client(
    's3',
    endpoint_url=settings.AWS_SERVICE_ENDPOINTS['s3'],
)
1. Create a User (User1A) in Account-A and generate access keys for it
   (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY).
2. Create a Group in Account-A.
3. Create a Role (MyRoleB) in Account-B with the permissions you want the user to have.
| def parse_arn(arn_str: str) -> Tuple[str, str, str, Optional[str], str, Optional[List[str]]]: | |
| """ | |
| https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html | |
| arn:partition:service:region:account-id:resource | |
| arn:partition:service:region:account-id:resourcetype/resource | |
| arn:partition:service:region:account-id:resourcetype/resource/qualifier | |
| arn:partition:service:region:account-id:resourcetype/resource:qualifier | |
| arn:partition:service:region:account-id:resourcetype:resource | |
| arn:partition:service:region:account-id:resourcetype:resource:qualifier |
#!/bin/bash
# Copy camcorder .MTS files from the mounted SD card into a dated
# dump directory on local storage, e.g. /storage/VIDEO/HOME/20240131-video-dump

# Command to transfer
# (idiom: use `date` directly instead of shelling out to python3 for a timestamp)
export VIDEO_DIRECTORY=/storage/VIDEO/HOME/$(date "+%Y%m%d-video-dump")
echo "Making directory: ${VIDEO_DIRECTORY}"
# -p: don't fail if the directory already exists (e.g. a second run on the same day)
mkdir -p "${VIDEO_DIRECTORY}"
echo "Copying..."
rsync --progress -h /media/monkut/JVCCAM_MEM/AVCHD/BDMV/STREAM/*.MTS "${VIDEO_DIRECTORY}"
| """ | |
| Get the latest model assets key from resulting sagemaker training session | |
| """ | |
| import boto3 | |
| S3 = boto3.client('s3') | |
| def get_latest_model_assets_key(bucket: str, prefix: str): | |
| model_assets = [] | |
| paginator = S3.get_paginator('list_objects') |