- Retrieve your credentials from AWS.
- Then configure the profile in `.aws/config`.
- Remember to delete any previous credentials first.
[default]
output=json
You have to do two things to allow your container to access your host's PostgreSQL database.
Note: by "host" here I mean the server that Docker is running on.
Find your postgresql.conf (in case you don't know where it is)
$ sudo find / -type f -name postgresql.conf
| # Reference | |
| # https://github.com/Delgan/loguru/issues/302 | |
| from logging import __file__, Handler, LogRecord, currentframe | |
| from sys import stdout, stderr | |
| class InterceptHandler(Handler): | |
| def emit(self, record: LogRecord): | |
| # Get corresponding Loguru level if it exists | |
| try: |
-- Show currently running queries on PostgreSQL versions before 9.2,
-- where the backend PID column is named `procpid` and the query text
-- column is named `current_query`.
-- Idle backends and this query itself are filtered out; longest-running
-- queries are listed first.
SELECT procpid,
       age(clock_timestamp(), query_start),
       usename,
       current_query
FROM pg_stat_activity
WHERE current_query != '<IDLE>'
  AND current_query NOT ILIKE '%pg_stat_activity%'
ORDER BY query_start desc;
| -- show running queries (9.2) | |
| SELECT pid, age(clock_timestamp(), query_start), usename, query | |
| FROM pg_stat_activity | |
| WHERE query != '<IDLE>' AND query NOT ILIKE '%pg_stat_activity%' |
| # PROS: Easy to use in CRUDs | |
| # CONS: Invalid autocompletion | |
| from inspect import isclass | |
| from pydantic import BaseModel, create_model | |
| from pydantic_core import SchemaSerializer, SchemaValidator | |
| def omit(*fields): |
| -- Reference | |
| -- https://stackoverflow.com/questions/46470030/postgresql-index-size-and-value-number | |
| SELECT | |
| i.relname "Table Name" | |
| , indexrelname "Index Name" | |
| , pg_size_pretty(pg_total_relation_size(relid)) As "Total Size" | |
| , pg_size_pretty(pg_indexes_size(relid)) as "Total Size of all Indexes" | |
| , pg_size_pretty(pg_relation_size(relid)) as "Table Size" | |
| , pg_size_pretty(pg_relation_size(indexrelid)) "Index Size" |
#!/bin/bash
# Search the system icon directory for icon files whose name contains
# the given fragment.
#
# Usage: ./find-icon.sh <icon-name-fragment>

icon_name="$1"
icon_directory="/usr/share/icons"

# Require an argument: with an empty fragment the pattern collapses to
# "**" and find would list every icon on the system.
if [ -z "$icon_name" ]; then
    echo "Usage: $0 <icon-name-fragment>" >&2
    exit 1
fi

find "$icon_directory" -name "*$icon_name*"
| import subprocess | |
| import IPython | |
| import duckdb | |
| def main(log_group): | |
| def process_log_entry(timestamp, log_group, log_entry): | |
| conn.execute( | |
| "INSERT INTO cloudwatch_logs (timestamp, log_group, log_entry) VALUES (?, ?, ?)", | |
| [timestamp, log_group, log_entry], | |
| ) |
| import subprocess | |
| import IPython | |
| import psycopg2 | |
| def main(log_group): | |
| def process_log_entry(timestamp, log_group, log_entry): | |
| cursor.execute( | |
| "INSERT INTO cloudwatch_logs (timestamp, log_group, log_entry) VALUES (%s, %s, %s)", | |
| (timestamp, log_group, log_entry) |
| """Creating thread safe and managed sessions using SQLAlchemy. | |
| The sessions that are created are expected to be: | |
| - thread safe | |
| - handle committing | |
| - handle rolling back on errors | |
| - handle session removal/releasing once context or thread is closed. | |
| Author: Nitish Reddy Koripalli | |
| License: MIT |