To register a conda environment as a selectable Jupyter kernel:
$ source activate thisenv
(thisenv) $ pip install ipykernel
(thisenv) $ python -m ipykernel install --user --name thisenv
| # see also https://github.com/wrobstory/pgshift | |
| import gzip | |
| from io import StringIO, BytesIO | |
| from functools import wraps | |
| import boto | |
| from sqlalchemy import MetaData | |
| from pandas import DataFrame | |
| from pandas.io.sql import SQLTable, pandasSQL_builder |
@route('/static/<path:path>')
def static_proxy(path):
    """Serve a static file stored in S3 under the requested *path*.

    NOTE(review): this block appears truncated in this chunk — the ``try:``
    has no matching ``except`` and no response is returned in view; confirm
    the remainder against the original notebook.
    """
    # ACCESS_KEY / SECRET_ACCESS_KEY / BUCKET_NAME presumably module-level
    # config defined elsewhere in the file — TODO confirm.
    conn = boto.s3.connection.S3Connection(ACCESS_KEY, SECRET_ACCESS_KEY)
    # validate=False skips the extra round-trip that checks the bucket exists.
    bucket = conn.get_bucket(BUCKET_NAME, validate=False)
    key = boto.s3.key.Key(bucket)
    # The URL path segment is used directly as the S3 object key.
    key.key = path
    try:
        # open_read() issues the GET; the raw HTTP response is then
        # available on key.resp, from which the headers are captured.
        key.open_read()
        headers = dict(key.resp.getheaders())
# Serialize the dataframe into an in-memory text buffer as CSV
# (index=False: the row index is not written as a column).
csv_buffer = StringIO()
df.to_csv(csv_buffer, index=False)

# Object key for the upload inside the destination bucket.
s3_key = 'test.csv'

# Push the buffered CSV text to S3 in a single put via the boto3
# resource API bound to the existing session.
s3_resource = aws_session.resource('s3')
(s3_resource
    .Object(s3_bucket, s3_key)
    .put(Body=csv_buffer.getvalue()))