Generating some data first:
# R version 3.3.0
require(data.table) ## 1.9.7, commit 2433, github
require(dplyr) ## devel, commit 3189, github

class Respond extends Service[Request, Response] with Logger {
| def apply(request: Request) = { | |
| try { | |
| request.method -> Path(request.path) match { | |
| case GET -> Root / "todos" => Future.value { | |
| val data = Todos.allAsJson | |
| debug("data: %s" format data) | |
| Responses.json(data, acceptsGzip(request)) | |
| } | |
| case GET -> Root / "todos" / id => Future.value { |
| import psycopg2 | |
| from sshtunnel import SSHTunnelForwarder | |
| # For interactive work (on ipython) it's easier to work with explicit objects | |
| # instead of contexts. | |
| # Create an SSH tunnel | |
| tunnel = SSHTunnelForwarder( | |
| ('128.199.169.188', 22), | |
| ssh_username='<username>', |
| // 'Hello World' nodejs6.10 runtime AWS Lambda function | |
| exports.handler = (event, context, callback) => { | |
| console.log('Hello, logs!'); | |
| callback(null, 'great success'); | |
| } |
There are four possible serialization formats when using Avro:
-- Generate one "ALTER TABLE ... OWNER TO username;" statement per user table,
-- skipping the system schemas. Replace "username" with the target role before
-- running the generated statements.
-- quote_ident() is required: without it, mixed-case or special-character
-- schema/table names would produce invalid DDL.
SELECT 'ALTER TABLE ' || quote_ident(schemaname) || '.' || quote_ident(tablename) || ' OWNER TO username;'
FROM pg_tables
WHERE schemaname NOT IN ('pg_catalog', 'information_schema')
ORDER BY schemaname, tablename;
-- Generate one "ALTER SEQUENCE ... OWNER TO username;" statement per user
-- sequence, skipping the system schemas. Replace "username" with the target
-- role before running the generated statements.
-- quote_ident() guards against mixed-case or special-character identifiers
-- that would otherwise yield broken DDL.
SELECT 'ALTER SEQUENCE ' || quote_ident(sequence_schema) || '.' || quote_ident(sequence_name) || ' OWNER TO username;'
FROM information_schema.sequences
WHERE sequence_schema NOT IN ('pg_catalog', 'information_schema')
ORDER BY sequence_schema, sequence_name;
| SELECT 'ALTER VIEW '|| table_schema || '.' || table_name ||' OWNER TO username;' | |
| FROM information_schema.views WHERE NOT table_schema IN ('pg_catalog', 'information_schema') |