Clone the desired repo:
git clone https://github.com/dutchiechris/fio-conf.git
Create a branch for your changes:
git checkout -b feature123
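After making your changes, one common flow is to commit and push the branch (branch name reused from above):
git add -A
git commit -m "Describe your change"
git push -u origin feature123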
#cloud-config
ssh_authorized_keys:
  - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAgEAt4qwYNjFYPnPJObaNC+VoMYBFcEuUzCCBw6YzcU5f27bm0AwKvZKbVbwCo7IJ7gpNP0tIKvDK9z/qRQmcjWlW9p/cqpCa2a1fb0+8O2BhFXaD0BxuZWeuX8SF+mfJ2NzhgpTnR= kvm
  - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCh1LOHY/929X7qVXqpFQZRJt4uz3rkLVaCHdVD57Y+CEXjEQpoDBBdD6AEkPCvJ+vzFm9LC6KKUFVS7VXxZRUvXvKSWTKUrdsYV9vhNRLFGXYwzyUJov05w+/fxlels/L5Zc== coreos
hostname: @@VMNAME@@
users:
  - name: "core"
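To render a template like this for a specific VM, one simple option is to substitute the @@VMNAME@@ placeholder with sed (file names here are hypothetical):
sed 's/@@VMNAME@@/coreos01/' cloud-config.tmpl > cloud-config.yml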
#! /usr/bin/python
################################################################################
# Manage (create/start/stop/undefine) CoreOS VMs on CentOS 7 using            #
# QEMU/KVM/libvirt. Makes it easy to create/undefine container hosts          #
# for lab purposes.                                                           #
#                                                                              #
# For more info see this blog post:                                           #
# http://beginswithdata.com/2016/12/30/centos7-kvm-coreos/                    #
#                                                                              #
# Copyright (c) Chris Madden. All rights reserved.                            #
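For reference, the same lifecycle operations can be done by hand with virsh (domain name is just an example):
virsh start coreos01       # boot an existing domain
virsh shutdown coreos01    # graceful stop
virsh destroy coreos01     # hard power-off
virsh undefine coreos01    # remove the domain definition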
[root@server02 network-scripts]# cat ifcfg-enp14s0
DEVICE=enp14s0
TYPE=Ethernet
BOOTPROTO=none
ONBOOT=yes
NM_CONTROLLED=no
[root@server02 network-scripts]# cat ifcfg-enp14s0.35
DEVICE=enp14s0.35
TYPE=Ethernet
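A quick way to confirm the VLAN subinterface is tagged as expected once it is up:
ip -d link show enp14s0.35    # should report "vlan protocol 802.1Q id 35"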
1) Install s3cmd
# pip install s3cmd
2) Set up s3cmd (on Windows the config file is C:\Users\<USER>\AppData\Roaming\s3cmd.ini)
access_key = <KEY_HERE>
secret_key = <SECRET_HERE>
check_ssl_certificate = False
check_ssl_hostname = False
host_base = <host>:8082
host_bucket = <host>:8082/%(bucket)s
3) Run commands!
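For example (bucket and file names are made up):
s3cmd mb s3://testbucket                  # create a bucket
s3cmd put bigfile.dat s3://testbucket/    # upload an object
s3cmd ls s3://testbucket                  # list objects
s3cmd get s3://testbucket/bigfile.dat     # download an object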
This doc explains how the registry API works:
https://github.com/docker/distribution/blob/master/docs/spec/api.md
Here’s an example using curl.
Check details of an image that was pushed to the registry:
[root@3-kvm1 ~]# curl -v -X GET 3-docker10.nltestlab.hq.netapp.com:5000/v2/busybox/manifests/latest
* About to connect() to 3-docker10.nltestlab.hq.netapp.com port 5000 (#0)
*   Trying 10.64.28.160...
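Two other read-only endpoints from the same spec that are handy for poking around (same registry host as above):
curl 3-docker10.nltestlab.hq.netapp.com:5000/v2/_catalog             # list repositories
curl 3-docker10.nltestlab.hq.netapp.com:5000/v2/busybox/tags/list    # list tags for an image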
#!/bin/sh
#
# Google Cloud Platform BigQuery script to copy data from one region to another
#
# Disclaimer: This is not an officially supported Google product.
# The code can be used as a baseline but is not meant for production use.
# Project that owns source and destination datasets
PROJECT=my-project
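One common way to move a table across regions by hand is an extract/copy/load flow like the sketch below (dataset, table, and bucket names are made up):
# Export the source table to a bucket in the source region
bq --project_id="$PROJECT" extract --destination_format=AVRO \
    src_dataset.my_table 'gs://src-region-bucket/my_table/*.avro'
# Copy the exported files to a bucket in the destination region
gsutil -m cp 'gs://src-region-bucket/my_table/*' gs://dst-region-bucket/my_table/
# Load the files into the destination dataset
bq --project_id="$PROJECT" load --source_format=AVRO \
    dst_dataset.my_table 'gs://dst-region-bucket/my_table/*.avro'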
SELECT
  w.airline_name,
  COUNT(departure_delay) AS delay
FROM
  `bigquery-samples.airline_ontime_data.flights` AS f
JOIN (
  SELECT
    REGEXP_EXTRACT(airline, r'.*: (.*)') AS airline_code,
    REGEXP_EXTRACT(airline, r'(.*):.*') AS airline_name
  FROM
Create a Cloud Storage bucket and upload your audio file to it.
Go to the Speech API longrunningrecognize docs and build a request body like the snippet below, where the gs:// URI points to your file in Cloud Storage:
{
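A representative request, shown as a curl call with placeholder values (encoding, sample rate, and bucket/file names depend on your audio):
curl -s -X POST \
  -H "Content-Type: application/json" \
  "https://speech.googleapis.com/v1/speech:longrunningrecognize?key=YOUR_API_KEY" \
  -d '{
        "config": {"encoding": "LINEAR16", "sampleRateHertz": 16000, "languageCode": "en-US"},
        "audio": {"uri": "gs://your-bucket/your-audio.wav"}
      }'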
/*
# Make an HdX disk with 5GiB/s+ perf, or use local SSD, and create a test file
sudo mkfs.ext4 /dev/disk/by-id/google-adhoc1
sudo mount /dev/disk/by-id/google-adhoc1 /mnt/tmpfs
dd if=/dev/zero of=/mnt/tmpfs/1g.dat bs=1M count=50000
# Set keys with access to your bucket
export AWS_ACCESS_KEY_ID="<YOUR GCS HMAC ACCESS KEY>"
export AWS_SECRET_ACCESS_KEY="<YOUR GCS HMAC SECRET KEY>"
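With the HMAC keys exported, an S3-compatible client can be pointed at the GCS XML API endpoint; for example, with the AWS CLI (bucket name is made up):
aws s3 cp /mnt/tmpfs/1g.dat s3://my-gcs-bucket/1g.dat --endpoint-url https://storage.googleapis.com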