
export IMAGE_FAMILY="tf-latest-cpu" # or any other image family you need
export ZONE="us-west1-b"
export INSTANCE_NAME="my-instance"
export INSTANCE_TYPE="n1-standard-8"

gcloud compute instances create $INSTANCE_NAME \
  --zone=$ZONE \
  --image-family=$IMAGE_FAMILY \
  --image-project=deeplearning-platform-release \
  --machine-type=$INSTANCE_TYPE \
  --boot-disk-size=120GB \
  --min-cpu-platform=Intel\ Skylake
export PROJECT_NAME="" # put your project name
export INSTANCE_NAME="" # put the instance name
# pick a zone that supports TPUs:
# https://cloud.google.com/tpu/docs/regions
export ZONE="us-central1-b"

# the image family/project and scopes below are one reasonable choice for the
# TPU-driving VM (a CPU TensorFlow image that is allowed to call the TPU API)
gcloud compute instances create $INSTANCE_NAME \
  --project=$PROJECT_NAME \
  --zone=$ZONE \
  --machine-type=n1-standard-2 \
  --image-family=tf-latest-cpu \
  --image-project=deeplearning-platform-release \
  --scopes=cloud-platform

# SSH in and forward port 8080 (JupyterLab) to your local machine
gcloud compute ssh --zone $ZONE $INSTANCE_NAME -- -L 8080:localhost:8080
import tensorflow as tf

TPU_WORKER = '10.0.101.2:8470' # put your IP!
session = tf.Session('grpc://' + TPU_WORKER,
                     config=tf.ConfigProto(isolate_session_state=True))
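
To confirm the session is actually talking to the TPU worker, you can list the devices it exposes. A minimal sketch, assuming the TF 1.x session created above and a reachable TPU_WORKER address:

# TPU cores should show up as .../device:TPU:0 through TPU:7
for device in session.list_devices():
    print(device.name)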
export IMAGE_FAMILY="chainer-latest-cu92-experimental"
export ZONE="us-west1-b"
export INSTANCE_NAME="chainer-test"

gcloud compute instances create $INSTANCE_NAME \
  --zone=$ZONE \
  --image-family=$IMAGE_FAMILY \
  --image-project=deeplearning-platform-release \
  --maintenance-policy=TERMINATE \
  --accelerator='type=nvidia-tesla-v100,count=1' \
  --metadata='install-nvidia-driver=True'
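
Once the instance is up and the NVIDIA driver install has finished, a quick sanity check from Python on the VM; a minimal sketch, assuming Chainer 4+ from the image is on the default environment:

import chainer

# confirm Chainer sees CUDA (and cuDNN) on the attached V100
print(chainer.__version__)
print('CUDA available:', chainer.backends.cuda.available)
print('cuDNN enabled:', chainer.backends.cuda.cudnn_enabled)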
export IMAGE_FAMILY="common-cpu"
export ZONE="us-west1-b"
export INSTANCE_NAME="jupyter-with-custom-user"

gcloud compute instances create $INSTANCE_NAME \
  --zone=$ZONE \
  --image-family=$IMAGE_FAMILY \
  --image-project=deeplearning-platform-release \
  --metadata="jupyter-user=bob"
%%bigquery
SELECT
source_year AS year,
COUNT(is_male) AS birth_count
FROM `bigquery-public-data.samples.natality`
GROUP BY year
ORDER BY year DESC
LIMIT 15
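
The same natality query can also be run outside the %%bigquery cell magic with the BigQuery client library; a minimal sketch, assuming google-cloud-bigquery is installed and the VM's default credentials/project are used:

from google.cloud import bigquery

client = bigquery.Client()  # picks up the VM's default project and credentials
query = """
SELECT source_year AS year, COUNT(is_male) AS birth_count
FROM `bigquery-public-data.samples.natality`
GROUP BY year
ORDER BY year DESC
LIMIT 15
"""
df = client.query(query).to_dataframe()  # requires pandas
print(df.head())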
import numpy as np
import tensorflow as tf
import pandas as pd

FEATURES = ["SqFt", "Bedrooms", "Offers"]
LABEL = "Price"

feature_cols = [tf.feature_column.numeric_column(k) for k in FEATURES]
estimator = tf.estimator.LinearRegressor(
    feature_columns=feature_cols)
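
To actually train the LinearRegressor you also need an input function. A minimal sketch using the TF 1.x pandas input helper, assuming a hypothetical DataFrame df with the SqFt, Bedrooms, Offers and Price columns (the CSV name below is only an illustration):

df = pd.read_csv("house_prices.csv")  # hypothetical file with FEATURES and LABEL columns

train_input_fn = tf.estimator.inputs.pandas_input_fn(
    x=df[FEATURES],
    y=df[LABEL],
    batch_size=32,
    num_epochs=None,  # repeat indefinitely; steps below bounds training
    shuffle=True)

estimator.train(input_fn=train_input_fn, steps=1000)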