{
  "displayName": "e2e-tutorial-automl",
  "inputDataConfig": {
    "datasetId": "DATASET_ID",
    "filterSplit": {
      "trainingFilter": "labels.aiplatform.googleapis.com/ml_use=training",
      "validationFilter": "labels.aiplatform.googleapis.com/ml_use=validation",
      "testFilter": "labels.aiplatform.googleapis.com/ml_use=test"
    }
  }
}
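This inputDataConfig tells Vertex AI to split the dataset by its ml_use label. As a hedged sketch (not part of the original gist), a spec like this could be submitted with the Vertex AI GAPIC client; the file name and PROJECT_ID below are placeholders, and a real spec also needs fields (e.g. trainingTaskDefinition) that the snippet above elides.

import json

from google.cloud import aiplatform_v1

# Hedged sketch: submit a training pipeline spec like the one above.
client = aiplatform_v1.PipelineServiceClient(
    client_options={'api_endpoint': 'us-central1-aiplatform.googleapis.com'})

with open('training_pipeline.json') as f:  # the JSON document shown above
    spec = json.load(f)

pipeline = client.create_training_pipeline(
    parent='projects/PROJECT_ID/locations/us-central1',
    training_pipeline=spec)
print(pipeline.name)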
from kfp import dsl
from google_cloud_pipeline_components import aiplatform

@dsl.pipeline(
    name='e2e-tutorial-automl-pipeline',
    description='A simple AutoML training pipeline',
    pipeline_root='GCS_PATH_FOR_PIPELINE'
)
def e2e_tutorial_pipeline():
    # Launch AutoML image training; the component's arguments
    # are elided in the original snippet.
    job = aiplatform.AutoMLImageTrainingJobRunOp(
        # ...
    )
from kfp.v2 import compiler
from kfp.v2.google.client import AIPlatformClient

# Compile the pipeline defined above into a job spec.
compiler.Compiler().compile(
    pipeline_func=e2e_tutorial_pipeline,
    package_path='e2e-tutorial-automl-pipeline.json')

api_client = AIPlatformClient(project_id='PROJECT_ID', region='us-central1')

# Run the pipeline once, immediately.
api_client.create_run_from_job_spec(
    job_spec_path='e2e-tutorial-automl-pipeline.json'
)

# Or run the same compiled spec on a recurring schedule.
api_client.create_schedule_from_job_spec(
    job_spec_path='e2e-tutorial-automl-pipeline.json',
    # Start at the first minute of every hour.
    schedule='1 * * * *'
)
import tensorflow as tf
import tensorflow_datasets as tfds

ds = tfds.load('cifar10', as_supervised=True)

def csvfy(dataset, filename):
    """Write a dataset to CSV: label first, then the flattened pixel values."""
    with open(filename, 'w') as f:
        for image, label in dataset:
            f.write('{},'.format(label.numpy()))
            f.write(','.join(str(x) for x in list(image.numpy().reshape(-1))))
            f.write('\n')  # one row per example
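A short usage note, not in the original snippet: the function would be called once per split; the CSV file names are placeholders.

# Hypothetical usage; the output file names are placeholders.
csvfy(ds['train'], 'cifar10_train.csv')
csvfy(ds['test'], 'cifar10_test.csv')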
# Sanity-check the uploaded training table.
SELECT * FROM `bqml.train` LIMIT 10;
CREATE MODEL
  # Name of the model.
  `bqml.e2e-tutorial`
OPTIONS
  # BigQuery will detect that it's for multiclass.
  (MODEL_TYPE='LOGISTIC_REG') AS
SELECT
  # Use the first column as the label.
  int64_field_0 AS label,
  # The rest are features.
  * EXCEPT (int64_field_0)
FROM
  `bqml.train`;
SELECT
  # Display all metrics.
  *
FROM
  ML.EVALUATE(
    MODEL `bqml.e2e-tutorial`,
    (
      SELECT
        int64_field_0 AS label,
        * EXCEPT (int64_field_0)
      FROM
        # Table name assumed; the original snippet truncates here.
        `bqml.test`));
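As a hedged sketch (not from the original gist), the same evaluation can be issued from Python with the BigQuery client library; the project comes from the ambient credentials, and `bqml.test` is the assumed table name from above.

from google.cloud import bigquery

# Hedged sketch: run the ML.EVALUATE query above and print each metrics row.
client = bigquery.Client()
query = '''
SELECT *
FROM ML.EVALUATE(
  MODEL `bqml.e2e-tutorial`,
  (SELECT int64_field_0 AS label, * EXCEPT (int64_field_0)
   FROM `bqml.test`))
'''
for row in client.query(query).result():
    print(dict(row))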
import numpy as np
from sklearn import decomposition

# Reuse ds (and the tf import) from the TensorFlow Datasets snippet above.
train_data = list(ds['train'].map(lambda x, y: tf.reshape(x, [-1])))
test_data = list(ds['test'].map(lambda x, y: tf.reshape(x, [-1])))
X = tf.concat([train_data, test_data], 0).numpy()
print(np.shape(X))
# Output: (60000, 3072)
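The sklearn decomposition import is presumably put to use right after this point; a hedged continuation under that assumption, projecting the images onto three principal components:

# Hedged continuation (not in the original snippet): reduce the flattened
# images to 3 principal components, e.g. for a 3-D scatter plot.
pca = decomposition.PCA(n_components=3)
X_reduced = pca.fit_transform(X)
print(np.shape(X_reduced))
# Expected output: (60000, 3)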
import tensorflow as tf

# Load the saved custom model from Cloud Storage.
model = tf.keras.models.load_model('GCS_PATH_FOR_SAVED_MODEL')
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=['accuracy'])
model.summary()
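A hedged usage sketch, not in the original gist: evaluate the reloaded model on CIFAR-10's test split. The 0-1 pixel scaling is an assumption about the training-time preprocessing.

import tensorflow_datasets as tfds

# Hedged sketch: the 0-1 normalization below is assumed; match whatever
# preprocessing the model was actually trained with.
test_ds = (tfds.load('cifar10', split='test', as_supervised=True)
           .map(lambda x, y: (tf.cast(x, tf.float32) / 255.0, y))
           .batch(32))
loss, accuracy = model.evaluate(test_ds)
print('test accuracy:', accuracy)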