results of `gcloud ml-engine local predict` attempts with different `tf.estimator.export.ServingInputReceiver` args
# --------------------------------------------------------
# result of a `json_serving_input_fn` that returns
# tf.estimator.export.ServingInputReceiver(inputs, inputs)
# where 'inputs' contains the instance key
# --------------------------------------------------------
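# For reference, a minimal sketch of a `json_serving_input_fn` matching this
# variant (an assumed reconstruction, not the gist's actual trainer code;
# 'user_id' is the instance key, the other feature name/dtype is hypothetical):
#
#   import tensorflow as tf
#
#   def json_serving_input_fn():
#       # every key, including the instance key, goes into one dict ...
#       inputs = {
#           'user_id': tf.placeholder(tf.int64, [None]),
#           'some_feature': tf.placeholder(tf.float32, [None]),
#       }
#       # ... and the same dict is passed as both `features` and `receiver_tensors`
#       return tf.estimator.export.ServingInputReceiver(inputs, inputs)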
export MODE=local_single
export TRAIN_STEPS=2000
export NUM_EPOCHS=1
DATE=`date '+%Y%m%d_%H%M%S'`
export TRAINER_DIR=prop_car_gen_model/trainer
export PARENT_DIR=/Users/$USER/dev/anaconda2/envs/repos
export DATA_DIR=$PARENT_DIR/prop_car_gen_data
export OUTPUT_DIR=$PARENT_DIR/prop_car_gen_${MODE}_${DATE}
rm -rf $OUTPUT_DIR
TRAIN_FILE_LOCAL=$DATA_DIR/tier_3_train_201712.csv
EVAL_FILE_LOCAL=$DATA_DIR/tier_3_eval_201712.csv
echo $TRAIN_STEPS
echo $NUM_EPOCHS
echo $DATE
echo $TRAINER_DIR
echo $PARENT_DIR
echo $OUTPUT_DIR
echo $TRAIN_FILE_LOCAL
echo $EVAL_FILE_LOCAL
# $ echo $DATE
# 20180104_111756
# first train the model
gcloud ml-engine local train --package-path $TRAINER_DIR \
--module-name trainer.task \
-- \
--train-files $TRAIN_FILE_LOCAL \
--eval-files $EVAL_FILE_LOCAL \
--train-steps $TRAIN_STEPS \
--job-dir $OUTPUT_DIR \
--eval-steps 100
model_export_id=`'ls' $OUTPUT_DIR/export/prop-car-gen`
# 1515103301
echo $model_export_id
ls $OUTPUT_DIR/export/prop-car-gen/$model_export_id/
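# (the export directory should contain saved_model.pb plus a variables/ subdirectory)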
# Result of `tf.estimator.export.ServingInputReceiver(inputs, inputs)`,
# where the 'inputs' dict contains the instance key
# 1. the instance key does _not_ appear among the 'predict' & 'serving_default'
# signature inputs
# 2. the instance key _does_ appear among the 'predict' & 'serving_default'
# signature outputs
# inspect the saved model's signature definition
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_140037/export/prop-car-gen/1515103301/ \
--tag_set serve
# The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:
# SignatureDef key: "predict"
# SignatureDef key: "serving_default"
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_140037/export/prop-car-gen/1515103301/ \
--tag_set serve \
--signature_def serving_default
# The given SavedModel SignatureDef contains the following inputs(s):
# ...
# <no inputs['user_id'] tensor_info entry>
# The given SavedModel SignatureDef contains the following output(s):
# ...
# outputs['user_id'] tensor_info:
# dtype: DT_INT64
# shape: (-1)
# name: Placeholder:0
# Method name is: tensorflow/serving/predict
# predict with the trained model
gcloud ml-engine local predict \
--model-dir=$OUTPUT_DIR/export/prop-car-gen/$model_export_id/ \
--json-instances=$DATA_DIR/tier_3_test_sample_v1_201712.json
# --- ERROR BEGIN ----
# prediction_lib.PredictionError:
# Invalid inputs: Unexpected tensor name: user_id (Error code: 1)
# --- ERROR END ----
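# This matches the signature shown above: 'user_id' is absent from the serving
# signature's inputs, so an instance line that carries it, e.g. (hypothetical)
#   {"user_id": 12345, "some_feature": 0.1}
# appears to be rejected by the prediction library's key validation (instance
# keys are checked against the signature inputs) before the graph is run.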
# ----------------------------------------------------------
# result of a `json_serving_input_fn` that returns
# tf.estimator.export.ServingInputReceiver(features, inputs)
# where 'features' does NOT contain the instance key while
# 'inputs' does
# ----------------------------------------------------------
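# Again for reference, an assumed sketch of this variant (not the actual trainer
# code; feature names/dtypes other than 'user_id' are hypothetical):
#
#   import tensorflow as tf
#
#   def json_serving_input_fn():
#       inputs = {
#           'user_id': tf.placeholder(tf.int64, [None]),
#           'some_feature': tf.placeholder(tf.float32, [None]),
#       }
#       # drop the instance key from the dict handed to the model ...
#       features = {k: v for k, v in inputs.items() if k != 'user_id'}
#       # ... but keep it among the receiver tensors
#       return tf.estimator.export.ServingInputReceiver(features, inputs)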
# CMLE run in local single-node mode
export MODE=local_single
export TRAIN_STEPS=2000
export NUM_EPOCHS=1
DATE=`date '+%Y%m%d_%H%M%S'`
export TRAINER_DIR=prop_car_gen_model/trainer
export PARENT_DIR=/Users/$USER/dev/anaconda2/envs/repos
export DATA_DIR=$PARENT_DIR/prop_car_gen_data
export OUTPUT_DIR=$PARENT_DIR/prop_car_gen_${MODE}_${DATE}
rm -rf $OUTPUT_DIR
TRAIN_FILE_LOCAL=$DATA_DIR/tier_3_train_201712.csv
EVAL_FILE_LOCAL=$DATA_DIR/tier_3_eval_201712.csv
# first train the model
gcloud ml-engine local train --package-path $TRAINER_DIR \
--module-name trainer.task \
-- \
--train-files $TRAIN_FILE_LOCAL \
--eval-files $EVAL_FILE_LOCAL \
--train-steps $TRAIN_STEPS \
--job-dir $OUTPUT_DIR \
--eval-steps 100
model_export_id=`'ls' $OUTPUT_DIR/export/prop-car-gen`
# 1515092673
echo $model_export_id
ls $OUTPUT_DIR/export/prop-car-gen/$model_export_id/
# Result of `tf.estimator.export.ServingInputReceiver(features, inputs)`, where
# 'features' dict does _not_ contain the instance key while 'inputs' dict does:
# 1. the instance key _does_ appear among the 'predict' & 'serving_default'
# signature inputs
# 2. the instance key does _not_ appear among the 'predict' & 'serving_default'
# signature outputs
# inspect the saved model's signature definition
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_100210/export/prop-car-gen/1515092673/ \
--tag_set serve
# The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:
# SignatureDef key: "predict"
# SignatureDef key: "serving_default"
# same results for both `signature_def` settings
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_100210/export/prop-car-gen/1515092673/ \
--tag_set serve \
--signature_def serving_default
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_100210/export/prop-car-gen/1515092673/ \
--tag_set serve \
--signature_def predict
# The given SavedModel SignatureDef contains the following inputs(s):
# ...
# inputs['user_id'] tensor_info:
# dtype: DT_INT64
# shape: (-1)
# name: Placeholder:0
# The given SavedModel SignatureDef contains the following output(s):
# ...
# <no outputs['user_id'] tensor_info entry>
# predict with the trained model
gcloud ml-engine local predict \
--model-dir=$OUTPUT_DIR/export/prop-car-gen/$model_export_id/ \
--json-instances=$DATA_DIR/tier_3_test_sample_v1_201712.json
# --- ERROR BEGIN ----
# InvalidArgumentError (see above for traceback): You must feed a value for placeholder
# tensor 'Placeholder_52' with dtype int8 and shape [?]
# [[Node: Placeholder_52 = Placeholder[dtype=DT_INT8, shape=[?],
# _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]
# (Error code: 2)
# --- ERROR END ----
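# Note the failure mode is different here: the instance keys are accepted (they
# all match the signature inputs), but the session run then fails on a
# placeholder that is never fed. Presumably a placeholder created at export time
# is left unwired once `features` and `receiver_tensors` diverge, although the
# int8 dtype does not obviously correspond to the int64 instance key.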
# -------------------------------------------------
# result of a `json_serving_input_fn` that returns
# tf.estimator.export.ServingInputReceiver(
# features, inputs, {'serving_default': inputs}),
# where 'features' does NOT contain the instance key
# while 'inputs' does
# -------------------------------------------------
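# An assumed sketch of this variant (not the actual trainer code); the third
# positional argument of ServingInputReceiver is `receiver_tensors_alternatives`:
#
#   import tensorflow as tf
#
#   def json_serving_input_fn():
#       inputs = {
#           'user_id': tf.placeholder(tf.int64, [None]),
#           'some_feature': tf.placeholder(tf.float32, [None]),
#       }
#       features = {k: v for k, v in inputs.items() if k != 'user_id'}
#       # the alternatives dict adds extra '<name>:<signature>' SignatureDefs
#       # to the export (see the 'serving_default:*' keys below)
#       return tf.estimator.export.ServingInputReceiver(
#           features, inputs, {'serving_default': inputs})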
export MODE=local_single
export TRAIN_STEPS=2000
export NUM_EPOCHS=1
DATE=`date '+%Y%m%d_%H%M%S'`
export TRAINER_DIR=prop_car_gen_model/trainer
export PARENT_DIR=/Users/$USER/dev/anaconda2/envs/repos
export DATA_DIR=$PARENT_DIR/prop_car_gen_data
export OUTPUT_DIR=$PARENT_DIR/prop_car_gen_${MODE}_${DATE}
rm -rf $OUTPUT_DIR
TRAIN_FILE_LOCAL=$DATA_DIR/tier_3_train_201712.csv
EVAL_FILE_LOCAL=$DATA_DIR/tier_3_eval_201712.csv
echo $TRAIN_STEPS
echo $NUM_EPOCHS
echo $DATE
echo $TRAINER_DIR
echo $PARENT_DIR
echo $OUTPUT_DIR
echo $TRAIN_FILE_LOCAL
echo $EVAL_FILE_LOCAL
# first train the model
gcloud ml-engine local train --package-path $TRAINER_DIR \
--module-name trainer.task \
-- \
--train-files $TRAIN_FILE_LOCAL \
--eval-files $EVAL_FILE_LOCAL \
--train-steps $TRAIN_STEPS \
--job-dir $OUTPUT_DIR \
--eval-steps 100
model_export_id=`'ls' $OUTPUT_DIR/export/prop-car-gen`
# 1515093102
echo $model_export_id
ls $OUTPUT_DIR/export/prop-car-gen/$model_export_id/
# Result of `tf.estimator.export.ServingInputReceiver(features, inputs,
# {'serving_default': inputs})`, where 'features' dict does _not_ contain the instance
# key while 'inputs' dict does:
# 1. the instance key _does_ appear among the 'predict' & 'serving_default'
# signature inputs
# 2. the instance key does _not_ appear among the 'predict' & 'serving_default'
# signature outputs
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_110943/export/prop-car-gen/1515093102/ \
--tag_set serve
# The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:
# SignatureDef key: "predict"
# SignatureDef key: "serving_default"
# SignatureDef key: "serving_default:predict"
# SignatureDef key: "serving_default:serving_default"
# inspect the saved model's signature definition
# same results for all of these `signature_def` settings
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_110943/export/prop-car-gen/1515093102/ \
--tag_set serve \
--signature_def predict
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_110943/export/prop-car-gen/1515093102/ \
--tag_set serve \
--signature_def serving_default
saved_model_cli show \
--dir prop_car_gen_local_single_20180104_110943/export/prop-car-gen/1515093102/ \
--tag_set serve \
--signature_def serving_default:serving_default
# The given SavedModel SignatureDef contains the following inputs(s):
# ...
# inputs['user_id'] tensor_info:
# dtype: DT_INT64
# shape: (-1)
# name: Placeholder:0
# The given SavedModel SignatureDef contains the following output(s):
# ...
# <no outputs['user_id'] tensor_info entry>
# predict with the trained model
gcloud ml-engine local predict \
--model-dir=$OUTPUT_DIR/export/prop-car-gen/$model_export_id/ \
--json-instances=$DATA_DIR/tier_3_test_sample_v1_201712.json
# --- ERROR BEGIN ----
# InvalidArgumentError (see above for traceback): You must feed a value for placeholder
# tensor 'Placeholder_52' with dtype int8 and shape [?]
# [[Node: Placeholder_52 = Placeholder[dtype=DT_INT8, shape=[?],
# _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]
# (Error code: 2)
# --- ERROR END ----