@lakshmanok
Created August 28, 2018 16:09
import functools
import tensorflow as tf


def make_input_fn(pattern, mode, num_cores=8, transpose_input=False):
  """Returns an input_fn that reads TFRecord files matching `pattern`."""

  def _set_shapes(batch_size, images, labels):
    """Statically set the batch_size dimension."""
    if transpose_input:
      images.set_shape(images.get_shape().merge_with(
          tf.TensorShape([None, None, None, batch_size])))
      labels.set_shape(labels.get_shape().merge_with(
          tf.TensorShape([batch_size])))
    else:
      images.set_shape(images.get_shape().merge_with(
          tf.TensorShape([batch_size, None, None, None])))
      labels.set_shape(labels.get_shape().merge_with(
          tf.TensorShape([batch_size])))
    return images, labels

  def _input_fn(params):
    batch_size = params['batch_size']
    is_training = (mode == tf.estimator.ModeKeys.TRAIN)

    # Read the dataset: list the matching files, shuffling the file order
    # only during training.
    dataset = tf.data.Dataset.list_files(pattern, shuffle=is_training)
    if is_training:
      dataset = dataset.repeat()

    def fetch_dataset(filename):
      buffer_size = 8 * 1024 * 1024  # 8 MiB per file
      dataset = tf.data.TFRecordDataset(filename, buffer_size=buffer_size)
      return dataset

    # Read many files in parallel; sloppy=True trades deterministic ordering
    # for throughput.
    dataset = dataset.apply(
        tf.contrib.data.parallel_interleave(
            fetch_dataset, cycle_length=64, sloppy=True))
    dataset = dataset.shuffle(1024)

    # Augment and batch. read_and_preprocess (defined elsewhere) parses and
    # augments a single TFRecord example.
    dataset = dataset.apply(
        tf.contrib.data.map_and_batch(
            read_and_preprocess, batch_size=batch_size,
            num_parallel_batches=num_cores, drop_remainder=True))

    # Optionally transpose batches from NHWC to HWCN, which is faster on TPUs.
    if transpose_input:
      dataset = dataset.map(
          lambda images, labels: (tf.transpose(images, [1, 2, 3, 0]), labels),
          num_parallel_calls=num_cores)

    # Assign static shapes so the batch dimension is known at graph-build time.
    dataset = dataset.map(
        functools.partial(_set_shapes, batch_size))

    # Prefetch data while training.
    dataset = dataset.prefetch(tf.contrib.data.AUTOTUNE)
    return dataset

  return _input_fn
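
The returned _input_fn takes a params dict because TPUEstimator supplies params['batch_size'] itself. A minimal usage sketch, assuming hypothetical names (a model_fn, a run_config, and a gs://my-bucket/train-*.tfrecord file pattern that are not part of this gist):

# Hypothetical wiring of make_input_fn into a TPUEstimator (TF 1.x).
train_input_fn = make_input_fn(
    pattern='gs://my-bucket/train-*.tfrecord',  # hypothetical path
    mode=tf.estimator.ModeKeys.TRAIN,
    transpose_input=True)

estimator = tf.contrib.tpu.TPUEstimator(
    model_fn=model_fn,        # hypothetical model_fn defined elsewhere
    config=run_config,        # hypothetical tf.contrib.tpu.RunConfig
    train_batch_size=1024)

# The estimator calls train_input_fn(params) with params['batch_size'] set
# from train_batch_size (divided across TPU shards).
estimator.train(input_fn=train_input_fn, max_steps=10000)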