nbs/keras_raw-vgg16.ipynb
{
"cells": [
{
"metadata": {},
"cell_type": "markdown",
"source": "## Start"
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "%reload_ext autoreload\n%autoreload 2\n%matplotlib inline",
"execution_count": 1,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "from imports import *\nfrom fast_gen import preprocess_scale, scale_and_center",
"execution_count": 1,
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": "Using TensorFlow backend.\n"
}
]
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "imagenet_mean = np.array([103.939, 116.779, 123.68], dtype=np.float32).reshape((1,1,3))\ndef preprocess_imagenet(x): return x[..., ::-1] - imagenet_mean",
"execution_count": 13,
"outputs": []
},
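{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* `preprocess_imagenet` reverses the channel order from RGB to BGR and subtracts the per-channel ImageNet means, which is the preprocessing the pretrained VGG16 weights expect. A minimal sanity check on a random image-sized array is sketched below."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added sanity check (hypothetical, not from the original run):\n# the result should keep its shape and be roughly zero-centred per channel\nx = np.random.randint(0, 256, (224, 224, 3)).astype(np.float32)\ny = preprocess_imagenet(x)\nassert y.shape == x.shape\nprint(y.mean(axis=(0, 1)))",
"execution_count": null,
"outputs": []
},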
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "bs=64; sz=224; lr=2e-3",
"execution_count": 3,
"outputs": []
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "path = \"/data/jhoward/fast/dogscats/\"",
"execution_count": 4,
"outputs": []
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "mn=VGG16()\nconv_outp=mn.get_layer('predictions').input\noutp = Dense(2, activation='softmax')(conv_outp)\nm = Model(mn.input, outp)\nfor l in m.layers[:-1]: l.trainable=False\nm.compile(SGD(lr, momentum=0.9), 'categorical_crossentropy', metrics=['accuracy'])",
"execution_count": 5,
"outputs": []
},
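{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* `VGG16()` loads the full ImageNet-pretrained network; taking the input of its `predictions` layer grabs the 4096-dimensional `fc2` activations, on top of which a new 2-way softmax is attached. Every layer except that new head is frozen, so only the final Dense layer is trained. A quick check is sketched below."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added check (hypothetical, not from the original run):\n# only the new 2-way softmax head should report trainable=True\n[(l.name, l.trainable) for l in m.layers if l.trainable]",
"execution_count": null,
"outputs": []
},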
{
"metadata": {
"scrolled": false,
"trusted": true
},
"cell_type": "code",
"source": "gen = image.ImageDataGenerator(preprocessing_function=preprocess_imagenet)\ntrn_batches = gen.flow_from_directory(f'{path}train', (sz,sz), batch_size=bs)\nfix_batches = gen.flow_from_directory(f'{path}train', (sz,sz), batch_size=bs, shuffle=False)\nval_batches = gen.flow_from_directory(f'{path}valid', (sz,sz), batch_size=bs, shuffle=False)\nnb_trn = math.ceil(trn_batches.n/bs)\nnb_val = math.ceil(val_batches.n/bs)",
"execution_count": 9,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "Found 23000 images belonging to 2 classes.\nFound 23000 images belonging to 2 classes.\nFound 2000 images belonging to 2 classes.\n"
}
]
},
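{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* `fix_batches` reads the same training images as `trn_batches` but with `shuffle=False`, so activations predicted from it later line up with `fix_batches.classes`. The label mapping implied by the directory names can be inspected as sketched below."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added check (hypothetical, not from the original run): directory-name to label-index mapping\nprint(trn_batches.class_indices)",
"execution_count": null,
"outputs": []
},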
{
"metadata": {
"scrolled": false,
"trusted": true
},
"cell_type": "code",
"source": "m.fit_generator(trn_batches, nb_trn, workers=1, epochs=3,\n validation_data=val_batches, validation_steps=nb_val)",
"execution_count": 10,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "Epoch 1/3\n360/360 [==============================] - 136s - loss: 0.0698 - acc: 0.9772 - val_loss: 0.0515 - val_acc: 0.9805\nEpoch 2/3\n360/360 [==============================] - 132s - loss: 0.0400 - acc: 0.9859 - val_loss: 0.0441 - val_acc: 0.9830\nEpoch 3/3\n360/360 [==============================] - 132s - loss: 0.0294 - acc: 0.9891 - val_loss: 0.0425 - val_acc: 0.9850\n"
},
{
"data": {
"text/plain": "<keras.callbacks.History at 0x7facf4f38160>"
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
]
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "",
"execution_count": null,
"outputs": []
},
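{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* The cells below take a second, faster route to the same result: cut VGG16 off at the input to its `predictions` layer, precompute those 4096-dimensional activations once for the unshuffled training and validation sets, and then fit a tiny one-unit sigmoid head on the cached features. Since the convolutional and fully connected layers are frozen anyway, this avoids re-running them on every epoch."
},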
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "mn=VGG16()\nconv_outp=mn.get_layer('predictions').input\nm = Model(mn.input, conv_outp)",
"execution_count": 17,
"outputs": []
},
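{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* a quick sketch to confirm the truncated model emits the `fc2` activations; for the standard VGG16 this should be `(None, 4096)`."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added check (hypothetical, not from the original run)\nprint(m.output_shape)",
"execution_count": null,
"outputs": []
},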
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "fix_batches.reset(); val_batches.reset()\ntrn_acts = m.predict_generator(generator=fix_batches, verbose=1, \n steps=nb_trn, workers=1)\nval_acts = m.predict_generator(generator=val_batches, verbose=1,\n steps=nb_val, workers=1)",
"execution_count": 18,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "360/360 [==============================] - 121s \n32/32 [==============================] - 10s \n"
}
]
},
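{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* precomputing the activations takes a couple of minutes, so it may be worth caching them to disk. The sketch below uses `np.save` with hypothetical file names; it is not part of the original run."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Optional caching sketch (added; file names are hypothetical)\nnp.save(f'{path}trn_acts.npy', trn_acts)\nnp.save(f'{path}val_acts.npy', val_acts)\n# to reuse later: trn_acts = np.load(f'{path}trn_acts.npy')",
"execution_count": null,
"outputs": []
},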
{
"metadata": {
"trusted": true,
"collapsed": true
},
"cell_type": "code",
"source": "inp = Input(batch_shape=m.output_shape)\noutp = Dense(1, activation='sigmoid')(inp)\nfc = Model(inp, outp)\nfc.compile(SGD(lr, momentum=0.9), 'binary_crossentropy', metrics=['accuracy'])",
"execution_count": 20,
"outputs": []
},
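{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* this head is effectively logistic regression on the 4096-dimensional activations: a single sigmoid unit trained with binary cross-entropy, so it has only 4097 parameters. `fc.summary()` below is an added check."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added check (hypothetical, not from the original run)\nfc.summary()",
"execution_count": null,
"outputs": []
},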
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "fc.fit(trn_acts, fix_batches.classes, bs, 3, validation_data=(val_acts, val_batches.classes))",
"execution_count": 21,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "Train on 23000 samples, validate on 2000 samples\nEpoch 1/3\n23000/23000 [==============================] - 1s - loss: 0.0605 - acc: 0.9773 - val_loss: 0.0556 - val_acc: 0.9820\nEpoch 2/3\n23000/23000 [==============================] - 1s - loss: 0.0375 - acc: 0.9857 - val_loss: 0.0378 - val_acc: 0.9845\nEpoch 3/3\n23000/23000 [==============================] - 1s - loss: 0.0306 - acc: 0.9888 - val_loss: 0.0366 - val_acc: 0.9845\n"
},
{
"data": {
"text/plain": "<keras.callbacks.History at 0x7faceda16c50>"
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
]
},
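{
"metadata": {},
"cell_type": "markdown",
"source": "*Added note (not part of the original run):* a sketch that scores the cached validation activations with the trained head, thresholds the sigmoid output at 0.5, and compares against the labels from the unshuffled validation generator; it should roughly reproduce the `val_acc` reported by `fit`."
},
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "# Added evaluation sketch (hypothetical, not from the original run)\nval_preds = fc.predict(val_acts, batch_size=bs)[:, 0]\nprint(np.mean((val_preds > 0.5) == val_batches.classes))",
"execution_count": null,
"outputs": []
},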
{
"metadata": {
"collapsed": true,
"trusted": true
},
"cell_type": "code",
"source": "",
"execution_count": null,
"outputs": []
}
],
"metadata": {
"kernelspec": {
"name": "conda-root-py",
"display_name": "Python [conda root]",
"language": "python"
},
"language_info": {
"name": "python",
"version": "3.6.2",
"mimetype": "text/x-python",
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"pygments_lexer": "ipython3",
"nbconvert_exporter": "python",
"file_extension": ".py"
},
"gist": {
"id": "",
"data": {
"description": "nbs/keras_raw-vgg16.ipynb",
"public": true
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}