"""Grid search for different architectures of SparseConvNet."""
from __future__ import print_function
import os
import sys
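# Make the PySparseConvNet bindings importable from a local checkout
# (adjust PSCN_DIR to wherever PySparseConvNet lives on your machine).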
PSCN_DIR = '/var/workplace/PySparseConvNet/'
sys.path.insert(0, PSCN_DIR)
from glob import glob
import math
import gc
import numpy as np
from itertools import product
from random import shuffle
from pprint import pprint
from PySparseConvNet import SparseNetwork
from PySparseConvNet import SparseDataset
from PySparseConvNet import Off3DPicture
def create_DeepC2Network(dimension, l, k, fn, nInputFeatures, nClasses, p, nThreads=1):
    """Build an (l + 1)-level DeepC2-style SparseNetwork.

    Level i has (i + 1) * k features; every level except the last is followed
    by max-pooling, and dropout grows linearly from 0 to p with depth.
    `fn` is the activation function name (e.g. 'VLEAKYRELU').
    """
    sparse_net = SparseNetwork(dimension, nInputFeatures, nClasses, nThreads=nThreads)
    for i in range(l + 1):
        sparse_net.addLeNetLayerMP(
            (i + 1) * k, 2, 1, 3 if (i < l) else 1, 2 if (i < l) else 1, fn,
            p * i * 1.0 / l)
    sparse_net.addSoftmaxLayer()
    return sparse_net
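# Assuming the standard SparseConvNet addLeNetLayerMP argument order
# (nFeatures, filterSize, filterStride, poolSize, poolStride, activation, dropout),
# each level above applies size-2 convolutions with stride 1 followed by size-3
# max-pooling with stride 2; the final level uses size-1 filters and a size-1
# (no-op) pool before the softmax classifier.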
number_of_features = 1
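# Grid resolution passed to Off3DPicture when rasterising .off meshes
# (an assumption based on how renderSize is used below).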
renderSize = 40
path_to_esb_folders = [
    'SparseConvNet/Data/ESB/Flat-Thin Wallcomponents/',
    'SparseConvNet/Data/ESB/Rectangular-Cubic Prism/',
    'SparseConvNet/Data/ESB/Solid Of Revolution/'
]
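# These are the three top-level categories of the Engineering Shape Benchmark (ESB);
# ESB_dataset below treats each of their subfolders as one class of .off meshes.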
class ESB_dataset(object):
    """Thin wrapper that builds SparseDataset train/test splits from the ESB folders."""

    sparse_train_dataset = None
    sparse_test_dataset = None

    def __init__(self, number_of_features=1):
        self.number_of_features = number_of_features
        self.class_folders = [
            os.path.join(folder, dirname)
            for folder in path_to_esb_folders
            for dirname in os.listdir(folder)
        ]
        self.class_folders.sort()
        self.number_of_classes = len(self.class_folders)

    def summary(self):
        print("ESB dataset wrapper object:")
        print("Number of features {}".format(self.number_of_features))
        print("Number of classes {}".format(self.number_of_classes))
        # print("Number of files {}".format(len()))

    def generate_shuffled_esb_dataset(self, test_percent=0.2):
        """Reshuffle the .off files of every class and split them into train/test sets."""
        # Release datasets from any previous configuration before building new ones.
        if self.sparse_train_dataset is not None:
            del self.sparse_train_dataset
        if self.sparse_test_dataset is not None:
            del self.sparse_test_dataset
        gc.collect()
        self.sparse_train_dataset = SparseDataset(
            "ESB (Train subset)", 'TRAINBATCH', self.number_of_features,
            self.number_of_classes)
        self.sparse_test_dataset = SparseDataset(
            "ESB (Test subset)", 'TESTBATCH', self.number_of_features,
            self.number_of_classes)
        for _class_id, _class_folder in enumerate(self.class_folders):
            offs = glob(os.path.join(_class_folder, "*.off"))
            shuffle(offs)
            num_of_test_samples = int(math.ceil(len(offs) * test_percent))
            for _i in range(num_of_test_samples):
                self.sparse_test_dataset.add_picture(
                    Off3DPicture(offs.pop(), renderSize, label=_class_id))
            for _off in offs:
                self.sparse_train_dataset.add_picture(
                    Off3DPicture(_off, renderSize, label=_class_id))
        self.sparse_train_dataset.repeatSamples(10)
        return self.sparse_train_dataset, self.sparse_test_dataset
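# Note: generate_shuffled_esb_dataset defaults to a 20% test split; the grid-search
# loop below overrides this with test_percent=0.3 and draws a fresh split for every
# configuration.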
nFeatures = 1
n_epochs = 150
batch_size = 10
dimension = 3
nThreads = 1
depth_range = range(3, 8)
filter_multiplier_range = range(8, 49, 8)
dropout_range = np.arange(0.0, 0.6, 0.1)
attempts_range = range(2)
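# 5 depths x 6 filter multipliers x 6 dropout rates x 2 attempts = 360 configurations.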
formatter = (("-" * 80) + """
New conditions:
depth: {}
filter_mult: {}
dropout: {}
attempt: {}
""")
# TODO: if NLL becomes NaN, restart the run; after 5 restarts move on to the next configuration
# TODO: respawn the process after running out of memory / a cudaSafeCall "out of memory" error
if __name__ == '__main__':
    configurations = list(product(depth_range,
                                  filter_multiplier_range,
                                  dropout_range,
                                  attempts_range))
    shuffle(configurations)
    dataset = ESB_dataset()
    for (depth, filter_multiplier, dropout, attempt) in configurations:
        # depth, filter_multiplier, dropout, attempt = 5, 32, 0.0, 1
        print(formatter.format(depth, filter_multiplier, dropout, attempt))
        train_set, test_set = dataset.generate_shuffled_esb_dataset(
            test_percent=0.3)
        dataset.summary()
        network = create_DeepC2Network(dimension, depth, filter_multiplier,
                                       'VLEAKYRELU', number_of_features,
                                       train_set.nClasses, dropout)
        for epoch in range(1, n_epochs + 1):
            # Exponentially decaying learning rate: 0.003 * exp(-0.025 * epoch).
            learning_rate = 0.003 * math.exp(-0.05 / 2 * epoch)
            print("epoch {0}, lr={1} ".format(epoch, learning_rate))
            train_report = network.processDataset(
                train_set,
                batchSize=batch_size,
                learningRate=learning_rate)
            pprint(train_report)
            test_reports = network.processDatasetRepeatTest(test_set, batch_size, 3)
            pprint(test_reports)
            if epoch % 10 == 0:
                print("saving state")
                # Assumes a 'weights/' directory exists next to the script.
                network.saveWeights('weights/esb_saved_state', 0)
                # del network
                # gc.collect()
                # network = create_DeepC2Network(dimension, depth, filter_multiplier,
                #                                'VLEAKYRELU', number_of_features,
                #                                train_set.nClasses, dropout)
                # network.loadWeights('weights/esb_saved_state', 0)