# Building the seq2seq model
def seq2seq_model(inputs, targets, keep_prob, batch_size, sequence_length, answers_num_words, questions_num_words, encoder_embedding_size, decoder_embedding_size, rnn_size, num_layers, questionswords2int):
    # Embed the encoder inputs and run them through the encoder RNN
    encoder_embedded_input = tf.contrib.layers.embed_sequence(inputs,
                                                              answers_num_words + 1,
                                                              encoder_embedding_size,
                                                              initializer = tf.random_uniform_initializer(0, 1))
    encoder_state = encoder_rnn(encoder_embedded_input, rnn_size, num_layers, keep_prob, sequence_length)
    # Shift the targets so the decoder is fed <SOS> followed by the answer tokens
    preprocessed_targets = preprocess_targets(targets, questionswords2int, batch_size)
    decoder_embeddings_matrix = tf.Variable(tf.random_uniform([questions_num_words + 1, decoder_embedding_size], 0, 1))
    decoder_embedded_input = tf.nn.embedding_lookup(decoder_embeddings_matrix, preprocessed_targets)
    training_predictions, test_predictions = decoder_rnn(decoder_embedded_input, decoder_embeddings_matrix, encoder_state, questions_num_words,
                                                         sequence_length, rnn_size, num_layers, questionswords2int, keep_prob, batch_size)
    return training_predictions, test_predictions
# Defining the decoder RNN layers
def decoder_rnn(decoder_embedded_input, decoder_embeddings_matrix, encoder_state, num_words, sequence_length, rnn_size, num_layers, word2int, keep_prob, batch_size):
    with tf.variable_scope("decoding") as decoding_scope:
        lstm = tf.contrib.rnn.BasicLSTMCell(rnn_size)
        lstm_dropout = tf.contrib.rnn.DropoutWrapper(lstm, input_keep_prob = keep_prob)
        decoder_cell = tf.contrib.rnn.MultiRNNCell([lstm_dropout] * num_layers)
        weights = tf.truncated_normal_initializer(stddev = 0.1)
        biases = tf.zeros_initializer()
        # Fully connected layer mapping decoder outputs to vocabulary logits
        output_function = lambda x: tf.contrib.layers.fully_connected(x,
                                                                      num_words,
                                                                      None,
                                                                      scope = decoding_scope,
                                                                      weights_initializer = weights,
                                                                      biases_initializer = biases)
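        # The gist ends at the output layer; the lines below are a hedged sketch (not from the
        # original file) of how this decoder is typically finished by wiring in the
        # decode_training_set and decode_test_set functions defined elsewhere in this file,
        # assuming word2int contains "<SOS>" and "<EOS>" ids.
        training_predictions = decode_training_set(encoder_state, decoder_cell, decoder_embedded_input,
                                                   sequence_length, decoding_scope, output_function,
                                                   keep_prob, batch_size)
        decoding_scope.reuse_variables()
        test_predictions = decode_test_set(encoder_state, decoder_cell, decoder_embeddings_matrix,
                                           word2int["<SOS>"], word2int["<EOS>"], sequence_length - 1,
                                           num_words, decoding_scope, output_function,
                                           keep_prob, batch_size)
    return training_predictions, test_predictions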
# Decoding the test/validation set
def decode_test_set(encoder_state, decoder_cell, decoder_embeddings_matrix, sos_id, eos_id, maximum_length, num_words, decoding_scope, output_function, keep_prob, batch_size):
    attention_states = tf.zeros([batch_size, 1, decoder_cell.output_size])
    attention_keys, attention_values, attention_score_function, attention_construct_function = tf.contrib.seq2seq.prepare_attention(attention_states, attention_option = "bahdanau", num_units = decoder_cell.output_size)
    # Inference decoder: starts from <SOS> and feeds its own predictions back in until <EOS> or maximum_length
    test_decoder_function = tf.contrib.seq2seq.attention_decoder_fn_inference(output_function,
                                                                              encoder_state[0],
                                                                              attention_keys,
                                                                              attention_values,
                                                                              attention_score_function,
                                                                              attention_construct_function,
                                                                              decoder_embeddings_matrix,
                                                                              sos_id, eos_id, maximum_length, num_words,
                                                                              name = "attn_dec_inf")
    test_predictions, _, _ = tf.contrib.seq2seq.dynamic_rnn_decoder(decoder_cell, test_decoder_function, scope = decoding_scope)
    return test_predictions
### Decoding the training set
def decode_training_set(encoder_state, decoder_cell, decoder_embedded_input, sequence_length, decoding_scope, output_function, keep_prob, batch_size):
    attention_states = tf.zeros([batch_size, 1, decoder_cell.output_size])
    attention_keys, attention_values, attention_score_function, attention_construct_function = tf.contrib.seq2seq.prepare_attention(attention_states,
                                                                                                                                     attention_option = "bahdanau",
                                                                                                                                     num_units = decoder_cell.output_size)
    # Training decoder: attends over the encoder states while being fed the ground-truth targets
    training_decoder_function = tf.contrib.seq2seq.attention_decoder_fn_train(encoder_state[0],
                                                                              attention_keys,
                                                                              attention_values,
                                                                              attention_score_function,
                                                                              attention_construct_function,
                                                                              name = "attn_dec_train")
    decoder_output, _, _ = tf.contrib.seq2seq.dynamic_rnn_decoder(decoder_cell, training_decoder_function,
                                                                  decoder_embedded_input, sequence_length, scope = decoding_scope)
    decoder_output_dropout = tf.nn.dropout(decoder_output, keep_prob)
    return output_function(decoder_output_dropout)
# Creating the encoder RNN
def encoder_rnn(rnn_inputs, rnn_size, num_layers, keep_prob, sequence_length):
    lstm = tf.contrib.rnn.BasicLSTMCell(rnn_size)
    lstm_dropout = tf.contrib.rnn.DropoutWrapper(lstm, input_keep_prob = keep_prob)
    encoder_cell = tf.contrib.rnn.MultiRNNCell([lstm_dropout] * num_layers)
    encoder_output, encoder_state = tf.nn.bidirectional_dynamic_rnn(cell_fw = encoder_cell,
                                                                    cell_bw = encoder_cell,
                                                                    sequence_length = sequence_length,
                                                                    inputs = rnn_inputs,
                                                                    dtype = tf.float32)
    return encoder_state
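# Note (not from the original file): tf.nn.bidirectional_dynamic_rnn returns one final state per
# direction, so encoder_state here is a (forward, backward) tuple. That is why the decoding
# functions above pass encoder_state[0] (the forward state) when building the attention decoder
# functions.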
# Pre-processing the targets: prepend <SOS> and drop the last token of each sequence
def preprocess_targets(targets, word2int, batch_size):
    left_side = tf.fill([batch_size, 1], word2int["<SOS>"])
    right_side = tf.strided_slice(targets, [0, 0], [batch_size, -1], [1, 1])
    preprocessed_targets = tf.concat([left_side, right_side], 1)
    return preprocessed_targets
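# Hedged illustration (plain Python, not part of the original graph code) of what the target
# preprocessing does: the last token of every row is dropped and the <SOS> id is prepended,
# so the decoder input is shifted one step to the right of the decoder target.
example_word2int = {"<SOS>": 1}                # hypothetical vocabulary entry
example_batch = [[7, 8, 9, 4], [5, 6, 3, 4]]   # hypothetical target ids, one row per sequence
shifted = [[example_word2int["<SOS>"]] + row[:-1] for row in example_batch]
print(shifted)                                 # [[1, 7, 8, 9], [1, 5, 6, 3]]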
# Creating placeholders for the model inputs
def model_inputs():
    inputs = tf.placeholder(tf.int32, [None, None], name = "Inputs")
    targets = tf.placeholder(tf.int32, [None, None], name = "Targets")
    lr = tf.placeholder(tf.float32, name = "learning_rate")
    keep_prob = tf.placeholder(tf.float32, name = "keep_prob")
    return inputs, targets, lr, keep_prob
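# Hedged wiring sketch (not part of the original gists): how these placeholders are typically
# combined with the seq2seq_model function defined above. The hyperparameter values and the
# answerswords2int / questionswords2int dictionaries are illustrative assumptions, not values
# taken from the original code. Assumes `import tensorflow as tf` with a 1.x release that
# still ships tf.contrib.
tf.reset_default_graph()
inputs, targets, lr, keep_prob = model_inputs()
sequence_length = tf.placeholder_with_default(25, None, name = "sequence_length")   # assumed max length
training_predictions, test_predictions = seq2seq_model(inputs,
                                                       targets,
                                                       keep_prob,
                                                       batch_size = 64,                                # assumed
                                                       sequence_length = sequence_length,
                                                       answers_num_words = len(answerswords2int),      # assumed dict
                                                       questions_num_words = len(questionswords2int),  # assumed dict
                                                       encoder_embedding_size = 512,                   # assumed
                                                       decoder_embedding_size = 512,                   # assumed
                                                       rnn_size = 512,                                  # assumed
                                                       num_layers = 3,                                  # assumed
                                                       questionswords2int = questionswords2int)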
if("pacman" %in% rownames(installed.packages()) == FALSE) {install.packages("pacman")} # Check if you have universal installer package, install if not | |
pacman::p_load("caret","ROCR","lift","glmnet","MASS","e1071","readxl") #Check, and if needed install the necessary packages | |
###########LOADING THE CREDIT DATA FILE | |
creditdata1<-read_excel(file.choose()) | |
names(creditdata1)[1]<-"CurrentId" | |
str(creditdata1) | |
creditdata1$`OLD SYSTEM ID`<-NULL | |
###########LOADING THE NEW APPLICATIONS FILE | |
NEWAPPLICATIONS<-read.csv(choose.files()) |
######## Loading the Titanic train data set
TitanicTrain <- train1
###### Checking missing values in the train data set
sapply(TitanicTrain, function(x) sum(is.na(x)))
####### Loading the Titanic test data set
TitanicTest <- test11
####### Checking missing values in the test data set
sapply(TitanicTest, function(x) sum(is.na(x)))