Analytics India Magazine (analyticsindiamagazine)
#Importing necessary packages
import numpy as np
from flask import Flask, request, render_template
import pickle
from fastai.tabular import *
import os
#Saving the working directory and model directory
cwd = os.getcwd()
path = cwd + '/model'
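A minimal sketch of how these pieces are typically wired up (the route names, form handling, and the export.pkl file name are assumptions rather than the article's exact code): load the exported fastai learner from the model directory above and serve predictions through a Flask route that renders index.html.
import pandas as pd

app = Flask(__name__)
learn = load_learner(path)  # expects an export.pkl produced by learn.export() in the model directory

@app.route('/')
def home():
    return render_template('index.html')

@app.route('/predict', methods=['POST'])
def predict():
    # Build a single row from the submitted form fields (field names are assumed to match the training columns)
    row = pd.Series(dict(request.form))
    pred = learn.predict(row)[0]
    return render_template('index.html', prediction_text='Estimated resale price: {}'.format(pred))

if __name__ == '__main__':
    app.run(debug=True)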
<!DOCTYPE html>
<html>
<!--From https://codepen.io/frytyler/pen/EGdtg-->
<head>
<meta charset="UTF-8">
<title>Find The Resale Price Of Your Car Now!!</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href='https://fonts.googleapis.com/css?family=Pacifico' rel='stylesheet' type='text/css'>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Raleway">
<link rel="stylesheet" href="https://www.w3schools.com/w3css/4/w3.css">
self.args = {
"output_dir": "outputs/",
"cache_dir": "cache_dir/",
"fp16": True,
"fp16_opt_level": "O1",
"max_seq_length": 128,
"train_batch_size": 8,
"gradient_accumulation_steps": 1,
"eval_batch_size": 8,
"num_train_epochs": 1,
# TESTING
max_seq_length = 128  # Maximum number of tokens in each input sequence
# An example of tokenizing the first record in the STORY column
s1 = train['STORY'].iloc[0]
stokens1 = tokenizer.tokenize(s1)
stokens1 = ["[CLS]"] + stokens1 + ["[SEP]"]
input_ids1 = get_ids(stokens1, tokenizer, max_seq_length)
input_masks1 = get_masks(stokens1, max_seq_length)
input_word_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32,
name="input_word_ids")
input_mask = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32,
name="input_mask")
segment_ids = tf.keras.layers.Input(shape=(max_seq_length,), dtype=tf.int32,
name="segment_ids")
bert_inputs = [input_word_ids, input_mask, segment_ids]
pooled_output, _ = bert_layer(bert_inputs)
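A minimal sketch of a classification head on top of the pooled BERT output (the hidden size, number of classes, and optimizer settings are placeholder choices, not the article's exact architecture):
num_classes = 4  # placeholder: number of target categories

dense = tf.keras.layers.Dense(256, activation='relu')(pooled_output)
pred = tf.keras.layers.Dense(num_classes, activation='softmax')(dense)

model = tf.keras.models.Model(inputs=bert_inputs, outputs=pred)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()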
def get_masks(tokens, max_seq_length):
    """Mask for padding"""
    if len(tokens) > max_seq_length:
        # Cutting down the excess length
        tokens = tokens[0:max_seq_length]
        return [1] * len(tokens)
    else:
        return [1] * len(tokens) + [0] * (max_seq_length - len(tokens))

def get_segments(tokens, max_seq_length):
    """Segment ids: 0 for the first sentence, 1 after the first [SEP] (standard reconstruction; the original snippet is cut off here)"""
    segments = []
    current_segment_id = 0
    for token in tokens[:max_seq_length]:
        segments.append(current_segment_id)
        if token == "[SEP]":
            current_segment_id = 1
    return segments + [0] * (max_seq_length - len(segments))
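The tokenization example above also calls get_ids, which is not shown in this snippet. A minimal sketch, assuming the tokenizer exposes convert_tokens_to_ids (as BERT's FullTokenizer does): tokens are mapped to vocabulary ids and padded with zeros up to max_seq_length.
def get_ids(tokens, tokenizer, max_seq_length):
    """Token ids padded with zeros to max_seq_length (sketch, not the original helper)"""
    token_ids = tokenizer.convert_tokens_to_ids(tokens)
    return token_ids + [0] * (max_seq_length - len(token_ids))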
analyticsindiamagazine / XAI_WHAT_IF.ipynb (created November 27, 2019)
How To Implement Explainable AI With What-If Tool For Model Comparison
# Importing The WIT essentials
from witwidget.notebook.visualization import WitConfigBuilder
from witwidget.notebook.visualization import WitWidget
#Setting the WIT tool box height in pixels
tool_height_in_px = 600
# Setting the number of datapoints to visualize
num_datapoints = len(val)

# Preprocessing the validation data (helper functions from the WIT demo utilities)
make_label_column_numeric(val, label_column, classes)
test_examples = df_to_examples(val[0:num_datapoints])

# Configuring the WIT tool with the test examples and the two trained classifiers
config_builder = WitConfigBuilder(test_examples[0:num_datapoints]).set_estimator_and_feature_spec(
    classifier, feature_spec).set_compare_estimator_and_feature_spec(
    classifier2, feature_spec)  # classifier2: second model to compare (name assumed; the original call is cut off)
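With the configuration built, the tool is rendered in the notebook by passing the config builder to WitWidget along with the height set earlier (this display step is implied by the snippet but not shown in it):
# Render the What-If Tool in the notebook cell
WitWidget(config_builder, height=tool_height_in_px)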