Skip to content

Instantly share code, notes, and snippets.

View pranjalAI's full-sized avatar

Pranjal Saxena pranjalAI

View GitHub Profile
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Load seaborn's bundled "tips" example dataset and draw a faceted
# scatter plot: one column per meal time, smokers distinguished by
# hue/marker style, point size tied to party size.
dataset = sns.load_dataset("tips")
sns.relplot(
    data=dataset,
    x="total_bill",
    y="tip",
    col="time",
    hue="smoker",
    style="smoker",
    size="size",
)
from bokeh.plotting import figure, output_file, show
# Draw three annuli (rings) on a small Bokeh figure and open it in a browser.
# Fix: `plot_width`/`plot_height` were deprecated in Bokeh 2.4 and removed in
# Bokeh 3.0 -- `width`/`height` are the supported property names.
plot = figure(width=300, height=300)
plot.annulus(
    x=[1, 2, 3],
    y=[1, 2, 3],
    color="#7FC97F",
    inner_radius=0.2,
    outer_radius=0.5,
)
show(plot)  # writes an HTML file and opens it (see output_file import above)
# Walk every page of a VistA menu-authorization report PDF, extracting text
# so menu sections (lines starting with "MENU:") can be parsed out.
# NOTE(review): this chunk is truncated and its indentation was lost in the
# paste -- as written, nothing below is actually nested under the `with`/`for`;
# restore the original indentation before running. `pdfplumber` and `re`
# imports are also not visible in this chunk.
with pdfplumber.open("VISTA_Menu_Authorizations.pdf") as pdf:
next_page_continue=False  # flag: a menu entry continues onto the next page
for pcount in range(len(pdf.pages)):
page=pdf.pages[pcount]
text=page.extract_text()  # full text of the current page (may be None for image-only pages)
if not next_page_continue:
out=[]  # accumulator for rows parsed from the current menu section
next_page_menu=[]  # carry-over lines for a menu split across a page break
#getting code & description
menu_re= re.compile(r'^MENU:')  # marks the start of a menu section header line
def get_coefs(word, *arr):
    """Split one embedding-file row into a (token, float32 vector) pair."""
    vector = np.asarray(arr, dtype='float32')
    return word, vector
# Build the word -> embedding-vector index from a text embedding file
# (one "token v1 v2 ... vN" line per word).
# Fix: the original called open() inline and never closed the handle;
# a context manager guarantees the file is released.
with open(EMBEDDING_FILE, encoding="utf8") as emb_file:
    embeddings_index = dict(get_coefs(*line.strip().split()) for line in emb_file)

# Mean/std of all embedding values -- typically used later to sample random
# vectors for out-of-vocabulary tokens from the same distribution.
all_embs = np.stack(list(embeddings_index.values()))
emb_mean, emb_std = all_embs.mean(), all_embs.std()

# NOTE(review): `tokenizer` is defined elsewhere (presumably a fitted Keras
# Tokenizer) -- confirm before running this chunk standalone.
word_index = tokenizer.word_index
function onFrontCamEvent(eventData)
{
for(var i = 0; i < script.faceContent.length; i++)
{
var faceObject = script.faceContent[i];
if(faceObject)
{
faceObject.enabled = true;
}
}
# Download a sample photo and convert it into a fastai Image for display.
url = 'https://images.pexels.com/photos/5045947/pexels-photo-5045947.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=750&w=1260' #@param {type:"string"}
response = requests.get(url)  # NOTE(review): no timeout or status-code check -- best-effort notebook code
img = PIL.Image.open(BytesIO(response.content)).convert("RGB")  # decode the downloaded bytes, force 3-channel RGB
img_t = T.ToTensor()(img)  # PIL HWC image -> CHW float tensor scaled to [0, 1]
img_fast = Image(img_t)  # wrap the tensor as a fastai.vision Image
show_image(img_fast, figsize=(8,8), interpolation='nearest');
# Imports for the fastai image demo.
# NOTE(review): the explicit `from fastai.vision import open_image, ...` is
# redundant after the star import two lines above; `PIL.Image` and
# `from PIL import Image` also overlap -- the later `Image` name (PIL's)
# is shadowed wherever fastai's star import re-binds it.
import fastai
from fastai.vision import *
from fastai.utils.mem import *
from fastai.vision import open_image, load_learner, image, torch
import numpy as np
import urllib.request
import PIL.Image
from io import BytesIO
import torchvision.transforms as T
from PIL import Image
print("hello")  # sanity check that all imports completed without error
# Plot each 2018 pair's two price series together, one subplot per pair.
pairs_2018=list(dictt.values())  # NOTE(review): `dictt` is built elsewhere; presumably maps key -> (ticker, ticker)
plt.figure(figsize=(10,8))
plt.subplot(2,2,1)  # pair 0: both series on one axes for visual comparison
pivot_2018[pairs_2018[0][0]].plot(label=pairs_2018[0][0])
pivot_2018[pairs_2018[0][1]].plot(label=pairs_2018[0][1])
plt.subplot(2,2,2)  # pair 1
pivot_2018[pairs_2018[1][0]].plot(label=pairs_2018[1][0])
pivot_2018[pairs_2018[1][1]].plot(label=pairs_2018[1][1])
plt.subplot(2,2,3)  # pair 2
# NOTE(review): chunk appears truncated -- only the first series of pair 2
# is plotted, and no legend()/show() call is visible here.
pivot_2018[pairs_2018[2][0]].plot(label=pairs_2018[2][0])
# Find the corpus document most similar to the document at `my_link`
# using a gensim TF-IDF similarity index built earlier in the notebook.
my_doc=get_doc(my_link)  # fetch/extract the raw text (helper defined elsewhere)
query_doc=[w.lower() for w in word_tokenize(my_doc)]  # lowercase NLTK word tokens
Query_doc_bow=dictionary.doc2bow(query_doc)  # bag-of-words ids via the prebuilt gensim Dictionary
Query_doc_tfidf= tf_idf[Query_doc_bow]  # TF-IDF weights for the query document
final_list=list(sims[Query_doc_tfidf])  # similarity score against every indexed document
Best_match_index=final_list.index(max(final_list))  # argmax -> position of the best match
Output=data.iloc[Best_match_index]["publication"]  # NOTE(review): assumes `data` row order matches the similarity index -- confirm