Skip to content

Instantly share code, notes, and snippets.

View aniruddha27's full-sized avatar

Aniruddha Bhandari aniruddha27

View GitHub Profile
# Streaming tweets from user timeline
# NOTE(review): relies on an authenticated tweepy `api` object created elsewhere.
user = "AnalyticsVidhya"
public_tweet = api.user_timeline(id=user, count=5)
for tweet in public_tweet:
    # fixed: the loop body must be indented (original raised IndentationError)
    print("-->", tweet.text)
# Streaming tweets from home timeline
# NOTE(review): relies on an authenticated tweepy `api` object created elsewhere.
public_tweet = api.home_timeline(count=5)
for tweet in public_tweet:
    # fixed: the loop body must be indented (original raised IndentationError)
    print("-->", tweet.text)
# Twitter API authentication
import tweepy

# Fill these in with your credentials from the Twitter developer portal.
# fixed: the originals were bare `name = # comment` lines, which are SyntaxErrors.
api_key = ""             # api_key
api_secret_key = ""      # api_secret_key
access_token = ""        # access_token
access_token_secret = ""  # access_token_secret

# authorize the API Key
# NOTE(review): this step had a comment but no code; this is the standard
# tweepy OAuth 1.0a flow, and it defines the `api` object the timeline
# snippets above depend on.
auth = tweepy.OAuthHandler(api_key, api_secret_key)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
import pandas as pd
import os
import shutil
from sklearn.model_selection import train_test_split
# Root folder of the emergency vs non-emergency image dataset on disk.
home_path = r'C:/Users/Dell/Desktop/Analytics Vidhya/ImageDataGenerator/emergency_vs_non-emergency_dataset/emergency_vs_non-emergency_dataset'

# Sub-directory that will hold the training split of the images.
train_path = os.path.join(home_path, 'train')
# Pull the image file names and their class labels out of the
# train/test dataframes (plain column access — same Series as `.loc[:, col]`).
train_images = df_train['image_name']
train_labels = df_train['category']
test_images = df_test['image_name']
test_labels = df_test['category']

# Accumulator for the loaded training images.
x_train = []
for i in train_images:
# Compile
# NOTE(review): `RMSprop` must be imported for this to run, e.g.
# `from tensorflow.keras.optimizers import RMSprop` — not visible in this file.
optim = RMSprop(learning_rate=0.00001)
model.compile(loss='categorical_crossentropy',
              optimizer=optim,
              metrics=['accuracy'])

# Fit
# fixed: the original call was missing its closing parenthesis (SyntaxError).
history = model.fit(x_train, y_train,
                    epochs=25,
                    validation_data=(x_test, y_test),
                    batch_size=32)
# Split data into train-test data sets
X = df['image_names']
y = df['emergency_or_not']

# Hold out 10% of the rows, stratified on the label so both splits
# keep the same class proportions; fixed seed for reproducibility.
train_x, val_x, train_y, val_y = train_test_split(
    X,
    y,
    test_size=0.1,
    random_state=27,
    stratify=y,
)
# Augmenting on the fly: stream augmented batches straight from the generator.
# fixed: `Model.fit_generator` is deprecated (TF >= 2.1) and removed in newer
# releases; `Model.fit` accepts generators/iterators directly with the same args.
model.fit(datagen.flow(x_train, y_train, batch_size=batch_size),
          epochs=epochs,  # number of passes over the training data
          steps_per_epoch=x_train.shape[0] // batch_size,  # batches per epoch (was mislabelled "images")
          validation_data=(x_test, y_test),  # data for validation
          validation_steps=x_test.shape[0] // batch_size)
# or use iterator from .flow_from_directory()
# Epochs
epochs = 25
# Batch size
batch_size = 32

# fixed: the original `model.fit(...)` call was missing its closing
# parenthesis (SyntaxError — the scrape truncated the call).
history = model.fit(train_datagen.flow(x_train, y_train,
                                       batch_size=batch_size,
                                       seed=27,       # reproducible augmentation
                                       shuffle=False),
                    epochs=epochs)
# Augmentation: generator that randomly perturbs each training image on the fly.
train_datagen = ImageDataGenerator(
    rotation_range=5,             # rotation
    width_shift_range=0.2,        # horizontal shift
    zoom_range=0.2,               # zoom
    horizontal_flip=True,         # horizontal flip
    brightness_range=[0.2, 0.8],  # brightness
)