Created
September 17, 2022 13:26
-
-
Save manitgupta/6cf699ec655805cc51a0927bfc6e184f to your computer and use it in GitHub Desktop.
Sample code using Jugaad Trader and jugaad-data to download NSE daily/weekly OHLC data into MySQL
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from datetime import date, timedelta | |
from jugaad_data.nse import bhavcopy_save | |
from mysql_details import mydb | |
from mysql_details import cursor | |
import pandas as pd | |
from tenacity import retry, stop_after_attempt | |
from jugaad_trader import Zerodha | |
from sqlalchemy import create_engine | |
import urllib.parse | |
import pprint as pp | |
# Parameterized INSERT for OHLC candles. "INSERT IGNORE" silently skips rows
# that violate a unique key, so re-running a download does not raise on
# already-stored candles.
SQL = "INSERT IGNORE INTO OHLC_DATA (STOCK, DATA_DATE, TIMEFRAME, OPEN, CLOSE, HIGH, LOW, VOLUME) values (%s, " \
"%s, %s, %s, %s, %s, %s, %s)"
# SQLAlchemy engine used by pandas.read_sql. The password is URL-quoted
# because it contains '@', which would otherwise break the connection URL.
# NOTE(review): credentials are hard-coded — consider moving to env vars.
db_connection_str = 'mysql+pymysql://root:%s@localhost/PERSONAL' % urllib.parse.quote_plus('mysql@123')
db_connection = create_engine(db_connection_str)
# Symbols for which the weekly download failed with every suffix attempted
# (populated by download_from_kite, reported by the weekly download drivers).
WEEKLY_FAILED_LIST = []
# Symbol suffixes tried in order when resolving an instrument on NSE —
# presumably NSE series suffixes (e.g. trade-for-trade) — TODO confirm.
SUFFIX_LIST = ['', '-BE', '-BZ']
# Zerodha Kite session via jugaad_trader; set_access_token() is called at
# import time (side effect) to load the locally stored access token.
kite = Zerodha()
kite.set_access_token()
def get_unique_stock_list():
    """Return the distinct stock symbols stored in the MARKET_DATA table.

    Reads through the module-level SQLAlchemy engine.

    Returns:
        list[str]: unique symbols, in whatever order the DB returns them.
    """
    print("-------------------------------------------------------")
    print("Fetching list of unique stocks from DB...")
    df = pd.read_sql('select DISTINCT(STOCK) from MARKET_DATA', con=db_connection)
    # Vectorized column extraction instead of a row-by-row iterrows() loop.
    stock_list = df['STOCK'].tolist()
    print("Total unique stocks in DB: " + str(len(stock_list)))
    return stock_list
def get_last_sunday():
    """Return the date of the most recent Sunday (today, if today is Sunday)."""
    today = date.today()
    # date.weekday(): Monday=0 .. Sunday=6, so the number of days elapsed
    # since the last Sunday is (weekday + 1) mod 7.
    days_since_sunday = (today.weekday() + 1) % 7
    return today - timedelta(days=days_since_sunday)
def download_latest_weekly_candle():
    """Download the most recent week of weekly candles for every known stock.

    Failures are collected in WEEKLY_FAILED_LIST and reported at the end.
    """
    to_date = date.today()
    from_date = to_date - timedelta(weeks=1)
    for symbol in get_unique_stock_list():
        download_weekly_stock_data(symbol, from_date, to_date)
    print("Following stocks could not be downloaded: ")
    pp.pprint(WEEKLY_FAILED_LIST)
def download_weekly_historical_data(weeks=30):
    """Download `weeks` weeks of weekly candles, ending at the last Sunday.

    Failures are collected in WEEKLY_FAILED_LIST and reported at the end.
    """
    to_date = get_last_sunday()
    from_date = to_date - timedelta(weeks=weeks)
    for symbol in get_unique_stock_list():
        download_weekly_stock_data(symbol, from_date, to_date)
    print("Following stocks could not be downloaded: ")
    pp.pprint(WEEKLY_FAILED_LIST)
def download_weekly_stock_data(symbol, from_, to):
    """Fetch weekly candles for `symbol` between `from_` and `to`, store in DB.

    Fix: when the download fails, download_from_kite returns an empty frame;
    previously this function still ran executemany/commit and printed a
    misleading "Saved data" message. Now it returns early instead
    (download_from_kite has already recorded the failure).
    """
    df = download_from_kite(symbol, from_, to)
    if df.empty:
        return
    weekly_data = [
        (symbol, row['date'].strftime('%Y-%m-%d %H:%M:%S'), 'WEEKLY',
         row['open'], row['close'], row['high'], row['low'], row['volume'])
        for _, row in df.iterrows()
    ]
    cursor.executemany(SQL, weekly_data)
    mydb.commit()
    print("Saved data in DB for stock: {}".format(symbol))
def download_from_kite(symbol, from_, to, retry_count=0):
    """Download weekly candles for `symbol` from Kite, trying NSE suffixes.

    Starting at SUFFIX_LIST[retry_count], each suffixed symbol is tried in
    turn; the first successful download is returned as a DataFrame. If every
    suffix fails, the bare symbol is recorded in WEEKLY_FAILED_LIST and an
    empty DataFrame is returned.
    """
    for attempt in range(retry_count, len(SUFFIX_LIST)):
        candidate = symbol + SUFFIX_LIST[attempt]
        try:
            instrument = 'NSE:' + candidate
            quote = kite.ltp(instrument)
            token = quote[instrument]['instrument_token']
            data = kite.historical_data(token, from_, to, 'week')
        except Exception:
            print("Could not download weekly data for: {}".format(candidate))
            continue
        return pd.DataFrame(data)
    # Exhausted every suffix without a successful download.
    WEEKLY_FAILED_LIST.append(symbol)
    return pd.DataFrame()
def download_latest_daily_data():
    """Download and store today's daily bhavcopy."""
    download_daily_data(date.today())
def download_daily_historical_data(days=30):
    """Download daily data for each of the last `days` calendar days.

    Covers days+1 dates (both endpoints inclusive), oldest first, ending today.
    """
    today = date.today()
    for offset in range(days, -1, -1):
        download_daily_data(today - timedelta(days=offset))
def download_daily_data(date_):
    """Download the NSE bhavcopy for `date_` and store each row as a DAILY candle.

    Days with no bhavcopy (weekend/holiday, or a failed download) are skipped.

    Args:
        date_: datetime.date for which to fetch the bhavcopy.
    """
    csv_path = download_day_data(date_)
    if csv_path is None:
        # Guard clause: nothing to ingest for this day.
        print("Skipped day: {}".format(str(date_)))
        return
    print("Downloaded data for date: {}".format(str(date_)))
    df = pd.read_csv(csv_path)
    # The timestamp is the same for every row — hoist it out of the loop.
    stamp = date_.strftime('%Y-%m-%d %H:%M:%S')
    day_data = [
        (row['SYMBOL'], stamp, 'DAILY', row['OPEN'],
         row['CLOSE'], row['HIGH'], row['LOW'], row['TOTTRDQTY'])
        for _, row in df.iterrows()
    ]
    cursor.executemany(SQL, day_data)
    mydb.commit()
    print("Saved data in DB for date: {}".format(str(date_)))
def download_day_data(data_date):
    """Save the NSE bhavcopy for `data_date` under data/ and return its path.

    Returns None when the bhavcopy is unavailable (holiday/weekend) or the
    download fails for any reason; callers treat None as "skip this day".

    Fix: the former @retry(stop=stop_after_attempt(1)) decorator was a no-op —
    stop_after_attempt(1) allows a single attempt (no retries), and the
    blanket except below swallowed every exception so tenacity never saw a
    failure anyway. The dead decorator has been removed; behavior is unchanged.
    """
    try:
        return bhavcopy_save(data_date, "data/")
    except Exception:
        # Deliberate best-effort: the NSE simply has no file on non-trading days.
        return None
def main():
    """Entry point: backfill 20 days of daily data and 4 weeks of weekly data."""
    download_daily_historical_data(days=20)
    download_weekly_historical_data(weeks=4)
    # Alternative one-shot modes:
    # download_latest_weekly_candle()
    # download_latest_daily_data()


# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment