# Imports
from pandas_datareader import data as pdr
from pandas import ExcelWriter
import yfinance as yf
import pandas as pd
import datetime
import time
import requests

yf.pdr_override()

# Get tickers for all S&P 500 stocks from Wikipedia
def tickers_sp500():
    url = 'https://en.wikipedia.org/wiki/List_of_S%26P_500_companies'
    html = requests.get(url).text
    df = pd.read_html(html, header=0)[0]
    tickers = df['Symbol'].tolist()
    return tickers

# Variables
tickers = tickers_sp500()
tickers = [item.replace(".", "-") for item in tickers]  # Yahoo Finance uses dashes instead of dots
index_name = '^GSPC'  # S&P 500
start_date = datetime.datetime.now() - datetime.timedelta(days=365)
end_date = datetime.date.today()
exportList = pd.DataFrame(columns=['Stock', "RS_Rating", "50 Day MA", "150 Day Ma", "200 Day MA", "52 Week Low", "52 week High"])
returns_multiples = []

# Index Returns
index_df = pdr.get_data_yahoo(index_name, start_date, end_date)
index_df['Percent Change'] = index_df['Adj Close'].pct_change()
index_return = (index_df['Percent Change'] + 1).cumprod()[-1]

# Find top 30% performing stocks (relative to the S&P 500)
for ticker in tickers:
    # Download historical data as CSV for each stock (makes the process faster)
    df = pdr.get_data_yahoo(ticker, start_date, end_date)
    df.to_csv(f'{ticker}.csv')

    # Calculating returns relative to the market (returns multiple)
    df['Percent Change'] = df['Adj Close'].pct_change()
    stock_return = (df['Percent Change'] + 1).cumprod()[-1]
    returns_multiple = round((stock_return / index_return), 2)
    returns_multiples.extend([returns_multiple])

    print(f'Ticker: {ticker}; Returns Multiple against S&P 500: {returns_multiple}\n')
    time.sleep(1)

# Creating dataframe of only top 30%
rs_df = pd.DataFrame(list(zip(tickers, returns_multiples)), columns=['Ticker', 'Returns_multiple'])
rs_df['RS_Rating'] = rs_df.Returns_multiple.rank(pct=True) * 100
rs_df = rs_df[rs_df.RS_Rating >= rs_df.RS_Rating.quantile(.70)]

# Checking Minervini conditions of top 30% of stocks in given list
rs_stocks = rs_df['Ticker']
for stock in rs_stocks:
    try:
        df = pd.read_csv(f'{stock}.csv', index_col=0)
        sma = [50, 150, 200]
        for x in sma:
            df["SMA_" + str(x)] = round(df['Adj Close'].rolling(window=x).mean(), 2)

        # Storing required values
        currentClose = df["Adj Close"][-1]
        moving_average_50 = df["SMA_50"][-1]
        moving_average_150 = df["SMA_150"][-1]
        moving_average_200 = df["SMA_200"][-1]
        low_of_52week = round(min(df["Low"][-260:]), 2)
        high_of_52week = round(max(df["High"][-260:]), 2)
        RS_Rating = round(rs_df[rs_df['Ticker']==stock].RS_Rating.tolist()[0])

        try:
            moving_average_200_20 = df["SMA_200"][-20]
        except Exception:
            moving_average_200_20 = 0

        # Condition 1: Current Price > 150 SMA and > 200 SMA
        condition_1 = currentClose > moving_average_150 > moving_average_200
        # Condition 2: 150 SMA > 200 SMA
        condition_2 = moving_average_150 > moving_average_200
        # Condition 3: 200 SMA trending up for at least 1 month
        condition_3 = moving_average_200 > moving_average_200_20
        # Condition 4: 50 SMA > 150 SMA and 50 SMA > 200 SMA
        condition_4 = moving_average_50 > moving_average_150 > moving_average_200
        # Condition 5: Current Price > 50 SMA
        condition_5 = currentClose > moving_average_50
        # Condition 6: Current Price is at least 30% above 52 week low
        condition_6 = currentClose >= (1.3 * low_of_52week)
        # Condition 7: Current Price is within 25% of 52 week high
        condition_7 = currentClose >= (.75 * high_of_52week)

        # If all conditions above are true, add stock to exportList
        if (condition_1 and condition_2 and condition_3 and condition_4 and condition_5 and condition_6 and condition_7):
            exportList = exportList.append({'Stock': stock, "RS_Rating": RS_Rating, "50 Day MA": moving_average_50, "150 Day Ma": moving_average_150, "200 Day MA": moving_average_200, "52 Week Low": low_of_52week, "52 week High": high_of_52week}, ignore_index=True)
            print(stock + " made the Minervini requirements")
    except Exception as e:
        print(e)
        print(f"Could not gather data on {stock}")

exportList = exportList.sort_values(by='RS_Rating', ascending=False)
print('\n', exportList)

writer = ExcelWriter("ScreenOutput.xlsx")
exportList.to_excel(writer, "Sheet1")
writer.save()
I have an Excel file with all the US tickers. I'd like to check the RS of all these tickers and afterwards the 7 other conditions... How do I need to change the code to check my file instead of the 500 stocks of the S&P?
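One way to do this, as a rough sketch: drop the tickers = tickers_sp500() line and load the symbols from your spreadsheet instead. The file name and column name below are placeholders; adjust them to match your Excel file (reading .xlsx also needs the openpyxl package installed).

import pandas as pd

# Placeholder names -- change these to match your spreadsheet.
ticker_file = "us_tickers.xlsx"
ticker_column = "Ticker"

# Read the first sheet and pull the symbol column into a plain list.
ticker_df = pd.read_excel(ticker_file)
tickers = ticker_df[ticker_column].dropna().astype(str).tolist()

# Yahoo Finance uses dashes instead of dots (e.g. BRK.B -> BRK-B).
tickers = [item.replace(".", "-") for item in tickers]

The rest of the screener (the RS ranking and the 7 Minervini conditions) can stay unchanged, since it only loops over whatever ends up in the tickers list.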
Hey Shashank,
Your ticker list is the S&P 500; I am trying to do the same for the Nifty 500 (Indian markets) but I am getting an error. Please help me with it.
Regards
Since you took the S&P 500 as the ticker list and I want to run this for the Indian markets (Nifty 500), I changed the code as below (I took the Nifty 500 ticker reference from Yahoo Finance).
Before:
tickers = si.tickers_sp500()
tickers = [item.replace(".", "-") for item in tickers] # Yahoo Finance uses dashes instead of dots
index_name = '^GSPC' # S&P 500
After:
tickers = si.tickers_nifty500()
tickers = [item.replace(".", "-") for item in tickers]
index_name = '^CRSLDX' # NIFTY 500
I am getting this error:
Traceback (most recent call last):
File "C:\Users\Ketan\Documents\python\helloworld.py", line 12, in
tickers = si.tickers_nifty500()
AttributeError: module 'yahoo_fin.stock_info' has no attribute 'tickers_nifty500'. Did you mean: 'tickers_nifty50'?
--------------------------- I CHANGED NIFTY 500 TO NIFTY 50 ----------------
The error is now as follows:
Exception in thread Thread-2 (_run_via_pool):
Traceback (most recent call last):
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.10_3.10.2288.0_x64__qbz5n2kfra8p0\lib\threading.py", line 1016, in _bootstrap_inner
self.run()
File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.10_3.10.2288.0_x64__qbz5n2kfra8p0\lib\threading.py", line 953, in run
self._target(*self._args, **self._kwargs)
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\multitasking\__init__.py", line 104, in _run_via_pool
return callee(*args, **kwargs)
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\yfinance\multi.py", line 199, in _download_one_threaded
data = _download_one(ticker, start, end, auto_adjust, back_adjust,
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\yfinance\multi.py", line 213, in _download_one
return Ticker(ticker).history(period=period, interval=interval,
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\yfinance\base.py", line 168, in history
end = utils._parse_user_dt(end, tz)
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\yfinance\utils.py", line 157, in _parse_user_dt
dt = tz.timezone(exchange_tz).localize(dt)
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\pytz_init.py", line 170, in timezone
if zone.upper() == 'UTC':
AttributeError: 'float' object has no attribute 'upper'
Traceback (most recent call last):
File "C:\Users\Ketan\Documents\python\helloworld.py", line 26, in
df = pdr.get_data_yahoo(ticker, start_date, end_date)
File "C:\Users\Ketan\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\yfinance\multi.py", line 118, in download
_time.sleep(0.01)
KeyboardInterrupt
^C
Please do help me with it.
Regards
# Imports
from pandas_datareader import data as pdr
from yahoo_fin import stock_info as si
from pandas import ExcelWriter
import yfinance as yf
import pandas as pd
import datetime
import time

yf.pdr_override()

# Variables
tickers = si.tickers_nifty50()
tickers = [item.replace(" ", " ") for item in tickers]  # Yahoo Finance uses dashes instead of dots
index_name = '^CRSLDX'  # Nifty India
start_date = datetime.datetime.now() - datetime.timedelta(days=365)
end_date = datetime.datetime.now()
end_date = datetime.date.today()
exportList = pd.DataFrame(columns=['Stock', "RS_Rating", "50 Day MA", "150 Day Ma", "200 Day MA", "52 Week Low", "52 week High"])
returns_multiples = []

# Index Returns
index_df = pdr.get_data_yahoo(index_name, start_date, end_date)
index_df['Percent Change'] = index_df['Adj Close'].pct_change()
index_return = (index_df['Percent Change'] + 1).cumprod()[-1]

# Find top 30% performing stocks (relative to the Nifty)
for ticker in tickers:
    # Download historical data as CSV for each stock (makes the process faster)
    df = pdr.get_data_yahoo(ticker, start_date, end_date)
    df.to_csv(f'{ticker}.csv')

    # Calculating returns relative to the market (returns multiple)
    df['Percent Change'] = df['Adj Close'].pct_change()
    stock_return = (df['Percent Change'] + 1).cumprod()[-1]
    returns_multiple = round((stock_return / index_return), 2)
    returns_multiples.extend([returns_multiple])

    print(f'Ticker: {ticker}; Returns Multiple against Nifty: {returns_multiple}\n')
    time.sleep(1)

# Creating dataframe of only top 30%
rs_df = pd.DataFrame(list(zip(tickers, returns_multiples)), columns=['Ticker', 'Returns_multiple'])
rs_df['RS_Rating'] = rs_df.Returns_multiple.rank(pct=True) * 100
rs_df = rs_df[rs_df.RS_Rating >= rs_df.RS_Rating.quantile(.70)]

# Checking Minervini conditions of top 30% of stocks in given list
rs_stocks = rs_df['Ticker']
for stock in rs_stocks:
    try:
        df = pd.read_csv(f'{stock}.csv', index_col=0)
        sma = [50, 150, 200]
        for x in sma:
            df["SMA_" + str(x)] = round(df['Adj Close'].rolling(window=x).mean(), 2)

        # Storing required values
        currentClose = df["Adj Close"][-1]
        moving_average_50 = df["SMA_50"][-1]
        moving_average_150 = df["SMA_150"][-1]
        moving_average_200 = df["SMA_200"][-1]
        low_of_52week = round(min(df["Low"][-260:]), 2)
        high_of_52week = round(max(df["High"][-260:]), 2)
        RS_Rating = round(rs_df[rs_df['Ticker']==stock].RS_Rating.tolist()[0])

        try:
            moving_average_200_20 = df["SMA_200"][-20]
        except Exception:
            moving_average_200_20 = 0

        # Condition 1: Current Price > 150 SMA and > 200 SMA
        condition_1 = currentClose > moving_average_150 > moving_average_200
        # Condition 2: 150 SMA > 200 SMA
        condition_2 = moving_average_150 > moving_average_200
        # Condition 3: 200 SMA trending up for at least 1 month
        condition_3 = moving_average_200 > moving_average_200_20
        # Condition 4: 50 SMA > 150 SMA and 50 SMA > 200 SMA
        condition_4 = moving_average_50 > moving_average_150 > moving_average_200
        # Condition 5: Current Price > 50 SMA
        condition_5 = currentClose > moving_average_50
        # Condition 6: Current Price is at least 30% above 52 week low
        condition_6 = currentClose >= (1.3 * low_of_52week)
        # Condition 7: Current Price is within 25% of 52 week high
        condition_7 = currentClose >= (.75 * high_of_52week)

        # If all conditions above are true, add stock to exportList
        if (condition_1 and condition_2 and condition_3 and condition_4 and condition_5 and condition_6 and condition_7):
            exportList = exportList.append({'Stock': stock, "RS_Rating": RS_Rating, "50 Day MA": moving_average_50, "150 Day Ma": moving_average_150, "200 Day MA": moving_average_200, "52 Week Low": low_of_52week, "52 week High": high_of_52week}, ignore_index=True)
            print(stock + " made the Minervini requirements")
    except Exception as e:
        print(e)
        print(f"Could not gather data on {stock}")

exportList = exportList.sort_values(by='RS_Rating', ascending=False)
print('\n', exportList)

writer = ExcelWriter("ScreenOutput.xlsx")
exportList.to_excel(writer, "Sheet1")
writer.save()
For the Nifty 500, I used this ticker list:
tickers=["360ONE.NS","3MINDIA.NS","ABB.NS","ACC.NS","AIAENG.NS","APLAPOLLO.NS","AUBANK.NS","AARTIDRUGS.NS","AARTIIND.NS","AAVAS.NS","ABBOTINDIA.NS","ADANIENT.NS","ADANIGREEN.NS","ADANIPORTS.NS","ADANIPOWER.NS","ATGL.NS","ADANITRANS.NS","AWL.NS","ABCAPITAL.NS","ABFRL.NS","AEGISCHEM.NS","AETHER.NS","AFFLE.NS","AJANTPHARM.NS","APLLTD.NS","ALKEM.NS","ALKYLAMINE.NS","AMARAJABAT.NS","AMBER.NS","AMBUJACEM.NS","ANGELONE.NS","ANURAS.NS","APARINDS.NS","APOLLOHOSP.NS","APOLLOTYRE.NS","APTUS.NS","ACI.NS","ASAHIINDIA.NS","ASHOKLEY.NS","ASIANPAINT.NS","ASTERDM.NS","ASTRAL.NS","ATUL.NS","AUROPHARMA.NS","AVANTIFEED.NS","DMART.NS","AXISBANK.NS","BASF.NS","BEML.NS","BLS.NS","BSE.NS","BAJAJ-AUTO.NS","BAJFINANCE.NS","BAJAJFINSV.NS","BAJAJHLDNG.NS","BALAMINES.NS","BALKRISIND.NS","BALRAMCHIN.NS","BANDHANBNK.NS","BANKBARODA.NS","BANKINDIA.NS","MAHABANK.NS","BATAINDIA.NS","BAYERCROP.NS","BERGEPAINT.NS","BDL.NS","BEL.NS","BHARATFORG.NS","BHEL.NS","BPCL.NS","BHARTIARTL.NS","BIKAJI.NS","BIOCON.NS","BIRLACORPN.NS","BSOFT.NS","BLUEDART.NS","BLUESTARCO.NS","BBTC.NS","BORORENEW.NS","BOSCHLTD.NS","BRIGADE.NS","BCG.NS","BRITANNIA.NS","MAPMYINDIA.NS","CCL.NS","CESC.NS","CGPOWER.NS","CRISIL.NS","CSBBANK.NS","CAMPUS.NS","CANFINHOME.NS","CANBK.NS","CGCL.NS","CARBORUNIV.NS","CASTROLIND.NS","CEATLTD.NS","CENTRALBK.NS","CDSL.NS","CENTURYPLY.NS","CENTURYTEX.NS","CERA.NS","CHALET.NS","CHAMBLFERT.NS","CHEMPLASTS.NS","CHOLAHLDNG.NS","CHOLAFIN.NS","CIPLA.NS","CUB.NS","CLEAN.NS","COALINDIA.NS","COCHINSHIP.NS","COFORGE.NS","COLPAL.NS","CAMS.NS","CONCOR.NS","COROMANDEL.NS","CRAFTSMAN.NS","CREDITACC.NS","CROMPTON.NS","CUMMINSIND.NS","CYIENT.NS","DCMSHRIRAM.NS","DLF.NS","DABUR.NS","DALBHARAT.NS","DATAPATTNS.NS","DEEPAKFERT.NS","DEEPAKNTR.NS","DELHIVERY.NS","DELTACORP.NS","DEVYANI.NS","DIVISLAB.NS","DIXON.NS","LALPATHLAB.NS","DRREDDY.NS","EIDPARRY.NS","EIHOTEL.NS","EPL.NS","EASEMYTRIP.NS","EICHERMOT.NS","ELGIEQUIP.NS","EMAMILTD.NS","ENDURANCE.NS","ENGINERSIN.NS","EQUITASBNK.NS","ERIS.NS","ESCORTS.NS","EXIDEIND.NS","FDC.NS","NYKAA.NS","FEDERALBNK.NS","FACT.NS","FINEORG.NS","FINCABLES.NS","FINPIPE.NS","FSL.NS","FIVESTAR.NS","FORTIS.NS","GRINFRA.NS","GAIL.NS","GMMPFAUDLR.NS","GMRINFRA.NS","GALAXYSURF.NS","GARFIBRES.NS","GICRE.NS","GLAND.NS","GLAXO.NS","GLENMARK.NS","MEDANTA.NS","GOCOLORS.NS","GODFRYPHLP.NS","GODREJAGRO.NS","GODREJCP.NS","GODREJIND.NS","GODREJPROP.NS","GRANULES.NS","GRAPHITE.NS","GRASIM.NS","GESHIP.NS","GREENPANEL.NS","GRINDWELL.NS","GUJALKALI.NS","GAEL.NS","FLUOROCHEM.NS","GUJGASLTD.NS","GNFC.NS","GPPL.NS","GSFC.NS","GSPL.NS","HEG.NS","HCLTECH.NS","HDFCAMC.NS","HDFCBANK.NS","HDFCLIFE.NS","HFCL.NS","HLEGLAS.NS","HAPPSTMNDS.NS","HAVELLS.NS","HEROMOTOCO.NS","HIKAL.NS","HINDALCO.NS","HGS.NS","HAL.NS","HINDCOPPER.NS","HINDPETRO.NS","HINDUNILVR.NS","HINDZINC.NS","POWERINDIA.NS","HOMEFIRST.NS","HONAUT.NS","HUDCO.NS","HDFC.NS","ICICIBANK.NS","ICICIGI.NS","ICICIPRULI.NS","ISEC.NS","IDBI.NS","IDFCFIRSTB.NS","IDFC.NS","IFBIND.NS","IIFL.NS","IRB.NS","ITC.NS","ITI.NS","INDIACEM.NS","IBULHSGFIN.NS","IBREALEST.NS","INDIAMART.NS","INDIANB.NS","IEX.NS","INDHOTEL.NS","IOC.NS","IOB.NS","IRCTC.NS","IRFC.NS","INDIGOPNTS.NS","IGL.NS","INDUSTOWER.NS","INDUSINDBK.NS","INFIBEAM.NS","NAUKRI.NS","INFY.NS","INGERRAND.NS","INTELLECT.NS","INDIGO.NS","IPCALAB.NS","JBCHEPHARM.NS","JKCEMENT.NS","JBMA.NS","JKLAKSHMI.NS","JKPAPER.NS","JMFINANCIL.NS","JSWENERGY.NS","JSWSTEEL.NS","JAMNAAUTO.NS","JSL.NS","JINDALSTEL.NS","JINDWORLD.NS","JUBLFOOD.NS","JUBLINGREA.NS","JUBLPHARMA.NS","JUSTDIAL.NS","JYOTHYLAB.NS","KPRMILL.NS","KEI.NS","KNRCON.NS","KPITTECH.NS","KRBL.NS
","KSB.NS","KAJARIACER.NS","KALPATPOWR.NS","KALYANKJIL.NS","KANSAINER.NS","KARURVYSYA.NS","KEC.NS","KENNAMET.NS","RUSTOMJEE.NS","KFINTECH.NS","KOTAKBANK.NS","KIMS.NS","L&TFH.NS","LTTS.NS","LICHSGFIN.NS","LTIM.NS","LAXMIMACH.NS","LT.NS","LATENTVIEW.NS","LAURUSLABS.NS","LXCHEM.NS","LEMONTREE.NS","LICI.NS","LINDEINDIA.NS","LUPIN.NS","LUXIND.NS","MMTC.NS","MRF.NS","MTARTECH.NS","LODHA.NS","MGL.NS","M&MFIN.NS","M&M.NS","MAHINDCIE.NS","MHRIL.NS","MAHLIFE.NS","MAHLOG.NS","MANAPPURAM.NS","MRPL.NS","MARICO.NS","MARUTI.NS","MASTEK.NS","MFSL.NS","MAXHEALTH.NS","MAZDOCK.NS","MEDPLUS.NS","MFL.NS","METROBRAND.NS","METROPOLIS.NS","MSUMI.NS","MOTILALOFS.NS","MPHASIS.NS","MCX.NS","MUTHOOTFIN.NS","NATCOPHARM.NS","NBCC.NS","NCC.NS","NHPC.NS","NLCINDIA.NS","NMDC.NS","NSLNISP.NS","NOCIL.NS","NTPC.NS","NH.NS","NATIONALUM.NS","NAVINFLUOR.NS","NAZARA.NS","NESTLEIND.NS","NETWORK18.NS","NAM-INDIA.NS","NUVOCO.NS","OBEROIRLTY.NS","ONGC.NS","OIL.NS","OLECTRA.NS","PAYTM.NS","OFSS.NS","ORIENTELEC.NS","POLICYBZR.NS","PCBL.NS","PIIND.NS","PNBHOUSING.NS","PNCINFRA.NS","PVRINOX.NS","PAGEIND.NS","PATANJALI.NS","PERSISTENT.NS","PETRONET.NS","PFIZER.NS","PHOENIXLTD.NS","PIDILITIND.NS","PEL.NS","PPLPHARMA.NS","POLYMED.NS","POLYCAB.NS","POLYPLEX.NS","POONAWALLA.NS","PFC.NS","POWERGRID.NS","PRAJIND.NS","PRESTIGE.NS","PRINCEPIPE.NS","PRSMJOHNSN.NS","PGHH.NS","PNB.NS","QUESS.NS","RBLBANK.NS","RECLTD.NS","RHIM.NS","RITES.NS","RADICO.NS","RVNL.NS","RAIN.NS","RAINBOW.NS","RAJESHEXPO.NS","RALLIS.NS","RCF.NS","RATNAMANI.NS","RTNINDIA.NS","RAYMOND.NS","REDINGTON.NS","RELAXO.NS","RELIANCE.NS","RBA.NS","ROSSARI.NS","ROUTE.NS","SBICARD.NS","SBILIFE.NS","SJVN.NS","SKFINDIA.NS","SRF.NS","MOTHERSON.NS","SANOFI.NS","SAPPHIRE.NS","SCHAEFFLER.NS","SHARDACROP.NS","SHOPERSTOP.NS","SHREECEM.NS","RENUKA.NS","SHRIRAMFIN.NS","SHYAMMETL.NS","SIEMENS.NS","SOBHA.NS","SOLARINDS.NS","SONACOMS.NS","SONATSOFTW.NS","STARHEALTH.NS","SBIN.NS","SAIL.NS","SWSOLAR.NS","STLTECH.NS","SUMICHEM.NS","SPARC.NS","SUNPHARMA.NS","SUNTV.NS","SUNDARMFIN.NS","SUNDRMFAST.NS","SUNTECK.NS","SUPRAJIT.NS","SUPREMEIND.NS","SUVENPHAR.NS","SUZLON.NS","SWANENERGY.NS","SYNGENE.NS","TCIEXP.NS","TCNSBRANDS.NS","TTKPRESTIG.NS","TV18BRDCST.NS","TVSMOTOR.NS","TMB.NS","TANLA.NS","TATACHEM.NS","TATACOMM.NS","TCS.NS","TATACONSUM.NS","TATAELXSI.NS","TATAINVEST.NS","TATAMTRDVR.NS","TATAMOTORS.NS","TATAPOWER.NS","TATASTEEL.NS","TTML.NS","TEAMLEASE.NS","TECHM.NS","TEJASNET.NS","NIACL.NS","RAMCOCEM.NS","THERMAX.NS","TIMKEN.NS","TITAN.NS","TORNTPHARM.NS","TORNTPOWER.NS","TCI.NS","TRENT.NS","TRIDENT.NS","TRIVENI.NS","TRITURBINE.NS","TIINDIA.NS","UCOBANK.NS","UFLEX.NS","UNOMINDA.NS","UPL.NS","UTIAMC.NS","ULTRACEMCO.NS","UNIONBANK.NS","UBL.NS","MCDOWELL-N.NS","VGUARD.NS","VMART.NS","VIPIND.NS","VAIBHAVGBL.NS","VTL.NS","VARROC.NS","VBL.NS","MANYAVAR.NS","VEDL.NS","VIJAYA.NS","VINATIORGA.NS","IDEA.NS","VOLTAS.NS","WELCORP.NS","WELSPUNIND.NS","WESTLIFE.NS","WHIRLPOOL.NS","WIPRO.NS","YESBANK.NS","ZFCVINDIA.NS","ZEEL.NS","ZENSARTECH.NS","ZOMATO.NS","ZYDUSLIFE.NS","ZYDUSWELL.NS","ECLERX.NS"] #NIFTY 500
tickers = [item.replace(" ", " ") for item in tickers] # Yahoo Finance uses dashes instead of dots
index_name = '^CRSLDX' # nifty india
start_date = datetime.datetime.now() - datetime.timedelta(days=730)
end_date = datetime.datetime.now()
end_date = datetime.date.today()
exportList = pd.DataFrame(columns=['Stock', "RS_Rating", "50 Day MA", "150 Day Ma", "200 Day MA", "52 Week Low", "52 week High"])
returns_multiples = []
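Not a definitive fix, but a couple of notes on the errors above. yahoo_fin only ships tickers_nifty50(), so for the NIFTY 500 the list has to be built another way; one sketch is to pull NSE's published constituents file and add the ".NS" suffix Yahoo Finance expects for NSE symbols (the URL and the "Symbol" column name are assumptions about NSE's current CSV layout and may need adjusting). The later AttributeError: 'float' object has no attribute 'upper' trace appears to come from yfinance failing to resolve an exchange timezone for a symbol it cannot recognise, so it usually goes away once the ticker list contains only valid Yahoo symbols.

import requests
import pandas as pd
from io import StringIO

# Assumption: NSE publishes the NIFTY 500 constituents as a CSV at this URL
# with a "Symbol" column; adjust if the location or layout has changed.
NIFTY500_CSV = "https://archives.nseindia.com/content/indices/ind_nifty500list.csv"

# NSE tends to reject requests without a browser-like User-Agent header.
resp = requests.get(NIFTY500_CSV, headers={"User-Agent": "Mozilla/5.0"}, timeout=30)
resp.raise_for_status()

nifty_df = pd.read_csv(StringIO(resp.text))
tickers = [symbol + ".NS" for symbol in nifty_df["Symbol"]]  # Yahoo Finance suffix for NSE

index_name = '^CRSLDX'  # NIFTY 500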
I get the below error when I run this in an AWS EC2 instance:
File "/root/test.py", line 1, in
from pandas_datareader import data as pdr
File "/usr/local/lib/python3.9/site-packages/pandas_datareader/init.py", line 5, in
from .data import (
File "/usr/local/lib/python3.9/site-packages/pandas_datareader/data.py", line 9, in
from pandas.util._decorators import deprecate_kwarg
ModuleNotFoundError: No module named 'pandas.util'; 'pandas' is not a package
Please advise how to fix it?
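Not a definitive answer, but "'pandas' is not a package" usually means the real pandas library is being shadowed by a local file or folder named pandas (for example a pandas.py sitting next to /root/test.py). A quick check, as a sketch:

# If this prints a path under /root/ (or anywhere outside site-packages),
# rename or delete that local pandas.py / pandas/ folder and re-run the script.
import pandas
print(pandas.__file__)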
Shashank,
Thank you for sharing this program. I have tried running it and receive the following errors. Any suggestions?
C:\Users\pneof\Python\Episode 3_ Mark Minervini Stock Screener>python vemuri.py
Traceback (most recent call last):
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 1348, in do_open
h.request(req.get_method(), req.selector, req.data, headers,
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 1282, in request
self._send_request(method, url, body, headers, encode_chunked)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 1328, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 1277, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 1037, in _send_output
self.send(msg)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 975, in send
self.connect()
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\http\client.py", line 1454, in connect
self.sock = self._context.wrap_socket(self.sock,
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 512, in wrap_socket
return self.sslsocket_class._create(
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1070, in _create
self.do_handshake()
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\ssl.py", line 1341, in do_handshake
self._sslobj.do_handshake()
ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\pneof\Python\Episode 3_ Mark Minervini Stock Screener\vemuri.py", line 12, in
tickers = si.tickers_sp500()
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\yahoo_fin\stock_info.py", line 129, in tickers_sp500
sp500 = pd.read_html("https://en.wikipedia.org/wiki/List_of_S%26P_500_companies")[0]
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\util_decorators.py", line 311, in wrapper
return func(*args, **kwargs)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\html.py", line 1098, in read_html
return _parse(
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\html.py", line 906, in _parse
tables = p.parse_tables()
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\html.py", line 222, in parse_tables
tables = self._parse_tables(self._build_doc(), self.match, self.attrs)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\html.py", line 745, in _build_doc
raise e
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\html.py", line 726, in _build_doc
with urlopen(self.io) as f:
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\common.py", line 212, in urlopen
return urllib.request.urlopen(*args, **kwargs)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 216, in urlopen
return opener.open(url, data, timeout)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 519, in open
response = self._open(req, data)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 536, in _open
result = self._call_chain(self.handle_open, protocol, protocol +
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 496, in _call_chain
result = func(*args)
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 1391, in https_open
return self.do_open(http.client.HTTPSConnection, req,
File "C:\Users\pneof\AppData\Local\Programs\Python\Python310\lib\urllib\request.py", line 1351, in do_open
raise URLError(err)
urllib.error.URLError: <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:997)>
C:\Users\pneof\Python\Episode 3_ Mark Minervini Stock Screener>
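The CERTIFICATE_VERIFY_FAILED error is raised while yahoo_fin fetches the Wikipedia S&P 500 page through urllib, whose certificate chain on this machine is reported as expired. One workaround, as a sketch rather than a definitive fix: fetch the page with requests (which uses the certifi certificate bundle; upgrading it with pip install --upgrade certifi may also help) and hand the HTML to pandas, which is essentially what the tickers_sp500() helper at the top of this gist already does.

import requests
import pandas as pd

# Fetch the table over requests/certifi instead of urllib's default SSL context.
url = "https://en.wikipedia.org/wiki/List_of_S%26P_500_companies"
html = requests.get(url, timeout=30).text
sp500 = pd.read_html(html, header=0)[0]
tickers = [symbol.replace(".", "-") for symbol in sp500["Symbol"]]  # Yahoo-style symbols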