# Normalise each symbol's close price to its value on the earliest date in the data
def normalise_price(df):
    df = df.assign(norm_close=0)
    for symbol in df["symbol"].unique():
        symbol_df = df[df["symbol"] == symbol]
        min_date = symbol_df["date"].min()
        ref_val = symbol_df[symbol_df["date"] == min_date]["close"].values[0]
        df.loc[df["symbol"] == symbol, "norm_close"] = df.loc[df["symbol"] == symbol, "close"] / ref_val
    return df
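# A minimal usage sketch, assuming symbol_dict maps each ticker to the price
# DataFrame built from get_prices below, and that pandas / plotly.express are
# imported as pd / px as elsewhere in this gist:
combined_df = pd.concat(
    [sym_df.assign(symbol=sym) for sym, sym_df in symbol_dict.items()],
    ignore_index=True,
)
combined_df = normalise_price(combined_df)
fig = px.line(combined_df, x="date", y="norm_close", color="symbol",
              template="plotly_white", title="Normalised stock prices")
fig.show()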
# ========== Get token ==========
with open("../../tokens/iex_token.txt", "r") as f:
    iex_tkn = f.read().strip()
# ========== Get symbols to use ==========
symbol_list = ["MSFT", "AAPL", "NVDA", "JNJ", "KHC", "ALL"]
# ========== Set date range ==========
date_range = '3m'
symbol_dict = dict()
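# A sketch of the fetch loop, assuming get_prices (defined further down) returns
# a requests.Response whose body is a JSON list of daily price records, and that
# json and pandas (as pd) are imported:
for symbol in symbol_list:
    resp = get_prices(symbol, iex_tkn, date_param=date_range)
    if resp is not None:
        symbol_dict[symbol] = pd.DataFrame(json.loads(resp.text))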
import plotly.express as px
fig = px.line(df, x="date", y="close", template="plotly_white",
              title=f"Stock price history for {symbol}")
fig.show()
resp = get_prices(symbol, iex_tkn, date_param=date_range)
if resp is not None:
    prices_obj = json.loads(resp.text)
    df = pd.DataFrame(prices_obj)
with open("../../tokens/iex_token.txt", "r") as f:
    iex_tkn = f.read().strip()
date_range = "3m"
symbol = "MSFT"
resp = get_prices(symbol, iex_tkn, date_param=date_range)
import requests
def get_prices(symbol, key, date_param='5d'):
    url_prefix = "https://cloud.iexapis.com/stable/"
    path = f'stock/{symbol}/chart/{date_param}?chartCloseOnly=True&token={key}'
    print(f"Fetching {date_param} data for {symbol}")
    full_url = requests.compat.urljoin(url_prefix, path)
    try:
        resp = requests.get(full_url)
    except requests.exceptions.RequestException as e:
        print(f"Request for {symbol} failed: {e}")
        return None
    return resp
# Add column for cold/neutral/hot start, based on makes vs. misses in the first
# n_streak shots: "Hot" with at least n_streak - margin makes, "Cold" with at
# most margin makes, otherwise "Neutral"
shots_df = shots_df.assign(start="Unknown")
for pl in players:
    logger.info(f"Processing data for {pl}")
    dates = shots_df[shots_df.player == pl].date.unique()
    for date in dates:
        day_filter = ((shots_df.player == pl) & (shots_df.date == date))
        day_df = shots_df[day_filter].sort_values("tot_time")
        if len(day_df) > n_streak:
            if day_df[:n_streak]["shot_made"].sum() >= n_streak - margin:
                shots_df.loc[day_filter, "start"] = "Hot"
            elif day_df[:n_streak]["shot_made"].sum() <= margin:
                shots_df.loc[day_filter, "start"] = "Cold"
            else:
                shots_df.loc[day_filter, "start"] = "Neutral"
# ===== Group dataframe by player & game
players = shots_df.groupby("player").count()["date"].sort_values()[-200:].index
pl_df = shots_df[shots_df.player.isin(players)]
grp_pl_df = pl_df.groupby(["player", "date"]).shot_made.sum().reset_index()
grp_pl_df = grp_pl_df.assign(shot_count=pl_df.groupby(["player", "date"]).shot_made.count().values)
grp_pl_df = grp_pl_df.assign(shot_acc=grp_pl_df["shot_made"]/grp_pl_df["shot_count"])
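# A sketch of one quick look at the grouped data, assuming plotly.express is
# imported as px as in the price snippets above:
fig = px.histogram(grp_pl_df, x="shot_acc", nbins=40, template="plotly_white",
                   title="Distribution of per-game shooting accuracy")
fig.show()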
# Merge income data & plot
county_df = county_df.join(data_df[["fips", "Median_Household_Income_2018", "POP_ESTIMATE_2018"]].set_index("fips"), on="fips", how="left")
county_df["POP_ESTIMATE_2018"].fillna(0, inplace=True)
fig = px.scatter_mapbox(county_df, lat="lat", lon="lon", color="Median_Household_Income_2018", size="POP_ESTIMATE_2018",
range_color=[40000, 70000], size_max=40,
color_continuous_scale=px.colors.diverging.RdYlBu, labels=labels
)
fig.update_layout(mapbox_style="carto-positron",
mapbox_zoom=3.9, mapbox_center={"lat": 37.0902, "lon": -95.7129},
>>> county_df.head()
    fips        lon        lat
0  01001 -86.664383  32.524095
1  01009 -86.633437  34.012316
2  01017 -85.358299  32.918304
3  01021 -86.697066  32.866796
4  01033 -87.778236  34.729641
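# Note on the join above: county_df stores fips as zero-padded strings ("01001"),
# so data_df["fips"] must have the same dtype for the merge to match. A hedged
# sketch, assuming data_df was read with integer fips codes:
data_df["fips"] = data_df["fips"].astype(str).str.zfill(5)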