Skip to content

Instantly share code, notes, and snippets.

View ahmedbesbes's full-sized avatar
💭
Building things, one line of code at a time 💻

Ahmed BESBES ahmedbesbes

💭
Building things, one line of code at a time 💻
View GitHub Profile
import streamlit as st

# Build the three dashboard tabs; only the metrics tab is filled in here.
tabs = st.tabs(["metrics", "plots", "reports"])
tab_metrics = tabs[0]

with tab_metrics:
    # delta renders an up/down arrow next to the value (green for +, red for -).
    st.metric("Precision", 0.85, delta=0.2)
    st.metric("Recall", 0.60, delta=-0.1)
import streamlit as st
# NOTE(review): duplicated start of the tabs example above; the returned
# tab containers are never used in this fragment.
tabs = st.tabs(["metrics", "plots", "reports"])
import os
import tweepy
from dotenv import load_dotenv

# Populate os.environ from a local .env file (Twitter API credentials).
load_dotenv()

# os.environ[...] raises KeyError if BEARER_TOKEN is unset — fails fast
# on missing configuration rather than at the first API call.
bearer_token = os.environ["BEARER_TOKEN"]


class MyStreamer(tweepy.StreamingClient):
    """Streaming client reacting to tweets that match the active stream rules."""

    def on_tweet(self, tweet):
        """Callback invoked by tweepy for each incoming tweet.

        NOTE(review): the method body is truncated in this chunk of the
        scraped source — the original implementation is not visible here.
        """
# Gather the first 5 result pages (50 tweets per page) of an
# English-language "Ukraine" search via cursor-based pagination.
search_cursor = tweepy.Cursor(
    api.search_tweets,
    "Ukraine",
    lang="en",
    count=50,
)
extracted_pages = list(search_cursor.pages(5))

extracted_tweets_from_pages = []
import os

import tweepy
from dotenv import load_dotenv

# Populate os.environ from a local .env file. Without this call the
# imported load_dotenv was unused and the lookups below relied on the
# variables already being exported in the shell.
load_dotenv()

# OAuth 1.0a user-context credentials. os.environ[...] raises KeyError
# on a missing key, so misconfiguration fails fast at startup.
consumer_key = os.environ["API_KEY"]
consumer_secret = os.environ["API_KEY_SECRET"]
access_token = os.environ["ACCESS_TOKEN"]
access_token_secret = os.environ["ACCESS_TOKEN_SECRET"]

# NOTE(review): this call was truncated in the scraped source; the
# argument list below is the standard OAuth1UserHandler signature and
# matches the four credentials read above — confirm against the original.
auth = tweepy.OAuth1UserHandler(
    consumer_key,
    consumer_secret,
    access_token,
    access_token_secret,
)
import os

import tweepy
from dotenv import load_dotenv

# Populate os.environ from a local .env file. Without this call the
# imported load_dotenv was unused and the lookups below relied on the
# variables already being exported in the shell.
load_dotenv()

# OAuth 1.0a user-context credentials. os.environ[...] raises KeyError
# on a missing key, so misconfiguration fails fast at startup.
consumer_key = os.environ["API_KEY"]
consumer_secret = os.environ["API_KEY_SECRET"]
access_token = os.environ["ACCESS_TOKEN"]
access_token_secret = os.environ["ACCESS_TOKEN_SECRET"]

# NOTE(review): this call was truncated in the scraped source; the
# argument list below is the standard OAuth1UserHandler signature and
# matches the four credentials read above — confirm against the original.
auth = tweepy.OAuth1UserHandler(
    consumer_key,
    consumer_secret,
    access_token,
    access_token_secret,
)
import os

import tweepy
from dotenv import load_dotenv

# Populate os.environ from a local .env file. Without this call the
# imported load_dotenv was unused and the lookups below relied on the
# variables already being exported in the shell.
load_dotenv()

# OAuth 1.0a user-context credentials. os.environ[...] raises KeyError
# on a missing key, so misconfiguration fails fast at startup.
consumer_key = os.environ["API_KEY"]
consumer_secret = os.environ["API_KEY_SECRET"]
access_token = os.environ["ACCESS_TOKEN"]
access_token_secret = os.environ["ACCESS_TOKEN_SECRET"]

# NOTE(review): this call was truncated in the scraped source; the
# argument list below is the standard OAuth1UserHandler signature and
# matches the four credentials read above — confirm against the original.
auth = tweepy.OAuth1UserHandler(
    consumer_key,
    consumer_secret,
    access_token,
    access_token_secret,
)
from flask import Flask

app = Flask(__name__)


@app.route("/")
def hello_world():
    """Serve the site root with a static HTML greeting."""
    return "<p>Hello, World!</p>"
# Slim Debian "buster" base image with Python 3.7 preinstalled.
FROM python:3.7-slim-buster
# All subsequent paths and commands are relative to this directory.
WORKDIR /usr/src/app
# Copy requirements alone first so the pip-install layer stays cached
# until requirements.txt itself changes.
COPY requirements.txt ./
# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application source into the image.
COPY . .
import random

import numpy as np
import torch

# One constant so every RNG source is seeded identically. The original
# called random.seed(seed) with `seed` undefined (NameError) while the
# other libraries used the literal 0.
SEED = 0

random.seed(SEED)
torch.manual_seed(SEED)
np.random.seed(SEED)

# Force cuDNN to pick deterministic kernels and disable its autotuner
# (benchmark) so repeated runs produce identical results, at some
# performance cost.
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False