Skip to content

Instantly share code, notes, and snippets.

View 2tony2's full-sized avatar
🔥
Lighting my keyboard on fire

Tony Zeljkovic 2tony2

🔥
Lighting my keyboard on fire
View GitHub Profile
def my_generator():
    """Yield the integers 1, 2 and 3 in order.

    Demonstrates that a function containing ``yield`` returns a lazy
    generator object rather than running its body immediately.
    """
    for value in (1, 2, 3):
        yield value


# Consuming the generator drives the body one yield at a time.
gen = my_generator()
for item in gen:
    print(item)
class MyIterator:
    """A hand-rolled iterator over a sequence, implementing the iterator protocol.

    The original snippet was truncated mid-``__next__`` (the ``if`` had no
    body, a syntax error); this completes the canonical pattern: return the
    current element and advance, raising StopIteration when exhausted.
    """

    def __init__(self, data):
        # data: any indexable sequence (list, tuple, str, ...)
        self.data = data
        self.index = 0  # position of the next element to hand out

    def __iter__(self):
        # An iterator is its own iterable, so for-loops accept it directly.
        return self

    def __next__(self):
        if self.index < len(self.data):
            item = self.data[self.index]
            self.index += 1
            return item
        # Signals the for-loop (or caller) that iteration is finished.
        raise StopIteration
# Drive the iterator protocol by hand: iter() builds the iterator,
# next() pulls one element per call.
my_list = [1, 2, 3]
iterator = iter(my_list)
for _ in range(3):
    print(next(iterator))  # prints 1, then 2, then 3
# A fourth next(iterator) would raise StopIteration — the list is exhausted.
# A for-loop performs the iter()/next() dance implicitly; unpacking into
# print with a newline separator produces the exact same output.
my_list = [1, 2, 3]
print(*my_list, sep="\n")
import pandas as pd

# Stream a large CSV in fixed-size batches instead of loading it whole:
# read_csv(chunksize=...) returns a lazy reader that yields DataFrames
# of at most `chunk_size` rows each.
chunk_size = 1000
reader = pd.read_csv('large_file.csv', chunksize=chunk_size)
for chunk in reader:
    process(chunk)  # Replace with your processing logic
import inspect

# Every attribute of the instance — dunders included — paired with its value.
members = inspect.getmembers(pipeline)
print(members)

# The constructor's call signature, recovered at runtime.
sig = inspect.signature(DataPipeline.__init__)
print(sig)  # (self, name: str, batch_size: int)

# Runtime type checks: the instance itself, then each of its attributes.
print(type(pipeline))             # <class '__main__.DataPipeline'>
print(type(pipeline.name))        # <class 'str'>
print(type(pipeline.batch_size))  # <class 'int'>

# dir() lists attribute *names* only (sorted), without their values.
print(dir(pipeline))
class DataPipeline:
    """A minimal pipeline record: a name, a batch size, and a status flag."""

    def __init__(self, name: str, batch_size: int):
        # New pipelines always start out inactive.
        self.name, self.batch_size, self.status = name, batch_size, "inactive"


pipeline = DataPipeline(name="ETL Pipeline", batch_size=500)
# vars() exposes the instance __dict__ — handy for a quick state dump.
print(vars(pipeline))
from enum import Enum


class Environment(Enum):
    """The closed set of deployment targets this code recognises."""

    DEVELOPMENT = "development"
    STAGING = "staging"
    PRODUCTION = "production"


def set_environment(env: Environment) -> str:
    """Return a human-readable confirmation for the selected environment."""
    return f"Environment set to {env.value}"