Eric Schles (EricSchles): selected gists
from tensorflow.keras import Model
import tensorflow as tf
import numpy as np
import pandas as pd
import random

class ReluDense(tf.Module):
    """Dense (fully connected) layer with a ReLU activation."""
    def __init__(self, in_features, out_features, name=None):
        super().__init__(name=name)
        # Randomly initialized weights and zero-initialized bias.
        self.w = tf.Variable(
            tf.random.normal([in_features, out_features]), name='w')
        self.b = tf.Variable(tf.zeros([out_features]), name='b')

    def __call__(self, x):
        # Affine transform followed by the ReLU non-linearity.
        return tf.nn.relu(tf.matmul(x, self.w) + self.b)
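A quick usage sketch for the layer above; the completed weight, bias, and __call__ lines follow the standard tf.Module dense-layer pattern, and the shapes below are illustrative.

layer = ReluDense(in_features=3, out_features=2)
x = tf.constant(np.random.rand(4, 3), dtype=tf.float32)  # toy batch: 4 samples, 3 features
print(layer(x).shape)  # -> (4, 2); all values are >= 0 because of the ReLU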
from functools import reduce
from pyspark.sql.functions import col

def groupby(df, columns):
    """Split a pandas-on-Spark DataFrame into sub-DataFrames, one per distinct key."""
    sdf = df.to_spark()
    # Collect the distinct combinations of the grouping columns as plain dicts.
    _groups = sdf.select(*columns).distinct().collect()
    _groups = [group.asDict() for group in _groups]
    groups = []
    for group in _groups:
        tmp = []
        for column in columns:
            # One equality filter per grouping column.
            tmp.append(col(column) == group[column])
        # AND the filters together and keep this group's rows.
        groups.append((group, sdf.filter(reduce(lambda a, b: a & b, tmp))))
    return groups
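A hedged usage sketch, assuming a pandas-on-Spark input frame and that groupby returns (key, filtered Spark DataFrame) pairs as reconstructed above; the column names and values are illustrative.

import pyspark.pandas as ps

psdf = ps.DataFrame({
    "store": ["a", "a", "b"],
    "month": [1, 2, 1],
    "sales": [10, 20, 30],
})
# Each distinct (store, month) combination yields its own Spark sub-DataFrame.
for key, sub_sdf in groupby(psdf, ["store", "month"]):
    print(key, sub_sdf.count())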
from datetime import datetime, timedelta
from textwrap import dedent
from functor import my_function
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.operators.bash import BashOperator
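A minimal DAG sketch wired from these imports; the dag_id, start date, schedule, and bash command are illustrative assumptions (my_function and dedent are left unused here).

with DAG(
    dag_id="example_dag",                      # assumed name
    start_date=datetime(2021, 1, 1),
    schedule_interval=timedelta(days=1),
    catchup=False,
) as dag:
    # Single illustrative task; the command is a placeholder.
    hello = BashOperator(task_id="hello", bash_command="echo hello")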