
Sergey Prokudin (sergeyprokudin)

  • ETH Zürich
  • Zürich
sergeyprokudin / bfs.py
Last active January 10, 2019 19:23
Breadth First Search of a Graph
# https://docs.python.org/2/tutorial/datastructures.html
from collections import deque

def bfs(g, root_node):
    """Implementation of a breadth first search (BFS).
    BFS is useful e.g. when we need to find a route between two nodes of a graph.
    It is NOT recursive.
    Algo:
    1. Enqueue root node;
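The preview above is truncated by the listing. A minimal self-contained sketch of such a non-recursive BFS, assuming `g` is a dict mapping each node to a list of neighbors (an illustrative completion, not necessarily the gist's exact code):

from collections import deque

def bfs(g, root_node):
    """Return the nodes of g reachable from root_node, in breadth-first order."""
    visited = [root_node]
    queue = deque([root_node])          # 1. enqueue the root node
    while queue:
        node = queue.popleft()          # 2. dequeue the oldest node
        for neighbor in g.get(node, []):
            if neighbor not in visited:
                visited.append(neighbor)
                queue.append(neighbor)  # 3. enqueue unvisited neighbors
    return visited

# bfs({'a': ['b', 'c'], 'b': ['d'], 'c': [], 'd': []}, 'a') -> ['a', 'b', 'c', 'd']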
sergeyprokudin / depth_first_search.py
Created January 10, 2019 18:21
Vanilla graph depth first search implementation in Python
def dfs(g, curr_node, visited=None):
    """Vanilla recursive depth first search.

    Parameters
    ----------
    g: dictionary
        dictionary mapping each node to the list of its neighbors
    curr_node:
        node to start (or continue) the search from

    Returns
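The preview stops inside the docstring. A compact sketch of a vanilla recursive DFS over the same adjacency-list representation (an assumed completion; the original body is not shown):

def dfs(g, curr_node, visited=None):
    """Return the nodes of g reachable from curr_node, in depth-first pre-order."""
    if visited is None:
        visited = []
    visited.append(curr_node)
    for neighbor in g.get(curr_node, []):
        if neighbor not in visited:
            dfs(g, neighbor, visited)
    return visited

# dfs({'a': ['b', 'c'], 'b': ['d'], 'c': [], 'd': []}, 'a') -> ['a', 'b', 'd', 'c']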
sergeyprokudin / graph_definition.py
Created January 10, 2019 18:18
Graph definition as a class
class GraphNode:

    def __init__(self, value, neighbors):
        """Initialize a graph node.

        Parameters
        ----------
        value: str
            node identifier
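A small sketch of how such a node class might be completed and used; the default for `neighbors` and the `add_neighbor` helper are hypothetical additions for illustration only:

class GraphNode:
    """Graph node holding a value and a list of neighboring GraphNode objects."""

    def __init__(self, value, neighbors=None):
        self.value = value
        self.neighbors = neighbors if neighbors is not None else []

    def add_neighbor(self, node):
        # hypothetical convenience helper, not necessarily in the gist
        self.neighbors.append(node)

# a, b = GraphNode('a'), GraphNode('b'); a.add_neighbor(b)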
sergeyprokudin / linear_regression_sgd.py
Created January 9, 2019 22:55
Linear Regression with Stochastic Gradient Descent
import numpy as np

def linear_regression(x, y, lr=1.0e-4, n_steps=10000, lr_decay=0.5):
    """Solves the linear regression problem: finds w = (w_1, ..., w_n) such that
    w* = argmin_{w} L(w), where
    L(w) = sum_{i=1}^{m} (<w, x_i> - y_i)**2   (sum of squared errors)
    <w, x_i> = w_1*x_{i1} + w_2*x_{i2} + ... + w_n*x_{in}
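The update loop is cut off in the preview. One way the stochastic gradient descent step could look, assuming x has shape [m, n] and y has shape [m]; the name linear_regression_sgd and the decay schedule are assumptions, not the gist's exact implementation:

import numpy as np

def linear_regression_sgd(x, y, lr=1.0e-4, n_steps=10000, lr_decay=0.5):
    """Minimize L(w) = sum_i (<w, x_i> - y_i)**2 with stochastic gradient descent."""
    m, n = x.shape
    w = np.zeros(n)
    for step in range(n_steps):
        i = np.random.randint(m)                # pick one random training sample
        grad = 2.0 * (x[i] @ w - y[i]) * x[i]   # gradient of the squared error on sample i
        w -= lr * grad
        if (step + 1) % 1000 == 0:
            lr *= lr_decay                      # occasionally shrink the step size
    return w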
sergeyprokudin / linear_regression.py
Last active January 7, 2019 22:18
The code for simple linear regression (exact solution)
import numpy as np

def linear_regression_1d(x, y):
    """Solves the regression problem exactly: provides w = (w_1, ..., w_n) such that
    w* = argmin_{w} L(w)
    L(w) = sum_{i=1}^{m} (<w, x_i> - y_i)**2
    <w, x_i> = w_1*x_{i1} + w_2*x_{i2} + ... + w_n*x_{in}
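For the exact solution, the standard closed form is the normal equations; a sketch under the assumption that x is an [m, n] design matrix (the function name here is mine, to avoid implying it matches the gist's body):

import numpy as np

def linear_regression_exact(x, y):
    """Closed-form least squares: w* = argmin_w ||x w - y||^2 = (x^T x)^{-1} x^T y."""
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    return np.linalg.solve(x.T @ x, x.T @ y)

# linear_regression_exact(np.array([[1.], [2.], [3.]]), np.array([2., 4., 6.])) -> array([2.])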
sergeyprokudin / validate_bst.py
Last active August 4, 2021 14:23
Check if a binary tree is a binary search tree (Question 4.5 from "Cracking the Coding Interview")
class BinaryTreeNode:

    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

    def insert(self, value):
        "insert an element so that the binary search tree structure is maintained"
        if self.value >= value:
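The validation routine itself is not visible in the preview. The usual min/max-bounds check for question 4.5 might be sketched as follows, consistent with the insert rule above where duplicates go to the left subtree (a sketch, not the gist's code):

def is_bst(node, min_value=float('-inf'), max_value=float('inf')):
    """Return True iff the subtree rooted at node is a valid binary search tree:
    left descendants are <= node.value, right descendants are > node.value."""
    if node is None:
        return True
    if not (min_value < node.value <= max_value):
        return False
    return (is_bst(node.left, min_value, node.value) and
            is_bst(node.right, node.value, max_value))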
sergeyprokudin / bits.py
Created January 6, 2019 14:50
Bitwise operations: set, clear, and check bits
def set_bit(x, i):
    bi = 0b1 << i
    return x | bi

def xor_bit(x, i):
    bi = 0b1 << i
    return x ^ bi

def clear_bit(x, i):
    bi = ~(0b1 << i)
    return x & bi
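The description also mentions checking set bits; a one-line sketch of such a helper (the name get_bit is an assumption, the gist may call it differently):

def get_bit(x, i):
    """Return 1 if bit i of x is set, else 0."""
    return (x >> i) & 0b1

# get_bit(0b1010, 1) -> 1, get_bit(0b1010, 0) -> 0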
sergeyprokudin / is_permuted_palindrome.py
Last active January 6, 2019 13:48
Test if the string is a permuted palindrome (problem 1.4 from "Cracking the Coding Interview")
def is_permuted_palindrome(s):
    """Checks if the string is a permuted version of a palindrome.
    Case does not matter, spaces are ignored.
    Example:
    "cat cat" -> Yes (palindrome: "cat tac")
    "cat do" -> No
sergeyprokudin / kde.py
Last active January 5, 2019 19:31
Kernel and Histogram Density Estimation
#!/usr/bin/env python
# coding: utf-8
import numpy as np
def hist_pdf(x, data, n_bins=10, minv=None, maxv=None):
    """Histogram density estimator
sergeyprokudin / hist_pdf.py
Created January 5, 2019 18:41
Histogram Density Estimator
import numpy as np

def hist_pdf(x, data, n_bins=10, minv=None, maxv=None):
    """Histogram density estimator.
    Bin edges: [minv, minv+delta, minv+2*delta, ..., maxv], with delta = (maxv - minv) / n_bins.
    For any x in bin B_l = [minv+delta*l, minv+delta*(l+1)), the density is estimated as
    p(x) = p(x | x \in B_l) * p(x \in B_l) = (1/delta) * (\sum_{j}{I(x_j \in B_l)} / N)
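The estimator body is cut off in the preview; a direct implementation of the docstring's formula p(x) = count(B_l) / (N * delta), under the added assumption that out-of-range x is clamped to the nearest bin:

import numpy as np

def hist_pdf(x, data, n_bins=10, minv=None, maxv=None):
    data = np.asarray(data, dtype=float)
    minv = data.min() if minv is None else minv
    maxv = data.max() if maxv is None else maxv
    delta = (maxv - minv) / n_bins                            # bin width
    l = min(max(int((x - minv) / delta), 0), n_bins - 1)      # index of the bin containing x
    lo, hi = minv + l * delta, minv + (l + 1) * delta
    count = np.sum((data >= lo) & (data < hi))                # number of samples in B_l
    return count / (len(data) * delta)

# hist_pdf(0.0, np.random.randn(10000), n_bins=30) is roughly 0.4 (the standard normal peak)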