import tensorflow as tf


def define_Q(input_shape=(16, 16)):
    """
    Defines the Q-matrix and returns the input and output TensorFlow tensors.

    :param Tuple input_shape: spatial shape of a single state observation
    :return: Tuple[tf.Tensor, tf.Tensor] -- the input placeholder and the Q-values
    """
    input_ = tf.placeholder(shape=(None,) + input_shape + (1,), dtype=tf.float32)
    nn_1 = tf.layers.batch_normalization(input_)
    # The snippet is truncated here; a minimal, hedged completion flattens the
    # normalised input and maps it to one Q-value per action. The four-action
    # head below is an assumption, not part of the original.
    output = tf.layers.dense(tf.layers.flatten(nn_1), units=4)
    return input_, output
def get_cost(target, Q, action_indices):
    """
    Cost function of the Q-matrix, attempting to approximate the reward function.

    :param tf.placeholder target: placeholder for the values of the registered rewards
    :param tf.placeholder Q: output of the Q-matrix for the registered states
    :param tf.placeholder action_indices: placeholder for the indices of the registered actions
    :return: tf.Tensor -- mean squared error of the rewards vs. the Q-matrix
    """
    row_indices = tf.range(tf.shape(action_indices)[0])
    full_indices = tf.stack([row_indices, action_indices], axis=1)
    # Completion of the truncated snippet: pick, for every row, the Q-value of
    # the action that was actually taken and regress it onto the observed reward.
    q_taken = tf.gather_nd(Q, full_indices)
    return tf.reduce_mean(tf.squared_difference(target, q_taken))
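# A minimal sketch of how the two pieces above could be wired into a training op;
# the Adam optimizer and the 1e-3 learning rate are illustrative choices, not
# taken from the original snippets.
input_t, Q_t = define_Q(input_shape=(16, 16))
target_t = tf.placeholder(shape=(None,), dtype=tf.float32)
action_t = tf.placeholder(shape=(None,), dtype=tf.int32)
cost_t = get_cost(target_t, Q_t, action_t)
train_op = tf.train.AdamOptimizer(1e-3).minimize(cost_t)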
from xgboost import XGBRegressor


class XGBQuantile(XGBRegressor):
    def __init__(self, quant_alpha=0.95, quant_delta=1.0, quant_thres=1.0, quant_var=1.0,
                 base_score=0.5, booster='gbtree', colsample_bylevel=1, colsample_bytree=1, gamma=0,
                 learning_rate=0.1, max_delta_step=0, max_depth=3, min_child_weight=1, missing=None,
                 n_estimators=100, n_jobs=1, nthread=None, objective='reg:linear', random_state=0,
                 reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None, silent=True, subsample=1):
        self.quant_alpha = quant_alpha
        self.quant_delta = quant_delta
        self.quant_thres = quant_thres
        self.quant_var = quant_var
        # The original snippet is truncated mid-call; the remaining keywords below
        # simply forward the standard XGBRegressor arguments.
        super().__init__(base_score=base_score, booster=booster, colsample_bylevel=colsample_bylevel,
                         colsample_bytree=colsample_bytree, gamma=gamma, learning_rate=learning_rate,
                         max_delta_step=max_delta_step, max_depth=max_depth, min_child_weight=min_child_weight,
                         missing=missing, n_estimators=n_estimators, n_jobs=n_jobs, nthread=nthread,
                         objective=objective, random_state=random_state, reg_alpha=reg_alpha,
                         reg_lambda=reg_lambda, scale_pos_weight=scale_pos_weight, seed=seed,
                         silent=silent, subsample=subsample)
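# Why subclass at all: a plain XGBRegressor minimises squared error, while a
# prediction interval needs the alpha-quantile. A minimal sketch of a pinball-loss
# objective usable with xgboost's low-level `xgb.train` API; `quantile_objective`
# is an illustrative name, and this is the plain (unsmoothed) loss, not
# necessarily the variant the class above implements.
import numpy as np

def quantile_objective(alpha):
    def _objective(y_pred, dtrain):
        err = dtrain.get_label() - y_pred
        # Gradient of the pinball loss w.r.t. the prediction; the true hessian is
        # zero almost everywhere, so a constant keeps the Newton step well-defined.
        grad = np.where(err > 0, -alpha, 1.0 - alpha)
        hess = np.ones_like(err)
        return grad, hess
    return _objective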
import numpy as np

np.random.seed(1)


def f(x):
    """The function to predict."""
    return x * np.sin(x)

# ----------------------------------------------------------------------
# First the noiseless case
X = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T
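# A hedged continuation of the noiseless case using scikit-learn's
# GaussianProcessRegressor, in the spirit of the classic sklearn example this
# snippet resembles (the kernel choice and prediction grid are illustrative):
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C

y = f(X).ravel()
kernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))
gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)
gp.fit(X, y)
x_pred = np.atleast_2d(np.linspace(0, 10, 1000)).T
y_pred, sigma = gp.predict(x_pred, return_std=True)  # posterior mean and std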
class TreeProperties(object):
    '''
    :param max_depth: maximum depth of the tree
    :param max_leafs: maximum number of leaves
    :param n_features: number of features available within the data
    :param n_classes: number of classes
    '''
    def __init__(self, max_depth, max_leafs, n_features, n_classes,
                 regularisation_penality=10., decay_penality=0.9):
        self.max_depth = max_depth
        self.max_leafs = max_leafs
        self.n_features = n_features
        # The remaining assignments complete the truncated snippet.
        self.n_classes = n_classes
        self.regularisation_penality = regularisation_penality
        self.decay_penality = decay_penality
class Node(object):
    def __init__(self, id, depth, pathprob, tree):
        self.id = id
        self.depth = depth
        self.pathprob = pathprob  # assumed to be stored; the shown lines skip it
        self.prune(tree)
        if self.isLeaf:
            # Leaf nodes hold the parameters of a class distribution; the exact
            # tf.get_variable arguments are elided in the original snippet.
            self.W = tf.get_variable(...)
            self.b = tf.get_variable(...)
        else:
            # Inner nodes hold the parameters of a soft routing gate.
            self.W = tf.get_variable(...)
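# What the elided variables would compute in a soft decision tree: inner nodes
# route each sample with a learned probability, leaves hold a class distribution.
# A hedged sketch of those two forward passes (names and shapes are assumptions,
# not the original code):
def inner_node_prob(x, W, b):
    # Probability of routing a sample to (say) the right child: sigmoid(xW + b).
    return tf.nn.sigmoid(tf.matmul(x, W) + b)

def leaf_distribution(W):
    # Per-leaf distribution over classes, independent of the input.
    return tf.nn.softmax(W)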
class SoftDecisionTree(object):
    def __init__(self, *args, **kwargs):
        self.params = TreeProperties(*args, **kwargs)
        self.loss = 0.0
        self.output = list()
        self.leafs_distribution = list()

    def build_tree(self):
        self.tf_X = tf.placeholder(tf.float32, [None, self.params.n_features])

# Gradient Descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
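# A self-contained illustration of the gradient-descent line above, with a toy
# quadratic standing in for the tree's loss (in the fragment, `cost` and
# `learning_rate` are defined elsewhere):
learning_rate = 0.1
w = tf.Variable(5.0)
cost = tf.square(w - 2.0)
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
        sess.run(optimizer)
    print(sess.run(w))  # converges towards the minimiser, 2.0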
def _create_slots(self, var_list):
    # Create slots for allocation and later management of additional
    # variables associated with the variables to train,
    # for example the first and second moments:
    '''
    for v in var_list:
        self._zeros_slot(v, "m", self._name)
        self._zeros_slot(v, "v", self._name)
    '''

def _apply_dense(self, grad, var):
    # Returns the op that updates `var` given `grad`; body elided in the snippet.
    ...

# This class defines the API to add Ops to train a model.
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.framework import ops
from tensorflow.python.training import optimizer
import tensorflow as tf
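# Putting the hooks together: a minimal custom optimizer built on the imports
# above. It implements plain SGD through the same _create_slots / _apply_dense
# API; the class name `SimpleSGD` is illustrative, not from the original post.
class SimpleSGD(optimizer.Optimizer):
    def __init__(self, learning_rate=0.01, use_locking=False, name="SimpleSGD"):
        super(SimpleSGD, self).__init__(use_locking, name)
        self._lr = learning_rate

    def _prepare(self):
        # Convert the Python hyperparameter to a tensor once per apply_gradients.
        self._lr_t = ops.convert_to_tensor(self._lr, name="learning_rate")

    def _create_slots(self, var_list):
        # Plain SGD keeps no per-variable state, so no slots are created.
        pass

    def _apply_dense(self, grad, var):
        # var <- var - lr * grad
        lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
        return state_ops.assign_sub(var, lr_t * grad, use_locking=self._use_locking)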