Created
May 23, 2018 12:27
-
-
Save JossWhittle/fa5cd682b3a21f8093d67ec2866efecf to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Dense (fully-connected) layer using runtime weight scaling by the He
# initialization constant ("equalized learning rate" trick): weights are
# drawn from a unit truncated normal and multiplied by the constant at
# graph-execution time instead of baking it into the initializer.
# NOTE: the original header said "2d convolution", but this block performs
# a matmul-based dense layer.
def dense_with_scaling(inputs, fan_out, activation=None, name='dense_with_scaling'):
    """Fully-connected layer whose weights are scaled at runtime by the He constant.

    Args:
        inputs: tensor whose last static dimension is the input feature count
            (fan-in); expected 2-D (batch, fan_in) for the matmul.
        fan_out: int, number of output units.
        activation: optional callable applied to the output logits
            (e.g. ``tf.nn.relu``); ``None`` returns raw logits.
        name: variable-scope name; variables are shared via ``tf.AUTO_REUSE``.

    Returns:
        Tensor of shape (batch, fan_out).
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        # Number of input features, taken from the previous tensor's static shape.
        fan_in = int(inputs.get_shape()[-1])
        # He initialization constant. The 1.3 factor presumably compensates
        # for variance lost by truncating the normal distribution — TODO confirm.
        C = np.sqrt(1.3 * 2.0 / fan_in)
        # Weights are deliberately initialized with the default (unit) stddev;
        # the He constant is applied at runtime via W_scaled, not at init time.
        W = tf.get_variable('W', shape=(fan_in, fan_out),
                            initializer=tf.initializers.truncated_normal())
        B = tf.get_variable('B', shape=(fan_out,),
                            initializer=tf.initializers.zeros())
        # He constant as a graph constant so the scaling happens in-graph.
        HE_constant = tf.constant(C, dtype=tf.float32, name='HE_constant')
        # Runtime scaling of the weight matrix by the initialization constant.
        W_scaled = tf.multiply(W, HE_constant, name='W_scaled')
        # Affine transform: inputs @ W_scaled + B.
        logits = tf.matmul(inputs, W_scaled, name='apply_matmul')
        logits = tf.nn.bias_add(logits, B, name='add_bias')
        # Optionally apply the activation function.
        if activation is not None:
            logits = activation(logits)
        return logits
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment