import numpy as np
import matplotlib.pyplot as plt
from plotly import offline as py
import plotly.tools as tls

# Enable inline Plotly rendering in the notebook.
py.init_notebook_mode()

# Exponentially damped cosine, exp(-0.5 t) * cos(2*pi*t), sampled on [0, 10].
t = np.linspace(0, 10, 1000)
plt.plot(t, np.exp(-0.5 * t) * np.cos(2 * np.pi * t))
plt.xlim(0, 7)
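The snippet stops before the Plotly step, but the otherwise unused plotly.tools import suggests the Matplotlib figure is converted next. A minimal sketch of that continuation, assuming the legacy mpl_to_plotly workflow (the figure handle and calls below are my assumption, not part of the original gist):

# Hedged sketch: convert the Matplotlib figure drawn above into a Plotly
# figure and render it inline with plotly.offline. Assumes the legacy
# plotly.tools.mpl_to_plotly API.
mpl_fig = plt.gcf()                      # grab the current Matplotlib figure
plotly_fig = tls.mpl_to_plotly(mpl_fig)  # translate Matplotlib artists to Plotly traces
py.iplot(plotly_fig)                     # display inline in the notebook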
import tensorflow as tf | |
from groupy.gconv.gconv_tensorflow.keras.layers import P4ConvZ2, P4ConvP4 | |
batch_size = 32 | |
num_classes = 10 | |
epochs = 25 | |
num_predictions = 20 | |
# The data, split between train and test sets: | |
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data() |
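As context for the imports above, here is a hedged sketch of how a small p4-equivariant model could be assembled from these layers on CIFAR-10: P4ConvZ2 lifts the input image to the p4 group (four rotations), and P4ConvP4 then convolves on that group. The constructor arguments (filters, kernel_size, padding, activation) and the assumption that the group axis is folded into the channel axis mirror the Keras Conv2D convention and are not confirmed against this GrouPy port's actual signature:

# Hedged sketch only: layer arguments are assumed to mirror tf.keras.layers.Conv2D.
inputs = tf.keras.Input(shape=(32, 32, 3))
x = P4ConvZ2(filters=16, kernel_size=3, padding='same', activation='relu')(inputs)  # lift Z2 -> p4
x = P4ConvP4(filters=16, kernel_size=3, padding='same', activation='relu')(x)       # convolve on p4
x = tf.keras.layers.GlobalAveragePooling2D()(x)  # assumes the group axis is folded into channels
outputs = tf.keras.layers.Dense(num_classes, activation='softmax')(x)
model = tf.keras.Model(inputs, outputs)
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',  # CIFAR-10 labels are integers
              metrics=['accuracy'])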
commit 1ebd8b170d31d64ad2523a1db81c5619fed24fc1
Author: Lukas Geiger <[email protected]>
Date:   Sat Mar 31 01:40:36 2018 +0200

    one_sided penalty

diff --git a/tensorflow/contrib/gan/python/losses/python/losses_impl.py b/tensorflow/contrib/gan/python/losses/python/losses_impl.py
index 2a40dbade6..77a86043d8 100644
--- a/tensorflow/contrib/gan/python/losses/python/losses_impl.py
+++ b/tensorflow/contrib/gan/python/losses/python/losses_impl.py
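The diff body is cut off above. For orientation, "one_sided" here refers to the variant of the WGAN gradient penalty that only punishes discriminator gradient norms above 1, instead of pulling them toward 1 from both sides. A hedged illustration of that difference in plain TensorFlow, not the actual tf.contrib.gan change:

import tensorflow as tf

# Illustration only: `gradient_norm` stands in for the per-sample norm of the
# discriminator gradients at interpolated points.
gradient_norm = tf.constant([0.4, 1.0, 1.7])
two_sided = tf.square(gradient_norm - 1.0)                   # original WGAN-GP: push the norm toward 1
one_sided = tf.square(tf.maximum(gradient_norm - 1.0, 0.0))  # penalize only norms greater than 1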
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.eager import context
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.framework import ops
from tensorflow.python.training import optimizer
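These imports are the usual preamble for a custom optimizer written in the TF 1.x style; the rest of the file is not shown. A minimal sketch of the skeleton that typically follows, assuming a plain gradient-descent update (the class name and hyperparameters are illustrative, not the original file's contents):

# Hedged sketch: a minimal custom optimizer built on the imports above.
class SimpleSGD(optimizer.Optimizer):
    """Plain gradient-descent optimizer used only to show the skeleton."""

    def __init__(self, learning_rate=0.01, use_locking=False, name="SimpleSGD"):
        super(SimpleSGD, self).__init__(use_locking, name)
        self._lr = learning_rate

    def _prepare(self):
        # Turn the Python hyperparameter into a tensor once per apply call.
        self._lr_t = ops.convert_to_tensor(self._lr, name="learning_rate")

    def _apply_dense(self, grad, var):
        # var <- var - lr * grad
        lr = math_ops.cast(self._lr_t, var.dtype.base_dtype)
        return state_ops.assign_sub(var, lr * grad, use_locking=self._use_locking)

    def _apply_sparse(self, grad, var):
        raise NotImplementedError("Sparse updates are omitted in this sketch.")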
{
  "citation": "@inproceedings{Silberman:ECCV12,\n author = {Nathan Silberman, Derek Hoiem, Pushmeet Kohli and Rob Fergus},\n title = {Indoor Segmentation and Support Inference from RGBD Images},\n booktitle = {ECCV},\n year = {2012}\n}\n@article{Alhashim2018,\n author = {Ibraheem Alhashim and Peter Wonka},\n title = {High Quality Monocular Depth Estimation via Transfer Learning},\n journal = {arXiv e-prints},\n volume = {abs/1812.11941},\n year = {2018},\n url = {https://arxiv.org/abs/1812.11941},\n eid = {arXiv:1812.11941},\n eprint = {1812.11941}\n}\n",
  "description": "The NYU-Depth V2 data set is comprised of video sequences from a variety of\nindoor scenes as recorded by both the RGB and Depth cameras from the\nMicrosoft Kinect.\n",
  "location": {
    "urls": [
      "https://cs.nyu.edu/~silberman/datasets/nyu_depth_v2.html"
    ]
  },
  "name": "nyu_depth_v2",
  "schema": {