How to use Pelican on GitHub Pages
Author: Josef Jezek
sudo apt-get install python-setuptools
"""Script to illustrate usage of tf.estimator.Estimator in TF v1.3""" | |
import tensorflow as tf | |
from tensorflow.examples.tutorials.mnist import input_data as mnist_data | |
from tensorflow.contrib import slim | |
from tensorflow.contrib.learn import ModeKeys | |
from tensorflow.contrib.learn import learn_runner | |
# Show debugging output
tf.logging.set_verbosity(tf.logging.DEBUG)
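The excerpt above stops after the imports. Below is a minimal sketch, not the original script, of how a model_fn is typically wired into tf.estimator.Estimator in TF 1.x; the single dense layer, the learning rate, and the model_dir path are illustrative assumptions.

def model_fn(features, labels, mode, params):
    """Toy model_fn: a single dense layer over already-flattened inputs (illustrative)."""
    logits = tf.layers.dense(features, units=10)
    predictions = tf.argmax(logits, axis=-1)
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)
    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    train_op = tf.train.GradientDescentOptimizer(params['learning_rate']).minimize(
        loss, global_step=tf.train.get_global_step())
    return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op,
                                      predictions=predictions)

# The Estimator ties the model_fn to its hyperparameters and a checkpoint directory.
estimator = tf.estimator.Estimator(model_fn=model_fn,
                                   params={'learning_rate': 0.01},
                                   model_dir='/tmp/mnist_estimator')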
# USAGE
# python build_face_dataset.py --cascade haarcascade_frontalface_default.xml --output dataset/adrian
# import the necessary packages
from imutils.video import VideoStream
import argparse
import imutils
import time
import cv2
import os
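The snippet ends at the imports. The sketch below is an assumption based on the USAGE line, not the post's exact code: parse the two arguments, show webcam frames with detected face boxes for feedback, and write the current frame to --output on a key press. It reuses the imports above; the k/q key bindings and the 400-pixel display width are illustrative.

ap = argparse.ArgumentParser()
ap.add_argument("-c", "--cascade", required=True, help="path to the Haar cascade XML file")
ap.add_argument("-o", "--output", required=True, help="directory to store captured frames")
args = vars(ap.parse_args())

# load the face detector and give the webcam sensor time to warm up
detector = cv2.CascadeClassifier(args["cascade"])
vs = VideoStream(src=0).start()
time.sleep(2.0)
total = 0  # number of frames saved so far

while True:
    frame = vs.read()
    orig = frame.copy()
    frame = imutils.resize(frame, width=400)

    # detect faces and draw boxes so the user can position themselves
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    rects = detector.detectMultiScale(gray, scaleFactor=1.1,
                                      minNeighbors=5, minSize=(30, 30))
    for (x, y, w, h) in rects:
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)

    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord("k"):
        # save the full-resolution frame to the output directory
        p = os.path.sep.join([args["output"], "{}.png".format(str(total).zfill(5))])
        cv2.imwrite(p, orig)
        total += 1
    elif key == ord("q"):
        break

cv2.destroyAllWindows()
vs.stop()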
# Implementation of a simple MLP network with one hidden layer. Tested on the iris data set.
# Requires: numpy, sklearn>=0.18.1, tensorflow>=1.0
# NOTE: In order to make the code simple, we rewrite x * W_1 + b_1 = x' * W_1'
# where x' = [x | 1] and W_1' is the matrix W_1 appended with a new row with elements b_1's.
# Similarly, for h * W_2 + b_2
import tensorflow as tf
import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
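As a quick check of the bias-absorption trick described in the NOTE, the small illustration below (added here, not part of the original script, with arbitrary shapes) verifies that appending a constant 1 to each input row and stacking b_1 as an extra row of W_1 reproduces the affine form.

# Illustration only: x' * W_1' == x * W_1 + b_1 for x' = [x | 1] and
# W_1' = W_1 with b_1 appended as a new last row.
x = np.random.rand(5, 4)      # 5 samples, 4 input features (arbitrary)
W_1 = np.random.rand(4, 3)    # 4 inputs -> 3 hidden units
b_1 = np.random.rand(3)

x_prime = np.hstack([x, np.ones((5, 1))])   # x' = [x | 1]
W_1_prime = np.vstack([W_1, b_1])           # W_1' = [W_1 ; b_1]

assert np.allclose(x_prime.dot(W_1_prime), x.dot(W_1) + b_1)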
'''This script goes along the blog post
"Building powerful image classification models using very little data"
from blog.keras.io.
It uses data that can be downloaded at:
https://www.kaggle.com/c/dogs-vs-cats/data
In our setup, we:
- created a data/ folder
- created train/ and validation/ subfolders inside data/
'''
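Given the data/train and data/validation layout described in the docstring, a typical way to feed it to Keras is via ImageDataGenerator.flow_from_directory. The sketch below is an illustration added here, not the blog post's exact code; the 150x150 target size, batch size of 16, and augmentation settings are assumed values.

from keras.preprocessing.image import ImageDataGenerator

# Augment the (small) training set; only rescale the validation set.
train_datagen = ImageDataGenerator(rescale=1. / 255,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)
val_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow_from_directory(
    'data/train', target_size=(150, 150), batch_size=16, class_mode='binary')
validation_generator = val_datagen.flow_from_directory(
    'data/validation', target_size=(150, 150), batch_size=16, class_mode='binary')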
filename = 'twins';
OUTPUT = 'C:\Users\huy\Desktop\matlab\';
TRAIN = 'C:\Users\huy\Desktop\matlab\faces\';
INPUT = 'C:\Users\huy\Desktop\matlab\';
% load the training data set (60 faces)
load('data60_256.mat');
% read in original image, face and eyes coordinates/sizes data (from OpenCV's