Skip to content

Instantly share code, notes, and snippets.

View dan-zheng's full-sized avatar
🥛
우유맛

Dan Zheng dan-zheng

🥛
우유맛
View GitHub Profile
@dan-zheng
dan-zheng / param.swift
Created July 15, 2018 03:55
Check if AD works with parameter update SIL
struct Model {
var w: Float
struct Parameters {
var w: Float
}
var allParameters: Parameters {
get {
return Parameters(w: w)
// RUN: %target-swift-frontend -Xllvm -tf-dump-intermediates -O -emit-sil -verify %s | %FileCheck %s
import TensorFlow
// This test is intended to verify that all of the operations end up in the
// graph: that there are no host/accelerator copies generated. This tests a
// combination of the partitioning pass being able to recognize various forms,
// but also checks that certain ops implementations are promotable as well.
// Please keep it so no errors or warnings are generated by functions in this
@dan-zheng
dan-zheng / unexpected-send-recv.swift
Created July 28, 2018 01:28
Unexpected send/receive, scalar promotion deficiency
import TensorFlow
func test() {
for _ in 0...10 {
let x = Tensor<Float>(ones: [2, 2])
_ = x.reshaped(toShape: Tensor<Int32>([4, Int32(1 * 1)]))
// Alternative 1: literal `1` avoids send/receive.
// _ = x.reshaped(toShape: Tensor<Int32>([4, 1]))
// Alternative 2: avoiding `Int32` avoids send/receive.
@dan-zheng
dan-zheng / max-pool-graph-op.txt
Created July 30, 2018 18:28
Max pooling produces send/recv
%24 = graph_op "MaxPoolV2,i,i,i"(%4 : $TensorHandle<Float>, %14 : $TensorHandle<Int32>, %22 : $TensorHandle<Int32>) {T: $Float, padding: "SAME", data_format: "NHWC", __device: "/device:CPU:0"} : $TensorHandle<Float>
As you can see, kernel size and strides are SSA operands of `graph_op`, not constant attributes.
This is because of how MaxPool is defined:
REGISTER_OP("MaxPoolV2")
.Attr(
"T: {half, bfloat16, float, double, int32, int64, uint8, int16, int8, "
"uint16, qint8} = DT_FLOAT")
.Attr(GetPaddingAttrString())
@dan-zheng
dan-zheng / artificial-loc.swift
Created July 31, 2018 01:54
SIL verification failed, graph_op has artificial location
import TensorFlow
func train(iterationCount: Int) {
let images = Tensor<Float>(ones: [1000, 784])
let batchSize = Float(images.shape[0])
print("Begin training for \(iterationCount) iterations.")
for _ in 0...iterationCount {
let bound = Int32(batchSize)/25
@dan-zheng
dan-zheng / AdamOptimizer.swift
Last active November 17, 2018 23:22
Adam optimizer example
/// Parameter container for a two-layer MNIST classifier, used by the
/// Adam optimizer example. Conforming to `ParameterAggregate` lets the
/// compiler synthesize `allKeyPaths` for generic parameter updates.
struct MNISTParameters : ParameterAggregate {
    /// First-layer weights (784 inputs -> 30 hidden units), initialized
    /// from a standard normal distribution.
    var w1: Tensor<Float> = Tensor(randomNormal: [784, 30])
    /// Second-layer weights (30 hidden units -> 10 outputs), initialized
    /// from a standard normal distribution.
    var w2: Tensor<Float> = Tensor(randomNormal: [30, 10])
    // The compiler synthesizes the equivalent of:
    //   static var allKeyPaths: [WritableKeyPath<MNISTParameters, Tensor<Float>>] {
    //       return [\MNISTParameters.w1, \MNISTParameters.w2]
    //   }
    // Learn more about key paths here: https://github.com/apple/swift-evolution/blob/master/proposals/0161-key-paths.md
}
@dan-zheng
dan-zheng / _DynamicParameters.md
Last active December 15, 2019 06:03
Dynamic parameters and `allKeyPaths` synthesis
import TensorFlow
/// Minimal model with a single tensor parameter, used to demonstrate
/// dynamic parameters and `allKeyPaths` synthesis.
/// NOTE(review): `Parameterized`, `@TFParameter`, and the associated
/// `Parameters` type come from Swift for TensorFlow; the conformance
/// machinery is compiler-synthesized and not visible in this snippet —
/// confirm against the toolchain before relying on it.
struct Model: Parameterized, Differentiable {
@TFParameter var w: Tensor<Float>
// Maps a cotangent (gradient) vector to a tangent vector. The two
// spaces coincide for this model, so the value passes through unchanged.
func tangentVector(from cotangent: Parameters) -> Parameters {
return cotangent
}
}
let model = Model(w: Tensor<Float>(zeros: [5]))
let pb = pullback(at: model) { m in m.w }
## Building Swift
./swift/utils/update-checkout --skip-repository swift --clone --scheme tensorflow
./swift/utils/build-script -x -R --debug-swift --skip-build-benchmarks --stdlib-deployment-targets=macosx-x86_64 2>&1 | tee log.txt
./swift/utils/build-script -R --skip-build-benchmarks --stdlib-deployment-targets=macosx-x86_64 --enable-tensorflow --no-swift-stdlib-assertions --reconfigure
@dan-zheng
dan-zheng / hello.swift
Last active February 18, 2019 01:19
Proof-of-concept API change that avoids manually spelling out a very long generic type parameter
import TensorFlow
extension SGD {
// Take `Model` metatype as an argument, so it doesn't need to be written out
// explicitly as a generic parameter.
// IMPORTANT: This API change might not be desirable, because usage involves
// getting dynamic type via `type(of:)`.
convenience init(_ modelType: Model.Type, learningRate: Scalar) {
self.init(learningRate: learningRate)
}