package main

import (
	"fmt"
	"math"
	"math/rand"
	"sync"
	"sync/atomic"
	"time"
)
Pythonista7 / main.go (created October 6, 2022 19:09): Concurrently Fetch but Sequential Response
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"sync"
	"time"
)
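The Go body of this gist is cut off above, so as a hedged sketch of the pattern named in the title (start every fetch at once, but consume responses in request order), here is the same idea in Python. The URLs and the fetch helper are placeholders, not taken from the gist:

import json
from concurrent.futures import ThreadPoolExecutor
from urllib.request import urlopen

# Placeholder endpoints, not from the gist.
URLS = [f"https://httpbin.org/get?i={i}" for i in range(5)]

def fetch(url):
    # Each worker performs one blocking HTTP GET and decodes the JSON body.
    with urlopen(url, timeout=10) as resp:
        return json.load(resp)

with ThreadPoolExecutor(max_workers=5) as pool:
    # map() runs the fetches concurrently but yields results in
    # submission order: concurrent fetch, sequential response.
    for result in pool.map(fetch, URLS):
        print(result["args"]["i"])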
# Prepend copyright.txt to any .tsx file that does not already contain a Copyright notice.
for i in $(find ./ -name '*.tsx')
do
  if ! grep -q Copyright "$i"
  then
    cat copyright.txt "$i" > "$i.new" && mv "$i.new" "$i"
  fi
done
# Common build stage
FROM node:14.14.0-alpine3.12 AS common-build-stage

WORKDIR /app
COPY . .

RUN npm install

EXPOSE 3000
import tensorflow as tf

num_features = 784  # 28*28 input image pixels.
num_classes = 10    # Total number of classes.

# Weight of shape [784, 10], the 28*28 image features, and total number of classes.
W = tf.Variable(tf.ones([num_features, num_classes]), name="weight")
# Bias of shape [10], the total number of classes.
b = tf.Variable(tf.zeros([num_classes]), name="bias")

# Logistic regression (Wx + b).
def logistic_regression(x):
    # Apply softmax to normalize the logits to a probability distribution.
    return tf.nn.softmax(tf.matmul(x, W) + b)

# Cross-entropy loss.
def cross_entropy(y_pred, y_true):
    # Encode label to a one-hot vector.
    y_true = tf.one_hot(y_true, depth=num_classes)
    # Clip prediction values to avoid log(0).
    y_pred = tf.clip_by_value(y_pred, 1e-9, 1.)
    # Compute cross-entropy.
    return tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred)))
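The fragment stops before the training step. The full gist presumably pairs these definitions with an SGD update along these lines; this is a sketch, and batch_x/batch_y stand for mini-batches of flattened 28*28 images and integer labels that are not shown above:

optimizer = tf.optimizers.SGD(learning_rate=0.01)

def run_optimization(batch_x, batch_y):
    # Record the forward pass so gradients can be computed.
    with tf.GradientTape() as tape:
        pred = logistic_regression(batch_x)
        loss = cross_entropy(pred, batch_y)
    # Differentiate the loss w.r.t. W and b, then apply one SGD step.
    gradients = tape.gradient(loss, [W, b])
    optimizer.apply_gradients(zip(gradients, [W, b]))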
x = tf.constant(3.0)
with tf.GradientTape(persistent=True) as g:
  g.watch(x)
  y = x * x
  z = y * y
dz_dx = g.gradient(z, x)  # (4*x^3 at x = 3)
print(dz_dx)
# tf.Tensor(108.0, shape=(), dtype=float32)
dy_dx = g.gradient(y, x)  # (2*x at x = 3)
print(dy_dx)
# tf.Tensor(6.0, shape=(), dtype=float32)
with tf.GradientTape() as g:
  g.watch(x)
  with tf.GradientTape() as gg:
    gg.watch(x)
    y = x * x
  dy_dx = gg.gradient(y, x)  # dy_dx = 2 * x
d2y_dx2 = g.gradient(dy_dx, x)  # d2y_dx2 = 2
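Evaluating both derivatives at x = 3.0 gives the values the comments predict (dy/dx = 2*3 = 6, d2y/dx2 = 2):

print(dy_dx)    # tf.Tensor(6.0, shape=(), dtype=float32)
print(d2y_dx2)  # tf.Tensor(2.0, shape=(), dtype=float32)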
/* eslint-disable no-use-before-define */
interface SlidevConfig {
  title: string
  /**
   * String template to compose title
   *
   * @example "%s - Slidev" - to suffix " - Slidev" to all pages
   * @default '%s'
   */
  titleTemplate: string
}
def linear_regression(x):
    return W * x + b

def mean_square(y_pred, y_true):
    return tf.reduce_mean(tf.square(y_pred - y_true))

optimizer = tf.optimizers.SGD(learning_rate=0.01)

def run_optimization():
    # Record the forward pass, then differentiate the loss w.r.t. W and b.
    with tf.GradientTape() as g:
        loss = mean_square(linear_regression(X), Y)
    gradients = g.gradient(loss, [W, b])
    optimizer.apply_gradients(zip(gradients, [W, b]))
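To exercise run_optimization end to end, here is a toy driver with made-up data; the real gist defines its own X, Y, W, and b, so everything below is illustrative only:

import numpy as np

X = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32)
Y = np.array([3.0, 5.0, 7.0, 9.0], dtype=np.float32)  # Y = 2*X + 1
W = tf.Variable(np.random.randn(), name="weight", dtype=tf.float32)
b = tf.Variable(np.random.randn(), name="bias", dtype=tf.float32)

for step in range(1, 1001):
    run_optimization()
    if step % 200 == 0:
        loss = mean_square(linear_regression(X), Y)
        print(f"step {step}: loss={loss.numpy():.4f}, W={W.numpy():.4f}, b={b.numpy():.4f}")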