# IDA (disassembler) and Hex-Rays (decompiler) plugin for Apple AMX
#
# WIP research. (This was edited to add more info after someone posted it to
# Hacker News. Click "Revisions" to see full changes.)
#
# Copyright (c) 2020 dougallj
# Based on Python port of VMX intrinsics plugin:
# Copyright (c) 2019 w4kfu - Synacktiv
RahulBhalley / model.swift
Created January 30, 2020 07:40 — forked from kongzii/model.swift
Example of saving the trained weights of a model in Swift for TensorFlow
import Foundation
import Python
import TensorFlow

public struct MyModel: Layer {
    public var conv1d: Conv1D<Float>
    public var dense1: Dense<Float>
    public var dropout: Dropout<Float>
    public var denseOut: Dense<Float>

    // The gist preview cuts off here; a minimal forward pass is assumed for completeness.
    @differentiable
    public func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return input.sequenced(through: conv1d, dense1, dropout, denseOut)
    }
}
RahulBhalley / min-char-rnn.py
Created June 2, 2018 16:03 — forked from karpathy/min-char-rnn.py
Minimal character-level language model with a Vanilla Recurrent Neural Network, in Python/numpy
"""
Minimal character-level Vanilla RNN model. Written by Andrej Karpathy (@karpathy)
BSD License
"""
import numpy as np
# data I/O
data = open('input.txt', 'r').read() # should be simple plain text file
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
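The preview stops at the data statistics; the full gist continues by reporting the corpus size and building the character/index lookup tables used to encode and decode text, along the lines of:

print('data has %d characters, %d unique.' % (data_size, vocab_size))
char_to_ix = { ch: i for i, ch in enumerate(chars) }  # character -> integer index
ix_to_char = { i: ch for i, ch in enumerate(chars) }  # integer index -> character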
import tensorflow as tf

# 1st layer parameters of our network
X = tf.Variable([[0, 0], [0, 1], [1, 0], [1, 1]])  # inputs; each row is one input example.
W = tf.Variable([[1, 1], [1, 1]])  # weights between the input layer and the hidden layer.
c = tf.Variable([[0], [-1]])  # biases for the hidden units.

# 2nd layer parameters of our network
w = tf.Variable([[1], [-2]])  # weights between the hidden layer and the output layer.
b = tf.Variable([[0], [0], [0], [0]])  # biases for the output units.

# Forward propagation through the 1st layer
XW = tf.matmul(X, tf.transpose(W))
XW_c = tf.add(XW, tf.transpose(c))
a_XW_c = tf.nn.relu(XW_c)

# Forward propagation through the 2nd layer
a_XW_c_w = tf.matmul(a_XW_c, w)
a_XW_c_w_b = tf.add(a_XW_c_w, b)

init = tf.global_variables_initializer()

# Launch the TensorFlow graph session
with tf.Session() as sess:
    # Initialize all the variables.
    sess.run(init)
    # Evaluate the network's output for all four inputs.
    print(sess.run(a_XW_c_w_b))
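As a quick sanity check, the same forward pass can be reproduced in plain NumPy (a sketch, not part of the original gist): with these fixed weights and biases, the two-layer ReLU network computes the XOR of its two inputs.

import numpy as np

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
W = np.array([[1, 1], [1, 1]])
c = np.array([[0], [-1]])
w = np.array([[1], [-2]])

h = np.maximum(X @ W.T + c.T, 0)  # hidden ReLU activations
y = h @ w                          # output layer (zero biases omitted)
print(y.ravel())                   # [0 1 1 0], i.e. XOR of each input pair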