@attentionmech
Created May 13, 2025 14:20
self-similar curve fitting test
import numpy as np
import matplotlib.pyplot as plt
import os


# --- GELU Activation ---
# Tanh approximation of GELU (Hendrycks & Gimpel, 2016).
def gelu(x):
    return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * x**3)))


def gelu_derivative(x):
    tanh_term = np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * x**3))
    term1 = 0.5 * (1 + tanh_term)
    term2 = (0.5 * x * (1 - tanh_term ** 2) *
             (np.sqrt(2 / np.pi) * (1 + 3 * 0.044715 * x**2)))
    return term1 + term2
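
# Optional sanity check (illustrative, not part of the original script): the
# analytic derivative above should agree with a central finite difference.
_x_check = np.linspace(-3.0, 3.0, 13)
_eps = 1e-5
_fd = (gelu(_x_check + _eps) - gelu(_x_check - _eps)) / (2 * _eps)
assert np.allclose(gelu_derivative(_x_check), _fd, atol=1e-6)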

# --- Self-Similar Sine Generator ---
# Sum of sines with doubling frequency and geometrically decaying amplitude,
# i.e. a finite-depth Weierstrass-style curve.
def self_similar_sine(x, depth=5, base_freq=1.0, decay=0.5):
    y = np.zeros_like(x)
    for i in range(depth):
        freq = base_freq * (2 ** i)
        amp = decay ** i
        y += amp * np.sin(freq * x)
    return y
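
# Why "self-similar" (an illustrative check, assuming the defaults
# base_freq=1.0 and decay=0.5): doubling x and rescaling by the decay shifts
# the sum by exactly one term, so
#     decay * f(2x, depth=d) == f(x, depth=d+1) - sin(x)
_xs = np.linspace(-np.pi, np.pi, 9)
assert np.allclose(0.5 * self_similar_sine(2 * _xs, depth=5),
                   self_similar_sine(_xs, depth=6) - np.sin(_xs))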

# --- Neural Network with 3 Hidden Layers and GELU ---
class NeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size):
        # Small random weights, zero biases.
        self.weights1 = np.random.randn(input_size, hidden_size) * 0.1
        self.bias1 = np.zeros((1, hidden_size))
        self.weights2 = np.random.randn(hidden_size, hidden_size) * 0.1
        self.bias2 = np.zeros((1, hidden_size))
        self.weights3 = np.random.randn(hidden_size, hidden_size) * 0.1
        self.bias3 = np.zeros((1, hidden_size))
        self.weights4 = np.random.randn(hidden_size, output_size) * 0.1
        self.bias4 = np.zeros((1, output_size))

    def forward(self, X):
        # Cache pre-activations (z) and activations (a) for backprop.
        self.z1 = np.dot(X, self.weights1) + self.bias1
        self.a1 = gelu(self.z1)
        self.z2 = np.dot(self.a1, self.weights2) + self.bias2
        self.a2 = gelu(self.z2)
        self.z3 = np.dot(self.a2, self.weights3) + self.bias3
        self.a3 = gelu(self.z3)
        # Linear output layer for regression.
        self.z4 = np.dot(self.a3, self.weights4) + self.bias4
        return self.z4

    def backward(self, X, y, learning_rate):
        m = X.shape[0]
        # Gradient of 0.5 * MSE w.r.t. the linear output (the missing factor
        # of 2 relative to plain MSE only rescales the learning rate).
        dZ4 = self.z4 - y
        dW4 = np.dot(self.a3.T, dZ4) / m
        db4 = np.sum(dZ4, axis=0, keepdims=True) / m
        dA3 = np.dot(dZ4, self.weights4.T)
        dZ3 = dA3 * gelu_derivative(self.z3)
        dW3 = np.dot(self.a2.T, dZ3) / m
        db3 = np.sum(dZ3, axis=0, keepdims=True) / m
        dA2 = np.dot(dZ3, self.weights3.T)
        dZ2 = dA2 * gelu_derivative(self.z2)
        dW2 = np.dot(self.a1.T, dZ2) / m
        db2 = np.sum(dZ2, axis=0, keepdims=True) / m
        dA1 = np.dot(dZ2, self.weights2.T)
        dZ1 = dA1 * gelu_derivative(self.z1)
        dW1 = np.dot(X.T, dZ1) / m
        db1 = np.sum(dZ1, axis=0, keepdims=True) / m
        # Gradient-descent update, output layer back to input layer.
        self.weights4 -= learning_rate * dW4
        self.bias4 -= learning_rate * db4
        self.weights3 -= learning_rate * dW3
        self.bias3 -= learning_rate * db3
        self.weights2 -= learning_rate * dW2
        self.bias2 -= learning_rate * db2
        self.weights1 -= learning_rate * dW1
        self.bias1 -= learning_rate * db1

    def train(self, X, y, epochs, learning_rate, output_folder):
        os.makedirs(output_folder, exist_ok=True)
        for epoch in range(epochs):
            self.z4 = self.forward(X)
            self.backward(X, y, learning_rate)
            if epoch % 50 == 0:
                loss = np.mean((self.z4 - y) ** 2)
                print(f'Epoch {epoch}, Loss: {loss:.6f}')
                # Save a snapshot of prediction vs. target every 50 epochs.
                plt.figure(figsize=(10, 6))
                plt.plot(X, y, label="Target Curve", color='blue')
                plt.plot(X, self.z4, label="NN Prediction", color='red', linestyle='dashed')
                plt.legend()
                plt.xlabel('x')
                plt.ylabel('y')
                plt.title(f'Epoch {epoch} - Prediction vs Target')
                plt.grid(True)
                image_path = os.path.join(output_folder, f'epoch_{epoch}.png')
                plt.savefig(image_path)
                plt.close()
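
# Quick smoke test (illustrative, not required for training): a batch of
# scalar inputs should map to a matching batch of scalar outputs.
_probe = NeuralNetwork(input_size=1, hidden_size=4, output_size=1)
assert _probe.forward(np.zeros((8, 1))).shape == (8, 1)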

# --- Dataset Generation ---
X = np.linspace(-2 * np.pi, 2 * np.pi, 1000).reshape(-1, 1)
y = self_similar_sine(X, depth=5, base_freq=1.0, decay=0.5)

# Optional: Visualize the target curve
plt.figure(figsize=(10, 4))
plt.plot(X, y, label='Self-Similar Sine Target', color='purple')
plt.title("Self-Similar Sine Curve")
plt.grid(True)
plt.legend()
plt.show()

# --- Train the Neural Network ---
input_size = 1
hidden_size = 50
output_size = 1
learning_rate = 0.05
epochs = 1000
output_folder = 'output_images'

nn = NeuralNetwork(input_size, hidden_size, output_size)
nn.train(X, y, epochs, learning_rate, output_folder)
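
# Optional post-processing (a sketch, assuming the third-party `imageio`
# package is available; not part of the original gist): stitch the saved
# per-epoch frames into an animation of the fit improving.
# import imageio.v2 as imageio
# frames = [imageio.imread(os.path.join(output_folder, f'epoch_{e}.png'))
#           for e in range(0, epochs, 50)]
# imageio.mimsave('training_progress.gif', frames, duration=0.3)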