From cccb0ca2af4e8dca5cdb822ec7ddfab0ff5f8176 Mon Sep 17 00:00:00 2001
From: shreyakash24
Date: Wed, 15 Oct 2025 18:31:43 +0530
Subject: [PATCH 1/3] added rnn code

---
 neural_network/recurrent_neural_network.py | 95 ++++++++++++++++++++++
 1 file changed, 95 insertions(+)
 create mode 100644 neural_network/recurrent_neural_network.py

diff --git a/neural_network/recurrent_neural_network.py b/neural_network/recurrent_neural_network.py
new file mode 100644
index 000000000000..61587aa9c0be
--- /dev/null
+++ b/neural_network/recurrent_neural_network.py
@@ -0,0 +1,95 @@
+"""
+Minimal Recurrent Neural Network (RNN) demonstration.
+
+Forward propagation explanation:
+https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
+RNN fundamentals:
+https://towardsdatascience.com/recurrent-neural-networks-d4642c9bc7ce/
+"""
+
+import math
+import random
+
+
+# Sigmoid activation
+def sigmoid_function(value: float, deriv: bool = False) -> float:
+    """Return the sigmoid function of a float.
+
+    >>> round(sigmoid_function(3.5), 4)
+    0.9707
+    >>> round(sigmoid_function(0.5, True), 4)
+    0.25
+    """
+    if deriv:
+        return value * (1 - value)
+    return 1 / (1 + math.exp(-value))
+
+
+# Initial constants
+INITIAL_VALUE = 0.02  # learning rate
+SEQUENCE_LENGTH = 5 # time steps in the sequence
+
+
+def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
+    """Return the value found after RNN forward propagation training.
+
+    >>> res = forward_propagation_rnn(50, 500_000)
+    >>> res > 45 and res < 55
+    True
+
+    >>> res = forward_propagation_rnn(50, 500)
+    >>> res > 48 and res < 50
+    True
+    """
+    random.seed(0)
+
+    # Random weight initialization
+    W_xh = (random.random() * 2 - 1)  # Input to hidden
+    W_hh = (random.random() * 2 - 1)  # Hidden to hidden (recurrent)
+    W_hy = (random.random() * 2 - 1)  # Hidden to output
+
+    # Training loop
+    for _ in range(number_propagations):
+        h_prev = 0.0  # hidden state starts at zero
+        total_error = 0.0
+
+        # Forward pass through time
+        for t in range(SEQUENCE_LENGTH):
+            # Fake input sequence: small constant or could be pattern-based
+            x_t = INITIAL_VALUE
+
+            # Hidden state update
+            h_t = sigmoid_function(W_xh * x_t + W_hh * h_prev)
+
+            # Output
+            y_t = sigmoid_function(W_hy * h_t)
+
+            # Error (target distributed over time steps)
+            error_t = (expected / 100) - y_t
+            total_error += abs(error_t)
+
+            # Backpropagation Through Time (simplified)
+            d_y = error_t * sigmoid_function(y_t, True)
+            d_h = d_y * W_hy * sigmoid_function(h_t, True)
+
+            # Weight updates
+            W_hy += INITIAL_VALUE * d_y * h_t
+            W_xh += INITIAL_VALUE * d_h * x_t
+            W_hh += INITIAL_VALUE * d_h * h_prev
+
+            # Move to next time step
+            h_prev = h_t
+
+    # Final output after training
+    final_output = y_t * 100
+    return final_output
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
+
+    expected = int(input("Expected value: "))
+    number_propagations = int(input("Number of propagations: "))
+    print(forward_propagation_rnn(expected, number_propagations))

From f8a74ef2a0bcf66b8a04cc4c741b1d2bec3b2f49 Mon Sep 17 00:00:00 2001
From: shreyakash24
Date: Wed, 15 Oct 2025 18:40:48 +0530
Subject: [PATCH 2/3] modified formatting

---
 neural_network/recurrent_neural_network.py | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/neural_network/recurrent_neural_network.py b/neural_network/recurrent_neural_network.py
index 61587aa9c0be..e7962e7e6270 100644
--- a/neural_network/recurrent_neural_network.py
+++ b/neural_network/recurrent_neural_network.py
@@ -27,7 +27,7 @@ def sigmoid_function(value: float, deriv: bool = False) -> float:
 
 # Initial constants
 INITIAL_VALUE = 0.02  # learning rate
-SEQUENCE_LENGTH = 5 # time steps in the sequence
+SEQUENCE_LENGTH = 5  # time steps in the sequence
 
 
 def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
@@ -44,9 +44,9 @@ def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
     random.seed(0)
 
     # Random weight initialization
-    W_xh = (random.random() * 2 - 1)  # Input to hidden
-    W_hh = (random.random() * 2 - 1)  # Hidden to hidden (recurrent)
-    W_hy = (random.random() * 2 - 1)  # Hidden to output
+    w_xh = random.random() * 2 - 1  # Input to hidden
+    w_hh = random.random() * 2 - 1  # Hidden to hidden (recurrent)
+    w_hy = random.random() * 2 - 1  # Hidden to output
 
     # Training loop
     for _ in range(number_propagations):
@@ -54,15 +54,15 @@ def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
         total_error = 0.0
 
         # Forward pass through time
-        for t in range(SEQUENCE_LENGTH):
+        for _t in range(SEQUENCE_LENGTH):
             # Fake input sequence: small constant or could be pattern-based
             x_t = INITIAL_VALUE
 
             # Hidden state update
-            h_t = sigmoid_function(W_xh * x_t + W_hh * h_prev)
+            h_t = sigmoid_function(w_xh * x_t + w_hh * h_prev)
 
             # Output
-            y_t = sigmoid_function(W_hy * h_t)
+            y_t = sigmoid_function(w_hy * h_t)
 
             # Error (target distributed over time steps)
             error_t = (expected / 100) - y_t
@@ -70,12 +70,12 @@ def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
 
             # Backpropagation Through Time (simplified)
             d_y = error_t * sigmoid_function(y_t, True)
-            d_h = d_y * W_hy * sigmoid_function(h_t, True)
+            d_h = d_y * w_hy * sigmoid_function(h_t, True)
 
             # Weight updates
-            W_hy += INITIAL_VALUE * d_y * h_t
-            W_xh += INITIAL_VALUE * d_h * x_t
-            W_hh += INITIAL_VALUE * d_h * h_prev
+            w_hy += INITIAL_VALUE * d_y * h_t
+            w_xh += INITIAL_VALUE * d_h * x_t
+            w_hh += INITIAL_VALUE * d_h * h_prev
 
             # Move to next time step
             h_prev = h_t

From 4f9038d3416cb4cbabb77f39cf8fbf31a639ecd6 Mon Sep 17 00:00:00 2001
From: shreyakash24
Date: Wed, 15 Oct 2025 18:43:26 +0530
Subject: [PATCH 3/3] edited input

---
 neural_network/recurrent_neural_network.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/neural_network/recurrent_neural_network.py b/neural_network/recurrent_neural_network.py
index e7962e7e6270..a28567c78f6c 100644
--- a/neural_network/recurrent_neural_network.py
+++ b/neural_network/recurrent_neural_network.py
@@ -90,6 +90,6 @@ def forward_propagation_rnn(expected: int, number_propagations: int) -> float:
 
     doctest.testmod()
 
-    expected = int(input("Expected value: "))
-    number_propagations = int(input("Number of propagations: "))
+    expected = int(input("Expected value: ").strip())
+    number_propagations = int(input("Number of propagations: ").strip())
     print(forward_propagation_rnn(expected, number_propagations))
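
A minimal usage sketch (not part of the patches above) for reviewers who want to try the module this series adds; it assumes the repository root is on PYTHONPATH so the neural_network package import resolves:

    from neural_network.recurrent_neural_network import forward_propagation_rnn

    # Train the toy RNN until its scaled sigmoid output approaches the target of 50.
    result = forward_propagation_rnn(50, 500_000)
    print(f"Converged output: {result:.2f}")  # the doctest expects a value between 45 and 55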