15 changes: 9 additions & 6 deletions demo.py
@@ -1,8 +1,8 @@
#The optimal values of m and b can actually be calculated in closed form, with far less effort than running gradient descent.
#This is just to demonstrate gradient descent.

-from numpy import *
-
+import numpy as np
+import pandas as pd
# y = mx + b
# m is slope, b is y-intercept
def compute_error_for_line_given_points(b, m, points):
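The comment at the top of demo.py notes that the optimal m and b can be computed directly, without gradient descent. For comparison, here is a minimal sketch of that closed-form least-squares fit, assuming the same two-column data.csv layout; the helper name closed_form_fit is illustrative and not part of this PR.

import numpy as np

def closed_form_fit(points):
    # Ordinary least squares for y = m*x + b, solved as a linear system.
    x, y = points[:, 0], points[:, 1]
    X = np.column_stack([x, np.ones_like(x)])  # column of ones models the intercept b
    (m, b), *_ = np.linalg.lstsq(X, y, rcond=None)
    return b, m

if __name__ == '__main__':
    b, m = closed_form_fit(np.genfromtxt("data.csv", delimiter=","))
    print("Closed-form fit: b = {0}, m = {1}".format(b, m))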
@@ -34,15 +34,18 @@ def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    return [b, m]

def run():
-    points = genfromtxt("data.csv", delimiter=",")
+    points = np.genfromtxt("data.csv", delimiter=",")
+    # Label the two columns via pandas, then hand the descent routines a plain NumPy array.
+    df = pd.DataFrame(points, columns=['column1', 'column2'])
+    points = df.to_numpy()
    learning_rate = 0.0001
    initial_b = 0 # initial y-intercept guess
    initial_m = 0 # initial slope guess
    num_iterations = 1000
-    print "Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points))
-    print "Running..."
+    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points)))
+    print("Running...")
    [b, m] = gradient_descent_runner(points, initial_b, initial_m, learning_rate, num_iterations)
-    print "After {0} iterations b = {1}, m = {2}, error = {3}".format(num_iterations, b, m, compute_error_for_line_given_points(b, m, points))
+    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(num_iterations, b, m, compute_error_for_line_given_points(b, m, points)))

if __name__ == '__main__':
    run()
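The body of gradient_descent_runner is collapsed in this diff view. For readers following along, the per-iteration update such a runner typically performs in this kind of demo looks roughly like the sketch below; it assumes a plain mean-squared-error loss on y = m*x + b and is an illustration, not the exact code from this file.

def step_gradient(b_current, m_current, points, learning_rate):
    # One gradient-descent step on the mean squared error of y = m*x + b.
    n = float(len(points))
    b_gradient = 0.0
    m_gradient = 0.0
    for x, y in points:
        # Partial derivatives of (1/n) * sum((y - (m*x + b))**2) w.r.t. b and m.
        b_gradient += -(2 / n) * (y - (m_current * x + b_current))
        m_gradient += -(2 / n) * x * (y - (m_current * x + b_current))
    # Step against the gradient, scaled by the learning rate.
    new_b = b_current - learning_rate * b_gradient
    new_m = m_current - learning_rate * m_gradient
    return new_b, new_m

With learning_rate = 0.0001 and 1000 iterations, as in run(), repeated steps like this move b and m toward the same line the closed-form fit gives, just more slowly.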