What I am trying to achieve is, first, to read the integer variable values defined globally and insert them into a textbox when the program is run; second, I need to enable the user to change those values if they wish.
the code is:
from numpy import *
import numpy as np
import matplotlib.pyplot as plt
import os
from matplotlib import style
from utils import DATA_DIR
from tkinter import *

style.use("ggplot")

# y = mx + b  (m is slope, b is y-intercept)

## Creating the interface
root = Tk()
root.wm_title("Linear Regression")
root.geometry("450x450+500+300")

### Load data: a two-column CSV -> x (column 0) and y (column 1)
data = np.loadtxt(os.path.join(DATA_DIR, "data.csv"), delimiter=",")
x = data[:, 0]
y = data[:, 1]

# Default hyper-parameters, kept global so the GUI and run() share them.
LRrtt = 0.00001   # learning rate
initb = 35        # initial y-intercept guess
initm = 0         # initial slope guess
iternum = 50      # number of gradient-descent iterations

## Interface labels and entry boxes.
# BUG FIX: Entry's textvariable option must be a tkinter Variable
# (IntVar/DoubleVar), not a plain Python number, or the widget cannot
# display or track the value.  Also, `Entry(...).grid(...)` returns None,
# so the widget must be created first and gridded separately to keep a
# usable reference.
Label(root, text="Initial intercept").grid(row=0, sticky='E')
Label(root, text="Initial slope").grid(row=1, sticky='E')
Label(root, text="learning rate").grid(row=2, sticky='E')
Label(root, text="num_iteration").grid(row=3, sticky='E')

# Variables seeded from the global defaults so the boxes show them on start;
# the user can edit the boxes, and the current values can be read back with
# var_b.get(), var_m.get(), etc.
var_b = IntVar(value=initb)
var_m = IntVar(value=initm)
var_lr = DoubleVar(value=LRrtt)
var_iter = IntVar(value=iternum)

txtb = Entry(root, textvariable=var_b)
txtb.grid(row=0, column=1)
txtm = Entry(root, textvariable=var_m)
txtm.grid(row=1, column=1)
txtlr = Entry(root, textvariable=var_lr)
txtlr.grid(row=2, column=1)
txtiter = Entry(root, textvariable=var_iter)
txtiter.grid(row=3, column=1)
### Defining error function f(x)
def compute_error_for_line_given_points(b, m, points):
    """Mean squared error of the line y = m*x + b over `points`.

    Parameters
    ----------
    b, m : float
        Intercept and slope of the candidate line.
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.

    Returns
    -------
    float
        Total squared error divided by N (the mean squared residual).
    """
    pts = np.asarray(points, dtype=float)
    xs = pts[:, 0]
    ys = pts[:, 1]
    # Vectorized form of the original per-row loop:
    # sum over i of (y_i - (m*x_i + b))^2, divided by N.
    return float(np.sum((ys - (m * xs + b)) ** 2) / len(pts))
### Initialising the search for the best-fit line (starting from any m & b):
### each call performs one step of gradient descent, reducing the error
### function f(x) toward the best-fit line.
def step_gradient(b_current, m_current, points, learningRate):
    """Perform one gradient-descent step on the mean-squared-error surface.

    Parameters
    ----------
    b_current, m_current : float
        Current intercept and slope.
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.
    learningRate : float
        Step size applied to each partial derivative.

    Returns
    -------
    list[float]
        [new_b, new_m] after one update.
    """
    pts = np.asarray(points, dtype=float)
    xs = pts[:, 0]
    ys = pts[:, 1]
    n = float(len(pts))
    # Residuals of the current line; MSE partial derivatives follow
    # (vectorized form of the original accumulation loop).
    residuals = ys - (m_current * xs + b_current)
    b_gradient = float(np.sum(-(2.0 / n) * residuals))
    m_gradient = float(np.sum(-(2.0 / n) * xs * residuals))
    # Step downhill: move opposite the gradient, scaled by the learning rate.
    new_b = b_current - learningRate * b_gradient
    new_m = m_current - learningRate * m_gradient
    return [new_b, new_m]
def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    """Run `num_iterations` gradient-descent steps, animating the fit.

    Each iteration plots the current candidate line over the raw data
    (module-level `x`, `y`) and pauses briefly so the figure animates.

    Parameters
    ----------
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.
    starting_b, starting_m : float
        Initial intercept and slope.
    learning_rate : float
        Step size passed to step_gradient.
    num_iterations : int
        Number of descent steps to take.

    Returns
    -------
    list[float]
        [b, m] after the final iteration.
    """
    b = starting_b
    m = starting_m
    plt.ion()  # interactive mode so the figure updates between iterations
    # Axis labels are loop-invariant, so set them once instead of every pass.
    plt.xlabel("Age")
    plt.ylabel("height")
    for _ in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
        # Overlay the current candidate line and the data points.
        plt.plot(x, m * x + b, '-')
        plt.scatter(x, y, label='Points', color='k', s=20, marker='*')
        plt.pause(0.2)
        plt.draw()
    print("New values for b and m: ")
    return [b, m]
def run():
    """Fit a line to the loaded data by gradient descent and report progress.

    Reads the module-level defaults (data, LRrtt, initb, initm, iternum),
    prints the starting error, runs the descent, and prints the final
    b, m, and error.

    NOTE(review): this reads the module-level numbers, not the GUI entry
    boxes, so values typed into the interface are not picked up here —
    confirm whether the entries should be read via their textvariables.
    """
    points = data
    learning_rate = LRrtt
    initial_b = initb    # initial y-intercept guess
    initial_m = initm    # initial slope guess
    num_iterations = iternum
    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(
        initial_b, initial_m,
        compute_error_for_line_given_points(initial_b, initial_m, points)))
    print("Running... ")
    b, m = gradient_descent_runner(points, initial_b, initial_m,
                                   learning_rate, num_iterations)
    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(
        num_iterations, b, m,
        compute_error_for_line_given_points(b, m, points)))
## Action buttons: "Start Analysis" triggers run(); "Quit" exits.
Button_sub = Button(root, text="Start Analysis", command=run)
Button_sub.grid(row=4, column=0, sticky=W, pady=4)
Button_exit = Button(root, text='Quit', command=quit)
Button_exit.grid(row=5, column=0, sticky=W, pady=4)

root.mainloop()

# NOTE(review): mainloop() blocks until the window is closed, so this run()
# only executes after the GUI exits — confirm that is intended (the button
# already triggers run() while the window is open).
if __name__ == '__main__':
    run()
### /show plot
the code is:
from numpy import *
import numpy as np
import matplotlib.pyplot as plt
import os
from matplotlib import style
from utils import DATA_DIR
from tkinter import *

style.use("ggplot")

# y = mx + b  (m is slope, b is y-intercept)

## Creating the interface
root = Tk()
root.wm_title("Linear Regression")
root.geometry("450x450+500+300")

### Load data: a two-column CSV -> x (column 0) and y (column 1)
data = np.loadtxt(os.path.join(DATA_DIR, "data.csv"), delimiter=",")
x = data[:, 0]
y = data[:, 1]

# Default hyper-parameters, kept global so the GUI and run() share them.
LRrtt = 0.00001   # learning rate
initb = 35        # initial y-intercept guess
initm = 0         # initial slope guess
iternum = 50      # number of gradient-descent iterations

## Interface labels and entry boxes.
# BUG FIX: Entry's textvariable option must be a tkinter Variable
# (IntVar/DoubleVar), not a plain Python number, or the widget cannot
# display or track the value.  Also, `Entry(...).grid(...)` returns None,
# so the widget must be created first and gridded separately to keep a
# usable reference.
Label(root, text="Initial intercept").grid(row=0, sticky='E')
Label(root, text="Initial slope").grid(row=1, sticky='E')
Label(root, text="learning rate").grid(row=2, sticky='E')
Label(root, text="num_iteration").grid(row=3, sticky='E')

# Variables seeded from the global defaults so the boxes show them on start;
# the user can edit the boxes, and the current values can be read back with
# var_b.get(), var_m.get(), etc.
var_b = IntVar(value=initb)
var_m = IntVar(value=initm)
var_lr = DoubleVar(value=LRrtt)
var_iter = IntVar(value=iternum)

txtb = Entry(root, textvariable=var_b)
txtb.grid(row=0, column=1)
txtm = Entry(root, textvariable=var_m)
txtm.grid(row=1, column=1)
txtlr = Entry(root, textvariable=var_lr)
txtlr.grid(row=2, column=1)
txtiter = Entry(root, textvariable=var_iter)
txtiter.grid(row=3, column=1)
### Defining error function f(x)
def compute_error_for_line_given_points(b, m, points):
    """Mean squared error of the line y = m*x + b over `points`.

    Parameters
    ----------
    b, m : float
        Intercept and slope of the candidate line.
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.

    Returns
    -------
    float
        Total squared error divided by N (the mean squared residual).
    """
    pts = np.asarray(points, dtype=float)
    xs = pts[:, 0]
    ys = pts[:, 1]
    # Vectorized form of the original per-row loop:
    # sum over i of (y_i - (m*x_i + b))^2, divided by N.
    return float(np.sum((ys - (m * xs + b)) ** 2) / len(pts))
### Initialising the search for the best-fit line (starting from any m & b):
### each call performs one step of gradient descent, reducing the error
### function f(x) toward the best-fit line.
def step_gradient(b_current, m_current, points, learningRate):
    """Perform one gradient-descent step on the mean-squared-error surface.

    Parameters
    ----------
    b_current, m_current : float
        Current intercept and slope.
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.
    learningRate : float
        Step size applied to each partial derivative.

    Returns
    -------
    list[float]
        [new_b, new_m] after one update.
    """
    pts = np.asarray(points, dtype=float)
    xs = pts[:, 0]
    ys = pts[:, 1]
    n = float(len(pts))
    # Residuals of the current line; MSE partial derivatives follow
    # (vectorized form of the original accumulation loop).
    residuals = ys - (m_current * xs + b_current)
    b_gradient = float(np.sum(-(2.0 / n) * residuals))
    m_gradient = float(np.sum(-(2.0 / n) * xs * residuals))
    # Step downhill: move opposite the gradient, scaled by the learning rate.
    new_b = b_current - learningRate * b_gradient
    new_m = m_current - learningRate * m_gradient
    return [new_b, new_m]
def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    """Run `num_iterations` gradient-descent steps, animating the fit.

    Each iteration plots the current candidate line over the raw data
    (module-level `x`, `y`) and pauses briefly so the figure animates.

    Parameters
    ----------
    points : (N, 2) array-like
        Column 0 holds x values, column 1 holds y values.
    starting_b, starting_m : float
        Initial intercept and slope.
    learning_rate : float
        Step size passed to step_gradient.
    num_iterations : int
        Number of descent steps to take.

    Returns
    -------
    list[float]
        [b, m] after the final iteration.
    """
    b = starting_b
    m = starting_m
    plt.ion()  # interactive mode so the figure updates between iterations
    # Axis labels are loop-invariant, so set them once instead of every pass.
    plt.xlabel("Age")
    plt.ylabel("height")
    for _ in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
        # Overlay the current candidate line and the data points.
        plt.plot(x, m * x + b, '-')
        plt.scatter(x, y, label='Points', color='k', s=20, marker='*')
        plt.pause(0.2)
        plt.draw()
    print("New values for b and m: ")
    return [b, m]
def run():
    """Fit a line to the loaded data by gradient descent and report progress.

    Reads the module-level defaults (data, LRrtt, initb, initm, iternum),
    prints the starting error, runs the descent, and prints the final
    b, m, and error.

    NOTE(review): this reads the module-level numbers, not the GUI entry
    boxes, so values typed into the interface are not picked up here —
    confirm whether the entries should be read via their textvariables.
    """
    points = data
    learning_rate = LRrtt
    initial_b = initb    # initial y-intercept guess
    initial_m = initm    # initial slope guess
    num_iterations = iternum
    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(
        initial_b, initial_m,
        compute_error_for_line_given_points(initial_b, initial_m, points)))
    print("Running... ")
    b, m = gradient_descent_runner(points, initial_b, initial_m,
                                   learning_rate, num_iterations)
    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(
        num_iterations, b, m,
        compute_error_for_line_given_points(b, m, points)))
## Action buttons: "Start Analysis" triggers run(); "Quit" exits.
Button_sub = Button(root, text="Start Analysis", command=run)
Button_sub.grid(row=4, column=0, sticky=W, pady=4)
Button_exit = Button(root, text='Quit', command=quit)
Button_exit.grid(row=5, column=0, sticky=W, pady=4)

root.mainloop()

# NOTE(review): mainloop() blocks until the window is closed, so this run()
# only executes after the GUI exits — confirm that is intended (the button
# already triggers run() while the window is open).
if __name__ == '__main__':
    run()
### /show plot