Demo entry 6899791

Submitted by anonymous on Oct 09, 2019 at 00:30
Language: Python 3. Code size: 2.1 kB.

import numpy as np
import random
import matplotlib.pyplot as plt
import math

def readFile():
    """Read ex1data2.txt: one 'size,bedrooms,price' record per line."""
    size = []
    numofbed = []
    price = []
    with open("ex1data2.txt") as f:
        for line in f:
            a = line.split(",")
            size.append(int(a[0]))
            numofbed.append(int(a[1]))
            price.append(int(a[2]))
    return size, numofbed, price
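
# A lighter alternative (a sketch, assuming ex1data2.txt stays a comma-separated file of
# three numeric columns) would be to load everything in one call with np.loadtxt:
#
#     data = np.loadtxt("ex1data2.txt", delimiter=",")
#     size, numofbed, price = data[:, 0], data[:, 1], data[:, 2]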

if __name__ == '__main__':

    size,numofbed,price = readFile()

    price_t = np.asarray(price)
    N = len(price)    # number of training examples
    a = np.ones(N)    # column of ones for the bias term
    data_x = np.asarray([a, size, numofbed]).T   # N x 3 design matrix; first column is the bias
    print(data_x.shape)


    old_list = range(N)  # index range 0..N-1

    random.seed(10)  # fix the seed so the shuffle is reproducible

    new_list = random.sample(old_list, N)   # shuffle the data by shuffling the indices
  
    w = np.ones(3)  # initial weights: [bias, size, bedrooms]

    loss_list = []

    count = 0   # number of SGD updates performed

    index = 0   # position in the shuffled index list

    learning_rate = 0.0000001
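    # Note: the inputs are not feature-scaled (house sizes are in the thousands), so this
    # learning rate is kept very small to stop the gradient steps from diverging.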

    while count < N * 10:  # 10 full passes (epochs) over the shuffled data

        count += 1

        old_index = new_list[index]  # take the next index from the shuffled list

        price_sample = price_t[old_index]   # target y for this sample

        data_sample = data_x[old_index]     # feature row x for this sample

        temp = np.dot(data_sample, w) - price_sample  # residual for this sample: x.w - y

        loss = np.dot(temp, temp.T)  # squared error for this single sample (N = 1)

        loss_list.append(loss)

        gradient = np.dot(data_sample.T, temp) * 2  # gradient of the squared error: 2 x.T (x.w - y)

        w = w - learning_rate * gradient  # gradient descent update

        index = (index + 1) % N  # advance through the shuffled indices, wrapping after one pass

    print(w)
    print(count)
    plt.plot(range(count), loss_list)  # per-sample squared error over the SGD updates
    plt.show()

    temp = np.dot(data_x, w) - price_t  # residual vector over the whole data set: Xw - y
    loss = np.dot(temp, temp.T) / N     # mean squared error: (Xw - y).T (Xw - y) / N
    print(loss)
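
    # Sanity check (a sketch reusing the arrays built above): the closed-form least-squares
    # solution minimizes the same mean squared error, so the SGD weights printed above
    # should approach it as more passes over the data are made.
    w_closed, *_ = np.linalg.lstsq(data_x, price_t, rcond=None)
    print(w_closed)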