Demo entry 6657945

test.py

   

Submitted by 高鑫 on Nov 05, 2017 at 16:06
Language: Python. Code size: 1.5 kB.

# encoding: utf-8
import os
# A worked example: the training set and the parameter sizes are fixed.
# Each entry of training_set is [xi, yi]: a 3-dimensional binary feature vector xi and its label yi (+1 or -1).
training_set = [[(0, 0, 0), 1], [(1, 0, 0), 1], [(1, 0, 1), 1], [(1, 1, 0), 1],
                [(0, 0, 1), -1], [(0, 1, 1), -1], [(0, 1, 0), -1], [(1, 1, 1), -1]]
w = [0, 0, 0]
b = 0
 
# update the parameters with the perceptron's stochastic gradient descent step:
#   w = w + eta * yi * xi,  b = b + eta * yi   (learning rate eta = 1 here)
def update(item):
    global w, b
    w[0] = w[0] + 1 * item[1] * item[0][0]
    w[1] = w[1] + 1 * item[1] * item[0][1]
    w[2] = w[2] + 1 * item[1] * item[0][2]
    b = b + 1 * item[1]
    print(w, b)  # prints the intermediate w and b after each update; comment this line out to silence it
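
# A worked single step (illustration only, not part of the script's flow):
# starting from w = [0, 0, 0], b = 0, calling update([(1, 0, 1), 1]) yields
#   w = [0 + 1*1*1, 0 + 1*1*0, 0 + 1*1*1] = [1, 0, 1]
#   b = 0 + 1*1 = 1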
 
# compute the functional margin yi * (w . xi + b) between 'item' and the decision surface
# a positive value means 'item' lies on the correct side of the hyperplane
def cal(item):
    global w, b
    res = 0
    for i in range(len(item[0])):
        res += item[0][i] * w[i]
    res += b
    res *= item[1]
    return res
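
# A worked margin check (illustration only): with w = [1, 0, 1], b = 1, the sample
# [(0, 0, 1), -1] gives yi * (w . xi + b) = -1 * (0*1 + 0*0 + 1*1 + 1) = -2,
# which is <= 0, so check() below would call update() on it.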
 
# check whether the current hyperplane classifies every training example correctly
def check():
    flag = False
    for item in training_set:   # for each training sample
        if cal(item) <= 0:      # non-positive margin: the sample is misclassified, so update
            flag = True         # remember that the parameters changed in this pass
            update(item)
    if not flag:                # no update over a full pass: the hyperplane is found
        print("RESULT: w: " + str(w) + " b: " + str(b))
        os._exit(0)
 
if __name__ == "__main__":
    for i in range(1000):       # at most 1000 passes over the training set
        check()
    print("The training_set is not linearly separable.")
