Softmax Regression

1.简单介绍

Softmax Regression算法是Logistic Regression算法在多分类问题上的推广,主要用于处理多分类问题,其中,任意两个类之间是线性可分的。

2.算法模型

(1).假设函数

Softmax Regression算法是Logistic Regression算法在多分类上的推广,即类标签
y的取值大于或等于2。假设有 $m$ 个训练样本 $\{(X^{(1)}, y^{(1)}), (X^{(2)}, y^{(2)}), \ldots, (X^{(m)}, y^{(m)})\}$，对于Softmax Regression算法，其输入特征为 $X^{(i)} \in R^{n+1}$，类标记为 $y^{(i)} \in \{1, 2, \ldots, k\}$。
假设函数为:
$$h_{\theta}\left(X^{(i)}\right)=\left[\begin{array}{c}P\left(y^{(i)}=1 \mid X^{(i)} ; \theta\right) \\ P\left(y^{(i)}=2 \mid X^{(i)} ; \theta\right) \\ \vdots \\ P\left(y^{(i)}=k \mid X^{(i)} ; \theta\right)\end{array}\right]=\frac{1}{\sum_{j=1}^{k} e^{\theta_{j}^{T} X^{(i)}}}\left[\begin{array}{c}e^{\theta_{1}^{T} X^{(i)}} \\ e^{\theta_{2}^{T} X^{(i)}} \\ \vdots \\ e^{\theta_{k}^{T} X^{(i)}}\end{array}\right]$$
则对于每一个样本估计其所属的类别的概率为:
$$P\left(y^{(i)}=j \mid X^{(i)} ; \theta\right)=\frac{e^{\theta_{j}^{T} X^{(i)}}}{\sum_{l=1}^{k} e^{\theta_{l}^{T} X^{(i)}}}$$

(2).损失函数

在Softmax Regression算法的损失函数中引入指示函数,具体形式为:
$$I\{x\}=\begin{cases}0, & \text{if } x = \text{false} \\ 1, & \text{if } x = \text{true}\end{cases}$$
则其损失函数为:
$$J(\theta)=-\frac{1}{m}\left[\sum_{i=1}^{m} \sum_{j=1}^{k} I\left\{y^{(i)}=j\right\} \log \frac{e^{\theta_{j}^{T} X^{(i)}}}{\sum_{l=1}^{k} e^{\theta_{l}^{T} X^{(i)}}}\right]$$

3.算法的求解

对于上述的损失函数,可以使用梯度下降法对其进行求解
$$\nabla_{\theta_{j}} J(\theta)=-\frac{1}{m} \sum_{i=1}^{m}\left[\nabla_{\theta_{j}} \sum_{l=1}^{k} I\left\{y^{(i)}=l\right\} \log \frac{e^{\theta_{l}^{T} X^{(i)}}}{\sum_{s=1}^{k} e^{\theta_{s}^{T} X^{(i)}}}\right]$$
最终结果为:
$$\nabla_{\theta_{j}} J(\theta)=-\frac{1}{m} \sum_{i=1}^{m}\left[X^{(i)} \cdot\left(I\left\{y^{(i)}=j\right\}-P\left(y^{(i)}=j \mid X^{(i)} ; \theta\right)\right)\right]$$
然后通过梯度下降法的公式可以更新:
$$\theta_{j}=\theta_{j}-\alpha \nabla_{\theta_{j}} J(\theta)$$

4.算法实践

import numpy as np
import matplotlib.pyplot as plt

def load_data(inputfile):
    '''Load the training data from a tab-separated text file.
    input:  inputfile(string) path of the training file; each line holds
            the feature values followed by an integer class label
    output: feature_data(mat) features with a leading bias column of 1s
            label_data(mat)   column vector of integer class labels
            k(int)            number of distinct classes seen in the file
    '''
    feature_data = []
    label_data = []
    # "with" guarantees the file is closed even if a malformed line raises.
    with open(inputfile) as f:
        for line in f.readlines():
            lines = line.strip().split("\t")
            # bias term first, then the feature columns
            feature_tmp = [1] + [float(x) for x in lines[:-1]]
            label_data.append(int(lines[-1]))
            feature_data.append(feature_tmp)
    return np.mat(feature_data), np.mat(label_data).T, len(set(label_data))
    
def cost(err, label_data):
    '''Compute the mean cross-entropy loss of the current model.
    input:  err(mat): exp(features * weights), one row per sample and one
            column per class (unnormalized class scores)
    output: label_data(mat): column vector of integer labels in [0, k)
    output: sum_cost / m(float): mean negative log-likelihood
    '''
    m = np.shape(err)[0]
    sum_cost = 0.0
    for i in range(m):
        # probability the model assigns to the true class of sample i;
        # computed once instead of twice as in the original version
        p = err[i, label_data[i, 0]] / np.sum(err[i, :])
        # guard against log(0); a non-positive probability contributes nothing
        if p > 0:
            sum_cost -= np.log(p)
    return sum_cost / m
    
def gradientAscent(feature_data, label_data, k, maxCycle, alpha):
    '''Train a Softmax model by gradient ascent on the log-likelihood.
    input:  feature_data(mat): features, one sample per row (bias included)
            label_data(mat): column vector of integer labels in [0, k)
            k(int): number of classes
            maxCycle(int): maximum iteration count
            alpha(float): learning rate
    output: weights(mat): learned (n x k) weight matrix
    '''
    m, n = np.shape(feature_data)
    # one weight column per class, initialized to all ones
    weights = np.mat(np.ones((n, k)))
    for it in range(maxCycle + 1):
        err = np.exp(feature_data * weights)
        # report progress every 500 iterations
        if it % 500 == 0:
            print ("\t-----iter: ", it , ", cost: ", cost(err, label_data))
        # normalize rows by the NEGATIVE row sum, so err becomes -P(y=j|x)
        neg_totals = -err.sum(axis=1).repeat(k, axis=1)
        err = err / neg_totals
        # add the indicator of the true class: err is now I{y=j} - P(y=j|x)
        for row in range(m):
            err[row, label_data[row, 0]] += 1
        weights = weights + (alpha / m) * feature_data.T * err
    return weights
    
def save_model(file_name, weights):
    '''Write the trained softmax weight matrix to a tab-separated text file.
    input:  file_name(string): destination file name
            weights(mat): weight matrix of the softmax model
    '''
    rows, cols = np.shape(weights)
    with open(file_name, "w") as f_w:
        for r in range(rows):
            # one matrix row per line, values separated by tabs
            line = "\t".join(str(weights[r, c]) for c in range(cols))
            f_w.write(line + "\n")
    
def draw(weight,file_name):
    '''Scatter-plot the samples in file_name, coloured by their class label.
    input:  weight: unused (kept for call-site compatibility)
            file_name(string): whitespace-separated file of x, y, label rows
    '''
    xs = [[], [], [], []]
    ys = [[], [], [], []]
    palette = ['red', 'green', 'blue', 'yellow']
    f = open(file_name, 'r')
    for line in f.readlines():
        parts = line.strip().split()
        # labels '0', '1', '2' get their own bucket; everything else is 4th
        if parts[2] == '0':
            bucket = 0
        elif parts[2] == '1':
            bucket = 1
        elif parts[2] == '2':
            bucket = 2
        else:
            bucket = 3
        xs[bucket].append(float(parts[0]))
        ys[bucket].append(float(parts[1]))
    fig = plt.figure()
    ax = fig.add_subplot(111)
    for bucket in range(4):
        ax.scatter(xs[bucket], ys[bucket], s=10, c=palette[bucket])
    plt.show()
    
def _run_training():
    """Load the data, train the softmax model, save it and plot the samples."""
    inputfile = "SoftInput.txt"
    # Step 1: load features, labels and class count from disk.
    print ("---------- 1.load data ------------")
    feature, label, k = load_data(inputfile)
    # Step 2: fit the model (10000 iterations, learning rate 0.4).
    print ("---------- 2.training ------------")
    weights = gradientAscent(feature, label, k, 10000, 0.4)
    # Step 3: persist the learned weights.
    print ("---------- 3.save model ------------")
    save_model("weights", weights)
    # Step 4: visualize the training samples by class.
    draw(weights,inputfile)

if __name__ == "__main__":
    _run_training()

机器学习之Softmax Regression


import numpy as np
import random as rd
import matplotlib.pyplot as plt
def load_weights(weights_path):
	'''Load a trained Softmax weight matrix from a tab-separated text file.
	input:  weights_path(string) path of the saved weights
	output: weights(mat) weight matrix read from the file
	        m(int) number of rows of the matrix
	        n(int) number of columns of the matrix
	'''
	# "with" guarantees the handle is closed even if a value fails to parse.
	with open(weights_path) as f:
		w = [[float(x) for x in line.strip().split("\t")] for line in f]
	weights = np.mat(w)
	m, n = np.shape(weights)
	return weights, m, n


def load_data(num, m):
	'''Generate random test samples for the trained model.
	input:  num(int) number of samples to generate
	        m(int) dimension of each sample (>= 3; column 0 is the bias)
	output: testDataSet(mat) generated (num x m) sample matrix
	'''
	samples = np.mat(np.ones((num, m)))
	for row in range(num):
		# column 0 keeps the bias value 1.0 set by np.ones above
		samples[row, 1] = rd.random() * 6 - 3   # uniform in [-3, 3)
		samples[row, 2] = rd.random() * 15      # uniform in [0, 15)
	return samples


def predict(test_data, weights):
	'''Classify each test sample with the trained Softmax model.
	input:  test_data(mat) test features, one sample per row
	        weights(mat) trained weight matrix
	output: h.argmax(axis=1) column vector with the predicted class index
	'''
	# The softmax denominator is shared by every class score, so the
	# largest linear score already identifies the most probable class.
	scores = test_data * weights
	return scores.argmax(axis=1)


def save_result(file_name, result):
	'''Write the final predictions to a file, one class index per line.
	input:  file_name(string): destination file name
	        result(mat): column vector of predicted class indices
	'''
	rows = np.shape(result)[0]
	with open(file_name, "w") as f_out:
		for r in range(rows):
			f_out.write(str(result[r, 0]) + "\n")


def draw(test_data,result):
	'''Scatter-plot the test samples, coloured by their predicted class.
	input:  test_data(mat) samples; columns 1 and 2 are the x/y coordinates
	        result(mat) column vector of predicted class indices
	'''
	xs = [[], [], [], []]
	ys = [[], [], [], []]
	palette = ['red', 'green', 'blue', 'yellow']
	for i in range(len(result)):
		label = result[i, 0]
		# classes 0, 1, 2 get their own bucket; anything else goes last
		bucket = int(label) if label in (0, 1, 2) else 3
		xs[bucket].append(float(test_data[i, 1]))
		ys[bucket].append(float(test_data[i, 2]))
	fig = plt.figure()
	ax = fig.add_subplot(111)
	for bucket in range(4):
		ax.scatter(xs[bucket], ys[bucket], s=10, c=palette[bucket])
	plt.show()


def _run_prediction():
	"""Load the saved model, generate test data, predict and plot."""
	# Step 1: restore the trained weight matrix from disk.
	print ("---------- 1.load model ------------")
	w, m , n = load_weights("weights")
	# Step 2: generate 4000 random test samples of matching dimension.
	print ("---------- 2.load data ------------")
	test_data = load_data(4000, m)
	# Step 3: classify every sample with the restored model.
	print ("---------- 3.get Prediction ------------")
	result = predict(test_data, w)
	print(len(result))
	# Step 4: persist the predictions, then visualize them.
	print ("---------- 4.save prediction ------------")
	save_result("result", result)
	draw(test_data,result)

if __name__ == "__main__":
	_run_prediction()

机器学习之Softmax Regression

相关文章: