python softmax函数

单维度 argmax,max函数:

# Demo: argmax returns the *index* of the largest element; max returns the value.
import numpy as np

a = np.array([3, 1, 2, 4, 6, 1])
print(np.argmax(a))  # -> 4 (index of the largest value, 6)

print(a.max())  # -> 6

softmax 之后,取分数最大的那一维的下标(argmax),就是预测类别,对应的分数就是预测的置信度

python numpy softmax函数:

import numpy as np
def softmax(x, axis=0):
    """Return the softmax of ``x`` along ``axis``.

    Parameters
    ----------
    x : np.ndarray
        Input scores. For a 1-D vector (or column vectors) keep the
        default ``axis=0``; pass another axis for row-wise softmax.
    axis : int, optional
        Axis along which probabilities are normalized (default 0).

    Returns
    -------
    np.ndarray
        Same shape as ``x``; entries along ``axis`` sum to 1.
    """
    # Subtract the running max before exponentiating: softmax is
    # shift-invariant, and this prevents np.exp overflow for large inputs.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    x_exp = np.exp(shifted)
    x_sum = np.sum(x_exp, axis=axis, keepdims=True)
    return x_exp / x_sum


# Feed a sample score vector through softmax; the argmax of the resulting
# probabilities is the predicted class, soft_v[index_v] its confidence.
data = np.array([1, 0.5, -2, 2, 5])
soft_v = softmax(data)
index_v = soft_v.argmax(axis=0)

print(index_v, round(soft_v[index_v], 3))
print(soft_v)

pytorch softmax函数用法:

    import torch
    import numpy as np
    import torch.nn.functional as torch_F

    # A (1, 2, 2) batch of raw scores: one batch, two samples, two classes.
    data = np.array([[[0.5, -0.5], [-0.05, -0.05]]])
    t_data = torch.from_numpy(data.astype(np.float32))

    # Normalize over the last dimension so each row of class scores sums to 1.
    scores = torch_F.softmax(t_data, dim=-1)
    print(scores)

    # Drop the batch dimension and keep column 1: the class-1 probabilities.
    scores = scores.squeeze(0).data.cpu().numpy()[:, 1]
    print(scores)

softmax 计算公式如下:值越大,分数越高,但两者不是正比例关系;指数归一化还能避免直接求和或相乘时 -0.5 与 0.5 这类正负号互相抵消的问题。

两组值是不一样的:

# Softmax is not scale-invariant: [1, 2] and [0.1, 0.2] have the same
# ordering but produce very different probability gaps.
import numpy as np

for z in (np.array([1.0, 2.0]), np.array([0.1, 0.2])):
    e = np.exp(z)
    print(e / e.sum())

# Pure-Python softmax with math.exp (no numpy required).
import math

z = [1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0]
z_exp = [math.exp(i) for i in z]
print(z_exp)  # [2.72, 7.39, 20.09, 54.6, 2.72, 7.39, 20.09] (rounded)
sum_z_exp = sum(z_exp)
print(sum_z_exp)  # 114.98...
# NOTE: this list rebinds the name `softmax` and would shadow a function of
# the same name if run in the same session.
softmax = [round(i / sum_z_exp, 3) for i in z_exp]
print(softmax)  # [0.024, 0.064, 0.175, 0.475, 0.024, 0.064, 0.175]
# Sample interpreter output:
# [2.718281828459045, 7.38905609893065, 20.085536923187668, 54.598150033144236,
#  2.718281828459045, 7.38905609893065, 20.085536923187668]
# 114.98389973429897
# [0.024, 0.064, 0.175, 0.475, 0.024, 0.064, 0.175]

Python使用numpy计算的示例代码:

# The same softmax, vectorized with numpy.
import numpy as np

z = np.array([1.0, 2.0, 3.0, 4.0, 1.0, 2.0, 3.0])
exp_z = np.exp(z)
print(exp_z / exp_z.sum())

版权声明:本文为jacke121原创文章,遵循CC 4.0 BY-SA版权协议,转载请附上原文出处链接和本声明。