Implementing a simple neural network in pure numpy (for understanding)

A simple numpy neural network example: a single neuron with three inputs and one sigmoid output, trained by repeatedly adjusting its weights:

import numpy as np


class NeuralNetwork():
    def __init__(self):
        # Initialize the weights as a 3x1 matrix with values between -1 and 1 and a mean of 0
        self.synaptic_weights = 2 * np.random.random((3, 1)) - 1

    def sigmoid(self, x):
        # The sigmoid activation function
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        # Derivative of the sigmoid (used for backpropagation).
        # x is assumed to already be a sigmoid output, so the derivative is x * (1 - x)
        return x * (1 - x)
        
    # Training
    def train(self, training_inputs, training_outputs, training_iterations):
        # Train the model by adjusting the weights until it makes accurate predictions
        for iteration in range(training_iterations):
            # Pass the training data through the network
            output = self.think(training_inputs)
            # Compute the error used for backpropagation
            error = training_outputs - output
            # Adjust the weights: for each input column, sum input * error * sigmoid'(output)
            # over all training examples in a single matrix multiplication
            adjustments = np.dot(training_inputs.T, error * self.sigmoid_derivative(output))
            self.synaptic_weights += adjustments

    def think(self, inputs):
        # Pass the inputs through the neuron to get an output
        # Convert the values to floats first
        inputs = inputs.astype(float)
        output = self.sigmoid(np.dot(inputs, self.synaptic_weights))
        return output
 
if __name__ == "__main__":
    # Initialize the neural network
    neural_network = NeuralNetwork()
    print("Initial weights: ")
    print(neural_network.synaptic_weights)
    # 4 training examples, each with 3 inputs and 1 output
    training_inputs = np.array([[0, 0, 1],
                                [1, 1, 1],
                                [1, 0, 1],
                                [0, 1, 1]])
    training_outputs = np.array([[0, 1, 1, 0]]).T
    # Train
    neural_network.train(training_inputs, training_outputs, 20000)
    print("Weights after training: ")
    print(neural_network.synaptic_weights)
    user_input_one = input("Enter the first value: ")
    user_input_two = input("Enter the second value: ")
    user_input_three = input("Enter the third value: ")
    print("New input data:", user_input_one, user_input_two, user_input_three)
    print("Model output: ")
    print(neural_network.think(np.array([user_input_one, user_input_two, user_input_three])))
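
As a quick sanity check (an illustrative sketch that is not part of the original post, reusing the NeuralNetwork class defined above), you can train on the same four examples and print batch predictions. Since the target is simply the first input column, the outputs should end up close to 0, 1, 1 and 0:

import numpy as np

# Illustrative verification snippet (assumes the NeuralNetwork class above is already defined)
nn = NeuralNetwork()
X = np.array([[0, 0, 1],
              [1, 1, 1],
              [1, 0, 1],
              [0, 1, 1]])
y = np.array([[0, 1, 1, 0]]).T
nn.train(X, y, 20000)
print(nn.think(X))            # each value should be close to the targets 0, 1, 1, 0
print(np.round(nn.think(X)))  # rounding should recover the exact target column

Because the neuron effectively learns that the output follows the first input, a new input such as [1, 0, 0] should also produce a value close to 1.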

Reposted from blog.csdn.net/ws18921818762/article/details/85112568