# BP neural network (single hidden layer) for 3-class iris classification.
import numpy as np
import pandas as pd
from sklearn.datasets import load_iris
def get_data(name): ''' 获取数据 :param name: 文件名 :return:x, y ''' data_sets = pd.read_csv(name, header=None) x = data_sets.iloc[:, 0:4].values.T y = data_sets.iloc[:, 4:].values.T y = y.astype("uint8") return x, y
''' 构建一个具有1个隐藏层的神经网络,隐层的大小为10 输入层为4个特征,输出层为3个分类 (1,0,0)为第一类,(0,1,0)为第二类,(0,0,1)为第三类 ''' class MyBPNN(): def __init__(self, epochs, n_hide, n_input, n_output, learning_rate): ''' 初始化BP神经网络 :param epochs: 总训练次数 :param n_hide: 隐层节点数量 :param n_input: 输入层节点数量 :param n_output: 输出层节点数量 :param learning_rate: 学习率 ''' self.epochs = epochs self.n_hide = n_hide self.n_input = n_input self.n_output = n_output self.learning_rate = learning_rate
def _initialize_parameters(self): ''' 初始化权重和偏置矩阵 :return: ''' np.random.seed(2)
self.w1 = np.random.randn(self.n_hide, self.n_input) * 0.01 self.b1 = np.zeros(shape=(self.n_hide, 1)) self.w2 = np.random.randn(self.n_output, self.n_hide) * 0.01 self.b2 = np.zeros(shape=(self.n_output, 1))
def _forward_propagation(self): ''' 前向传播计算a2 :return: ''' self.z1 = np.dot(self.w1 , self.x_train) + self.b1 self.a1 = np.tanh(self.z1) self.z2 = np.dot(self.w2, self.a1) + self.b2 self.a2 = 1 / (1 + np.exp(-self.z2))
def _compute_cost(self): ''' 计算代价函数 :return: '''
log = np.multiply(np.log(self.a2), self.y_train) + np.multiply((1 - self.y_train), np.log(1 - self.a2)) self.cost = - np.sum(log) / self.number
def _backward_propagation(self): ''' 反向传播(计算代价函数的导数) :return: ''' self.dz2 = self.a2 - self.y_train self.dw2 = (1 / self.number) * np.dot(self.dz2, self.a1.T) self.db2 = (1 / self.number) * np.sum(self.dz2, axis=1, keepdims=True)
self.dz1 = np.multiply(np.dot(self.w2.T, self.dz2), 1 - np.power(self.a1, 2)) self.dw1 = (1 / self.number) * np.dot(self.dz1, self.x_train.T) self.db1 = (1 / self.number) * np.sum(self.dz1, axis=1, keepdims=True)
def _update_param(self): self.w1 = self.w1 - self.dw1 * self.learning_rate self.b1 = self.b1 - self.db1 * self.learning_rate self.w2 = self.w2 - self.dw2 * self.learning_rate self.b2 = self.b2 - self.db2 * self.learning_rate
def fit(self, x_train, y_train, print_cost = True): np.random.seed(3)
self.x_train = x_train self.y_train = y_train self.number = self.y_train.shape[1]
self._initialize_parameters()
for i in range(0, self.epochs): self._forward_propagation() self._compute_cost() self._backward_propagation() self._update_param()
if(print_cost and ((i % 1000) == 0)): print('迭代第%i次,代价为:%f' % (i, self.cost))
def predict(self, x_test, y_test): ''' 预测结果 :param x_test: :param y_test: :return: ''' z1 = np.dot(self.w1, x_test) + self.b1 a1 = np.tanh(z1) z2 = np.dot(self.w2, a1) + self.b2 a2 = 1 / (1 + np.exp(-z2))
n_rows = y_test.shape[0] n_cols = y_test.shape[1]
output = np.empty(shape=(n_rows, n_cols), dtype=int)
for i in range(n_rows): for j in range(n_cols): if a2[i][j] > 0.5: output[i][j] = 1 else: output[i][j] = 0
count = 0 for k in range(0, n_cols): if output[0][k] == y_test[0][k] and output[1][k] == y_test[1][k] and output[2][k] == y_test[2][k]: count = count + 1 else: continue
acc = count / int(y_test.shape[1]) * 100 print('测试集准确率:%.2f%%' % acc)
return output
if __name__ == '__main__': iris = load_iris() x_train, y_train = get_data('../Datasets/iris-train.csv') x_test, y_test = get_data('../Datasets/iris-test.csv')
my_bpnn = MyBPNN(10000, 10, 4, 3, 0.4) my_bpnn.fit(x_train, y_train) result = my_bpnn.predict(x_test, y_test)
|