【Python-ML】自适应线性神经网络(Adaline)
# -*- coding: utf-8 -*-
'''
Created on 2017年12月21日
@author: Jason.F
@summary: 自适应线性神经网络学习算法
'''
import numpy as np
import time
import matplotlib.pyplot as plt
import pandas as pd


class AdalineGD(object):
    """Adaptive Linear Neuron (Adaline) classifier, batch gradient descent.

    Hyperparameters
    ---------------
    eta : float
        Learning rate (between 0.0 and 1.0).
    n_iter : int
        Passes (epochs) over the training dataset.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting; ``w_[0]`` is the bias unit.
    costs_ : list
        Sum-of-squared-errors cost recorded at every epoch.
    """

    def __init__(self, eta=0.01, n_iter=50):
        self.eta = eta
        self.n_iter = n_iter

    def fit(self, X, y):
        """Fit training data with full-batch gradient descent.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors.
        y : array-like, shape = [n_samples]
            Target values (+1 / -1).

        Returns
        -------
        self : object
        """
        self.w_ = np.zeros(1 + X.shape[1])
        self.costs_ = []
        for _ in range(self.n_iter):
            output = self.net_input(X)
            errors = y - output
            # Batch update: (negative) gradient of the SSE cost w.r.t. the weights.
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            cost = (errors ** 2).sum() / 2.0
            self.costs_.append(cost)
        return self

    def net_input(self, X):
        """Calculate the net input w^T x + bias."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute the (identity) linear activation."""
        return self.net_input(X)

    def predict(self, X):
        """Return the class label (+1 / -1) after the unit-step function."""
        return np.where(self.activation(X) >= 0.0, 1, -1)


if __name__ == "__main__":
    # time.clock() was removed in Python 3.8; perf_counter() is its replacement.
    start = time.perf_counter()
    # Training data: the Iris dataset (column 4 holds the class label).
    train = pd.read_csv(
        'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
        header=None)
    X_train = train.drop([4], axis=1).values  # DataFrame -> ndarray
    y_train = train[4].values
    # Feature standardization: rescale so each feature has mean 0 and std 1.
    X_std = np.copy(X_train)
    X_std[:, 0] = (X_train[:, 0] - X_train[:, 0].mean()) / X_train[:, 0].std()
    X_std[:, 1] = (X_train[:, 1] - X_train[:, 1].mean()) / X_train[:, 1].std()
    # X_std[:, 2] = (X_train[:, 2] - X_train[:, 2].mean()) / X_train[:, 2].std()
    # X_std[:, 3] = (X_train[:, 3] - X_train[:, 3].mean()) / X_train[:, 3].std()
    y = np.where(y_train == 'Iris-setosa', -1, 1)  # one-vs-rest (OvR) labeling
    # Observe the effect of the learning-rate hyperparameter at fixed epochs.
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
    # eta=0.01, n_iter=20
    agd1 = AdalineGD(eta=0.01, n_iter=20).fit(X_std, y)
    print(agd1.predict([6.9, 3.0, 5.1, 1.8]))  # prediction
    ax[0].plot(range(1, len(agd1.costs_) + 1), np.log10(agd1.costs_), marker='o')
    ax[0].set_xlabel('Epochs')
    ax[0].set_ylabel('log(Sum-Squared-error)')
    ax[0].set_title('Adaline-learning rate 0.01')
    # eta=0.0001, n_iter=20
    agd2 = AdalineGD(eta=0.0001, n_iter=20).fit(X_std, y)
    print(agd2.predict([6.9, 3.0, 5.1, 1.8]))  # prediction
    ax[1].plot(range(1, len(agd2.costs_) + 1), np.log10(agd2.costs_), marker='x')
    ax[1].set_xlabel('Epochs')
    ax[1].set_ylabel('log(Sum-Squared-error)')
    ax[1].set_title('Adaline-learning rate 0.0001')
    # show
    plt.show()
    end = time.perf_counter()
    print('finish all in %s' % str(end - start))
# -*- coding: utf-8 -*-
'''
Created on 2017年12月21日
@author: Jason.F
@summary: 自适应线性神经网络学习算法
'''
import numpy as np
import time
import matplotlib.pyplot as plt
import pandas as pd
from numpy.random import seed


class AdalineSGD(object):
    """Adaptive Linear Neuron (Adaline) classifier, stochastic gradient descent.

    Hyperparameters
    ---------------
    eta : float
        Learning rate (between 0.0 and 1.0).
    n_iter : int
        Passes (epochs) over the training dataset.
    shuffle : bool (default: True)
        Shuffle training data every epoch to prevent cycles.
    random_state : int (default: None)
        Random seed used for shuffling.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting; ``w_[0]`` is the bias unit.
    cost_ : list
        Average per-sample cost in every epoch.
    """

    def __init__(self, eta=0.01, n_iter=20, shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.w_initialized = False
        self.shuffle = shuffle
        if random_state:
            seed(random_state)

    def fit(self, X, y):
        """Fit training data, one sample-wise weight update at a time.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors.
        y : array-like, shape = [n_samples]
            Target values (+1 / -1).

        Returns
        -------
        self : object
        """
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            cost = []
            for xi, target in zip(X, y):
                cost.append(self._update_weights(xi, target))
            avg_cost = sum(cost) / len(y)
            self.cost_.append(avg_cost)
        return self

    def partial_fit(self, X, y):
        """Fit training data without reinitializing weights (online learning)."""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        # A multi-sample batch updates per sample; a single sample updates once.
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)
        return self

    def _shuffle(self, X, y):
        """Shuffle training data (same permutation for X and y)."""
        r = np.random.permutation(len(y))
        return X[r], y[r]

    def _initialize_weights(self, m):
        """Initialize weights (m features + bias) to zeros."""
        self.w_ = np.zeros(1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        """Apply the Adaline learning rule to one sample; return its cost."""
        output = self.net_input(xi)
        error = target - output
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * error ** 2
        return cost

    def net_input(self, X):
        """Calculate the net input w^T x + bias."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute the (identity) linear activation."""
        return self.net_input(X)

    def predict(self, X):
        """Return the class label (+1 / -1) after the unit-step function."""
        return np.where(self.activation(X) >= 0.0, 1, -1)


if __name__ == "__main__":
    # time.clock() was removed in Python 3.8; perf_counter() is its replacement.
    start = time.perf_counter()
    # Training data: the Iris dataset (column 4 holds the class label).
    train = pd.read_csv(
        'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
        header=None)
    X_train = train.drop([4], axis=1).values  # DataFrame -> ndarray
    y_train = train[4].values
    # Feature standardization: rescale so each feature has mean 0 and std 1.
    X_std = np.copy(X_train)
    X_std[:, 0] = (X_train[:, 0] - X_train[:, 0].mean()) / X_train[:, 0].std()
    X_std[:, 1] = (X_train[:, 1] - X_train[:, 1].mean()) / X_train[:, 1].std()
    # X_std[:, 2] = (X_train[:, 2] - X_train[:, 2].mean()) / X_train[:, 2].std()
    # X_std[:, 3] = (X_train[:, 3] - X_train[:, 3].mean()) / X_train[:, 3].std()
    y = np.where(y_train == 'Iris-setosa', -1, 1)  # one-vs-rest (OvR) labeling
    # Observe the effect of the learning-rate hyperparameter at fixed epochs.
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
    # eta=0.01, n_iter=20
    agd1 = AdalineSGD(eta=0.01, n_iter=20, random_state=1).fit(X_std, y)
    print(agd1.predict([6.9, 3.0, 5.1, 1.8]))  # prediction
    ax[0].plot(range(1, len(agd1.cost_) + 1), agd1.cost_, marker='o')
    ax[0].set_xlabel('Epochs')
    ax[0].set_ylabel('Average Cost')
    ax[0].set_title('Adaline-learning rate 0.01')
    # eta=0.0001, n_iter=20
    agd2 = AdalineSGD(eta=0.0001, n_iter=20, random_state=1).fit(X_std, y)
    print(agd2.predict([6.9, 3.0, 5.1, 1.8]))  # prediction
    ax[1].plot(range(1, len(agd2.cost_) + 1), agd2.cost_, marker='x')
    ax[1].set_xlabel('Epochs')
    ax[1].set_ylabel('Average Cost')
    ax[1].set_title('Adaline-learning rate 0.0001')
    # show
    plt.show()
    # Online-update demo: weights before and after one partial_fit step.
    print(agd2.w_)  # before the update
    agd2.partial_fit(X_std[0, :], y[0])
    print(agd2.w_)  # after the update
    end = time.perf_counter()
    print('finish all in %s' % str(end - start))
下图是对特征值不做标准化的,可以比对效果:
【Python-ML】自适应线性神经网络(Adaline)相关推荐
- 神经网络学习(二)——自适应线性神经网络
自适应线性神经网络(Adaline)和感知器的区别: 1.自适应线性神经网络的激活函数不再采用阶跃函数,而是直接将样本运算的结果(点乘)与实际结果相比较.(白话版:自适应线性神经网络的激活函数,是一个 ...
- C++Adaline自适应线性神经网络算法(附完整源码)
C++Adaline自适应线性神经网络算法 C++Adaline自适应线性神经网络算法完整源码(定义,实现,main函数测试) C++Adaline自适应线性神经网络算法完整源码(定义,实现,main ...
- MATLAB神经网络学习笔记之:对线性神经网络进行自适应训练
为什么80%的码农都做不了架构师?>>> % 对线性神经网络进行自适应训练 clear all; p1 = { -1 0 1 0 1 1 -1 0 -1 1 0 1}; t1 ...
- 看懂自适应模糊神经网络(ANFIS)并附ANFIS代码
ANFIS (Adaptive-Network-Based Fuzzy Inference System)--自适应模糊神经网络,最早于1993年由Jyh-Shing Roger Jang提出.采用模 ...
- [Python人工智能] 七.加速神经网络、激励函数和过拟合
从本系列文章开始,作者正式开始研究Python深度学习.神经网络及人工智能相关知识.前六篇文章讲解了神经网络基础概念.Theano库的安装过程及基础用法.theano实现回归神经网络.theano实现 ...
- Python 散点图线性拟合_机器学习之利用Python进行简单线性回归分析
前言:在利用机器学习方法进行数据分析时经常要了解变量的相关性,有时还需要对变量进行回归分析.本文首先对人工智能/机器学习/深度学习.相关分析/因果分析/回归分析等易混淆的概念进行区分,最后结合案例介绍 ...
- python 数学期望_(Python)零起步数学+神经网络入门
摘要: 手把手教你用(Python)零起步数学+神经网络入门! 在这篇文章中,我们将在Python中从头开始了解用于构建具有各种层神经网络(完全连接,卷积等)的小型库中的机器学习和代码.最终,我们将能 ...
- Python搭建tensorflow三层神经网络
Python搭建tensorflow三层神经网络的小例子 import tensorflow as tf import numpy as np import matplotlib.pyplot as ...
- 从零开始掌握Python机器学习(循环神经网络、卷积神经网络)
本文中的文章用于做笔记用,来源于网络,并非本人所写,如有侵权,请您联系我标明出处或删除,3Q~ 只需十四步:从零开始掌握 Python 机器学习(附资源) Python 可以说是现在最流行的机器学习语 ...
最新文章
- c语言调式有错误,vsc调式c语言
- §3—2 借贷记账法
- 聚类算法:Hierarchical Clustering层次聚类
- linux出站入站端口维护,linux下如何用iptables开放指定端口_网站服务器运行维护,linux,iptables,端口...
- 本人的博客只是工作期间随手记录的笔记而已,所以不会很详尽,由此给您带来的不便,恳请多多包涵~...
- js 创建keyframe_javascript – 查找特定的CSS @keyframes规则
- 前几天入手一大菠萝,写个初始化教程
- UIKit框架-基础控件Swift版本: 7.UISwitch方法/属性详解
- Android 3.2 联机测试adb驱动如何安装和配置?
- NSArray去除重复元素
- hadoop配置流程
- Python面试常见算法题集锦
- 大话2服务器丢失怎么修复,我玩大话2,现在服务器找不见了,怎么办?
- GPS经纬度坐标转换的方法
- 戏如人生,人生如戏!
- Icon图标 [Java]
- 计算机编程序专业,计算机编程方向专业
- window解决端口号被占用
- 2020互联网大厂职级对应薪资一览表
- OJ题——吃货排排坐