Installing Keras in Anaconda:

conda install keras

The installation completes:
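
As a quick sanity check (an addition, not from the original post), you can confirm the install from Python:

import keras             # should import without errors after the conda install
print(keras.__version__) # prints the installed Keras version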

Create a new notebook in Jupyter Notebook and run:

import keras
from keras.datasets import mnist # the MNIST dataset shipped with Keras
from keras.models import Sequential # the sequential model
from keras.layers import Dense # fully connected (dense) layers
from keras.optimizers import SGD # the optimizer

(x_train, y_train), (x_test, y_test) = mnist.load_data() # load the MNIST dataset

For well-known reasons, downloading the file from outside the firewall times out with an error; I modified the loading code following https://www.cnblogs.com/shinny/p/9283372.html.
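
The workaround in that post amounts to downloading mnist.npz manually and loading it from disk. A minimal sketch, assuming you have saved the archive locally as mnist.npz (the path is yours to choose):

import numpy as np

# Load the manually downloaded archive instead of letting Keras fetch it;
# the .npz file contains these four arrays under exactly these keys.
with np.load('mnist.npz') as f:
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']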

Running it again raised: "TabError: inconsistent use of tabs and spaces in indentation"

Fixed following https://blog.csdn.net/qq_41096996/article/details/85947560 — the point is that Python will not accept tabs and spaces mixed within one indented block, so the edited file's indentation must be made uniform:

It now runs successfully!

Continue with the following code:

print(x_train.shape, y_train.shape)
#(60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)
#(10000, 28, 28) (10000,)

Continue:

import matplotlib.pyplot as plt # import the plotting package
im = plt.imshow(x_train[0],cmap='gray')

Continue:

plt.show()
y_train[0]
#5

Continue:

x_train = x_train.reshape(60000,784) # flatten each image into a vector
x_test = x_test.reshape(10000,784) # same treatment for the test set
print(x_train.shape)
#(60000, 784)
print(x_test.shape)
#(10000, 784)
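
An equivalent way to write the reshape without hard-coding the sample counts (an aside, not in the original post):

x_train = x_train.reshape(-1, 28 * 28) # -1 lets NumPy infer the 60000
x_test = x_test.reshape(-1, 28 * 28)   # likewise infers the 10000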

Continue:

x_train[0]
#array([  0,   0,   0, ...,   0,   0,   0], dtype=uint8)
# (all 784 uint8 pixel values in [0, 255]; the full dump is truncated here for readability)

Continue:

x_train = x_train / 255 # scale pixel values from [0, 255] to [0, 1]
x_test = x_test / 255
x_train[0]
#array([0.        , 0.        , 0.        , ..., 0.        , 0.        , 0.        ])
# (the same 784 values, now floats in [0, 1]; e.g. 255 became 1.0 and 18 became
#  0.07058824; the full dump is truncated here for readability)
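
One detail worth noting: dividing a uint8 array by 255 yields float64. If memory matters, the common idiom is to cast to float32 first (a variant, not from the original post):

x_train = x_train.astype('float32') / 255 # float32 halves memory vs. float64
x_test = x_test.astype('float32') / 255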

Continue:

y_train = keras.utils.to_categorical(y_train,10) # one-hot encode the labels into 10 classes
y_test = keras.utils.to_categorical(y_test,10)
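
To see what the one-hot encoding did, print the first label again; since y_train[0] was 5 above, index 5 is now 1 and the rest are 0:

print(y_train[0])
# [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]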

Continue:

model = Sequential() # build an empty sequential model
# add the network layers
model.add(Dense(512,activation='relu',input_shape=(784,)))
model.add(Dense(256,activation='relu'))
model.add(Dense(10,activation='softmax'))
model.summary()

The output:

WARNING:tensorflow:From C:\3rd\Anaconda2\lib\site-packages\tensorflow\python\framework\op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_1 (Dense)              (None, 512)               401920
_________________________________________________________________
dense_2 (Dense)              (None, 256)               131328
_________________________________________________________________
dense_3 (Dense)              (None, 10)                2570
=================================================================
Total params: 535,818
Trainable params: 535,818
Non-trainable params: 0
_________________________________________________________________
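
The parameter counts in the summary are easy to verify by hand: each Dense layer has inputs × units weights plus units biases:

assert 784 * 512 + 512 == 401920        # dense_1
assert 512 * 256 + 256 == 131328        # dense_2
assert 256 * 10 + 10 == 2570            # dense_3
assert 401920 + 131328 + 2570 == 535818 # total params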

Continue:

model.compile(optimizer=SGD(),loss='categorical_crossentropy',metrics=['accuracy'])
model.fit(x_train,y_train,batch_size=64,epochs=5,validation_data=(x_test,y_test)) # here the test set is used directly as the validation set

Progress is printed while the cell runs; the output once training finishes:

WARNING:tensorflow:From C:\3rd\Anaconda2\lib\site-packages\tensorflow\python\ops\math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.cast instead.
Train on 60000 samples, validate on 10000 samples
Epoch 1/5
60000/60000 [==============================] - 7s 123us/step - loss: 0.7558 - acc: 0.8162 - val_loss: 0.3672 - val_acc: 0.8991
Epoch 2/5
60000/60000 [==============================] - 7s 112us/step - loss: 0.3356 - acc: 0.9068 - val_loss: 0.2871 - val_acc: 0.9204
Epoch 3/5
60000/60000 [==============================] - 7s 112us/step - loss: 0.2798 - acc: 0.9211 - val_loss: 0.2537 - val_acc: 0.9296
Epoch 4/5
60000/60000 [==============================] - 7s 117us/step - loss: 0.2468 - acc: 0.9302 - val_loss: 0.2313 - val_acc: 0.9332
Epoch 5/5
60000/60000 [==============================] - 7s 122us/step - loss: 0.2228 - acc: 0.9378 - val_loss: 0.2084 - val_acc: 0.9404
<keras.callbacks.History at 0x1dcaea054a8>
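
fit() also returns a History object, so the run above could be captured and the learning curves plotted. A minimal sketch using the 'acc'/'val_acc' keys this Keras version logs (note that calling fit again continues training the same model):

history = model.fit(x_train, y_train, batch_size=64, epochs=5,
                    validation_data=(x_test, y_test))
plt.plot(history.history['acc'], label='train accuracy')
plt.plot(history.history['val_acc'], label='validation accuracy')
plt.xlabel('epoch')
plt.legend()
plt.show()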

Continue:

score = model.evaluate(x_test,y_test) # returns [loss, accuracy] for the metrics compiled above
#10000/10000 [==============================] - 1s 53us/step

Continue:

print("loss:",score[0])
#loss: 0.2084256855905056

Continue:

print("accu:",score[1])
#accu: 0.9404
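
As a final sanity check (an addition to the original post), predict a single test image and compare the most probable class with the true label; argmax recovers the digit both from the softmax output and from the one-hot encoded label:

import numpy as np

pred = model.predict(x_test[:1]) # shape (1, 10) of class probabilities
print('predicted:', np.argmax(pred))
print('actual:   ', np.argmax(y_test[0]))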

References:

https://www.cnblogs.com/ncuhwxiong/p/9836648.html

https://www.cnblogs.com/shinny/p/9283372.html

https://blog.csdn.net/qq_41096996/article/details/85947560

Reposted from: https://www.cnblogs.com/ratels/p/11144881.html
