基于steam的游戏销量预测 — PART 3 — 基于BP神经网络的机器学习与预测
语言:c++
环境:windows
训练内容:根据从steam中爬取的数据经过文本分析制作的向量以及标签
使用相关:无
解释:
就是一个BP神经网络,借鉴参考了一些博客的解释和代码,具体哪些忘了,给出其中一个:
http://blog.csdn.net/zhongkejingwang/article/details/44514073
代码:
// Steam game sales-rank prediction, PART 3: BP (back-propagation) neural
// network. Trains on feature vectors built from scraped Steam data
// (training_data.txt), persists weights to data\data.txt, and predicts a
// sales "rank" (0..OUT_NODE_NUM-1, the digit count of the sales figure)
// for samples in sample.txt, writing results to result.txt.
#include <iostream>
#include <cstring>
#include <cmath>
#include <vector>
#include <algorithm>
#include <stdlib.h>
#include <time.h>
#include <fstream>

#define TAG_NUM 200
#define VEC_NUM 216
#define BASE_SCORE 5
#define MX_STR 2400
#define DIV 1000000.0
#define INF (1e9 + 44)

using namespace std;

namespace BPnamespace
{
#define IN_NODE_NUM 216      // input nodes (one per feature)
#define HIDDEN_NODE_NUM 24   // nodes per hidden layer
#define HIDDEN_LAYER_NUM 1   // number of hidden layers
#define OUT_NODE_NUM 9       // output nodes (one-hot rank classes)
#define LEARNING_RATE 0.03
#define MAX_RAND_SEG (int)144e4  // capacity of the shuffle buffer

    // Uniform random initial weight in (-0.1, 0.1).
    inline double xrand()
    {
        return ((2.0 * (double)rand() / RAND_MAX) - 1) / 10.0;
    }

    // Logistic activation; its derivative f'(x) = f(x)*(1-f(x)) is what the
    // back-propagation deltas below rely on.
    inline double sigmoid(double x)
    {
        return 1 / (1 + exp(-x));
    }

    struct InputNode
    {
        double value;               // fixed input value for the current sample
        vector<double> weight;      // weights to the first hidden layer
        vector<double> wdelta_sum;  // accumulated weight deltas

        InputNode()
        {
            weight.clear();
            wdelta_sum.clear();
        }
    };

    struct OutputNode
    {
        double value;       // activated output
        double delta;       // error term (target - output) * f'
        double rightout;    // training target
        double bias;
        double bdelta_sum;  // accumulated bias delta

        OutputNode() { }
    };

    struct HiddenNode
    {
        double value;               // activated output of this node
        double delta;               // back-propagated error term
        double bias;
        double bdelta_sum;          // accumulated bias delta
        vector<double> weight;      // weights to every node of the NEXT layer
        vector<double> wdelta_sum;  // accumulated weight deltas

        HiddenNode()
        {
            weight.clear();
            wdelta_sum.clear();
        }
    };

    // (id, random key) pairs; sorting by key yields a random permutation of
    // sample indices used to shuffle the training order.
    struct RandSegNode
    {
        int id, val;
    } rand_seg[MAX_RAND_SEG];

    struct Sample
    {
        vector<double> in, out;
    };

    bool cmpRandSeg(RandSegNode a, RandSegNode b)
    {
        return a.val < b.val;
    }

    class BP
    {
    public:
        double error;
        InputNode* input_layer[IN_NODE_NUM];
        OutputNode* output_layer[OUT_NODE_NUM];
        HiddenNode* hidden_layer[HIDDEN_LAYER_NUM][HIDDEN_NODE_NUM];

        // Load previously saved weights/biases from data\data.txt.
        // FIX: warn instead of silently reading garbage when the file is
        // missing (the original never checked the stream).
        void load()
        {
            string file_name = "data\\data.txt";
            ifstream infile(file_name.c_str(), ios::in);
            if (!infile)
            {
                cerr << "warning: cannot open " << file_name
                     << ", keeping random initial weights" << endl;
                return;
            }
            for (int i = 0; i < IN_NODE_NUM; i++)
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    infile >> input_layer[i]->weight[j];
            for (int k = 0; k < HIDDEN_LAYER_NUM - 1; k++)
                for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                        infile >> hidden_layer[k][i]->weight[j];
            for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                for (int j = 0; j < OUT_NODE_NUM; j++)
                    infile >> hidden_layer[HIDDEN_LAYER_NUM - 1][i]->weight[j];
            for (int k = 0; k < HIDDEN_LAYER_NUM; k++)
                for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                    infile >> hidden_layer[k][i]->bias;
            for (int i = 0; i < OUT_NODE_NUM; i++)
                infile >> output_layer[i]->bias;
        }

        // Persist all weights/biases, in the exact order load() expects.
        void write()
        {
            string file_name = "data\\data.txt";
            ofstream outfile(file_name.c_str(), ios::out);
            for (int i = 0; i < IN_NODE_NUM; i++)
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    outfile << input_layer[i]->weight[j] << ' ';
            for (int k = 0; k < HIDDEN_LAYER_NUM - 1; k++)
                for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                        outfile << hidden_layer[k][i]->weight[j] << ' ';
            for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                for (int j = 0; j < OUT_NODE_NUM; j++)
                    outfile << hidden_layer[HIDDEN_LAYER_NUM - 1][i]->weight[j] << ' ';
            for (int k = 0; k < HIDDEN_LAYER_NUM; k++)
                for (int i = 0; i < HIDDEN_NODE_NUM; i++)
                    outfile << hidden_layer[k][i]->bias << ' ';
            for (int i = 0; i < OUT_NODE_NUM; i++)
                outfile << output_layer[i]->bias << ' ';
        }

        // Build the network with small random weights and zero biases.
        BP()
        {
            srand((unsigned)time(NULL));
            error = 100;
            // input layer
            for (int i = 0; i < IN_NODE_NUM; i++)
            {
                input_layer[i] = new InputNode();
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                {
                    input_layer[i]->weight.push_back(xrand());
                    input_layer[i]->wdelta_sum.push_back(0);
                }
            }
            // hidden layers
            for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
            {
                // fan-out differs for the last hidden layer (to output layer)
                // vs. inner layers (to the next hidden layer)
                int fan_out = (i == HIDDEN_LAYER_NUM - 1) ? OUT_NODE_NUM
                                                          : HIDDEN_NODE_NUM;
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                {
                    hidden_layer[i][j] = new HiddenNode();
                    hidden_layer[i][j]->bias = 0;
                    for (int k = 0; k < fan_out; k++)
                    {
                        hidden_layer[i][j]->weight.push_back(xrand());
                        // FIX: the original forgot wdelta_sum for inner hidden
                        // layers, which would make backPropagationEpoc index
                        // out of bounds whenever HIDDEN_LAYER_NUM > 1.
                        hidden_layer[i][j]->wdelta_sum.push_back(0);
                    }
                }
            }
            // output layer
            for (int i = 0; i < OUT_NODE_NUM; i++)
            {
                output_layer[i] = new OutputNode();
                output_layer[i]->bias = 0;
            }
        }

        // FIX: release every node allocated in the constructor (the original
        // leaked all of them).
        ~BP()
        {
            for (int i = 0; i < IN_NODE_NUM; i++)
                delete input_layer[i];
            for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    delete hidden_layer[i][j];
            for (int i = 0; i < OUT_NODE_NUM; i++)
                delete output_layer[i];
        }

        // Forward pass for the single sample currently loaded via setInput().
        void forwardPropagationEpoc()
        {
            // input -> first hidden, then hidden -> hidden
            for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
            {
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                {
                    double sum = 0;
                    if (i == 0)
                    {
                        for (int k = 0; k < IN_NODE_NUM; k++)
                            sum += input_layer[k]->value * input_layer[k]->weight[j];
                    }
                    else
                    {
                        for (int k = 0; k < HIDDEN_NODE_NUM; k++)
                            sum += hidden_layer[i - 1][k]->value * hidden_layer[i - 1][k]->weight[j];
                    }
                    sum += hidden_layer[i][j]->bias;
                    hidden_layer[i][j]->value = sigmoid(sum);
                }
            }
            // last hidden -> output
            for (int i = 0; i < OUT_NODE_NUM; i++)
            {
                double sum = 0;
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    sum += hidden_layer[HIDDEN_LAYER_NUM - 1][j]->value * hidden_layer[HIDDEN_LAYER_NUM - 1][j]->weight[i];
                sum += output_layer[i]->bias;
                output_layer[i]->value = sigmoid(sum);
            }
        }

        // Backward pass: compute deltas and accumulate weight/bias updates.
        void backPropagationEpoc()
        {
            // output-layer deltas; also accumulate squared error
            for (int i = 0; i < OUT_NODE_NUM; i++)
            {
                double tmp = output_layer[i]->rightout - output_layer[i]->value;
                error += tmp * tmp / 2;
                output_layer[i]->delta = tmp * (1 - output_layer[i]->value) * output_layer[i]->value;
            }
            // hidden-layer deltas, last layer first
            for (int i = HIDDEN_LAYER_NUM - 1; i >= 0; i--)
            {
                if (i == HIDDEN_LAYER_NUM - 1)
                {
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    {
                        double sum = 0;
                        for (int k = 0; k < OUT_NODE_NUM; k++)
                            sum += output_layer[k]->delta * hidden_layer[i][j]->weight[k];
                        hidden_layer[i][j]->delta = sum * (1 - hidden_layer[i][j]->value) * hidden_layer[i][j]->value;
                    }
                }
                else
                {
                    // FIX: the original looped j < HIDDEN_LAYER_NUM here,
                    // leaving most inner-layer deltas stale for deeper nets.
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    {
                        double sum = 0;
                        for (int k = 0; k < HIDDEN_NODE_NUM; k++)
                            sum += hidden_layer[i + 1][k]->delta * hidden_layer[i][j]->weight[k];
                        hidden_layer[i][j]->delta = sum * (1 - hidden_layer[i][j]->value) * hidden_layer[i][j]->value;
                    }
                }
            }
            // accumulate input-layer weight deltas
            for (int i = 0; i < IN_NODE_NUM; i++)
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    input_layer[i]->wdelta_sum[j] += input_layer[i]->value * hidden_layer[0][j]->delta;
            // accumulate hidden-layer weight and bias deltas
            for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
            {
                for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                {
                    hidden_layer[i][j]->bdelta_sum += hidden_layer[i][j]->delta;
                    if (i == HIDDEN_LAYER_NUM - 1)
                    {
                        for (int k = 0; k < OUT_NODE_NUM; k++)
                            hidden_layer[i][j]->wdelta_sum[k] += hidden_layer[i][j]->value * output_layer[k]->delta;
                    }
                    else
                    {
                        for (int k = 0; k < HIDDEN_NODE_NUM; k++)
                            hidden_layer[i][j]->wdelta_sum[k] += hidden_layer[i][j]->value * hidden_layer[i + 1][k]->delta;
                    }
                }
            }
            // accumulate output-layer bias deltas
            for (int i = 0; i < OUT_NODE_NUM; i++)
                output_layer[i]->bdelta_sum += output_layer[i]->delta;
        }

        // Stochastic training: one randomly ordered sample per iteration,
        // cnt_bound iterations total, then weights/biases are updated
        // in-place after each sample.
        // FIX: take the sample set by const reference (the original copied
        // the whole vector), and clamp the shuffle to the buffer capacity.
        void training(const vector<Sample>& sample_group, int cnt_bound)
        {
            int sample_num = (int)sample_group.size();
            if (sample_num > MAX_RAND_SEG) sample_num = MAX_RAND_SEG;
            for (int i = 0; i < sample_num; i++)
                rand_seg[i].id = i, rand_seg[i].val = rand();
            sort(rand_seg, rand_seg + sample_num, cmpRandSeg);
            int cnt = 0;
            int now_id;
            while (cnt < cnt_bound)
            {
                error = 0;
                // zero all accumulators for this sample
                for (int i = 0; i < IN_NODE_NUM; i++)
                    input_layer[i]->wdelta_sum.assign(input_layer[i]->wdelta_sum.size(), 0);
                for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    {
                        hidden_layer[i][j]->wdelta_sum.assign(hidden_layer[i][j]->wdelta_sum.size(), 0);
                        hidden_layer[i][j]->bdelta_sum = 0;
                    }
                for (int i = 0; i < OUT_NODE_NUM; i++)
                    output_layer[i]->bdelta_sum = 0;
                // pick the next sample from the shuffled order (cycling)
                now_id = rand_seg[cnt % sample_num].id;
                setInput(sample_group[now_id].in);
                setOutput(sample_group[now_id].out);
                forwardPropagationEpoc();
                backPropagationEpoc();
                // apply input-layer weight updates
                for (int i = 0; i < IN_NODE_NUM; i++)
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                        input_layer[i]->weight[j] += LEARNING_RATE * input_layer[i]->wdelta_sum[j];
                // apply hidden-layer weight and bias updates
                for (int i = 0; i < HIDDEN_LAYER_NUM; i++)
                {
                    int fan_out = (i == HIDDEN_LAYER_NUM - 1) ? OUT_NODE_NUM
                                                              : HIDDEN_NODE_NUM;
                    for (int j = 0; j < HIDDEN_NODE_NUM; j++)
                    {
                        hidden_layer[i][j]->bias += LEARNING_RATE * hidden_layer[i][j]->bdelta_sum;
                        for (int k = 0; k < fan_out; k++)
                            hidden_layer[i][j]->weight[k] += LEARNING_RATE * hidden_layer[i][j]->wdelta_sum[k];
                    }
                }
                // apply output-layer bias updates
                for (int i = 0; i < OUT_NODE_NUM; i++)
                    output_layer[i]->bias += LEARNING_RATE * output_layer[i]->bdelta_sum;
                if (++cnt % 4000 == 0)
                {
                    printf("turn %d/%d finished \n", cnt, cnt_bound);
                    printf("training error: %lf\n", error);
                }
            }
        }

        // Run the forward pass on each test sample and store the raw output
        // activations in test_group[id].out (overwriting any prior contents).
        void predict(vector<Sample>& test_group)
        {
            int test_num = (int)test_group.size();
            for (int id = 0; id < test_num; id++)
            {
                test_group[id].out.clear();
                setInput(test_group[id].in);
                forwardPropagationEpoc();
                for (int i = 0; i < OUT_NODE_NUM; i++)
                    test_group[id].out.push_back(output_layer[i]->value);
            }
        }

        // Copy a sample's feature vector into the input layer.
        void setInput(const vector<double>& sample_in)
        {
            for (int i = 0; i < IN_NODE_NUM; i++)
                input_layer[i]->value = sample_in[i];
        }

        // Copy a sample's one-hot target into the output layer.
        void setOutput(const vector<double>& sample_out)
        {
            for (int i = 0; i < OUT_NODE_NUM; i++)
                output_layer[i]->rightout = sample_out[i];
        }
    };
}
using namespace BPnamespace;

BP bp;
vector<Sample> sample_group;
char ex_str[MX_STR];

// Rank of a sales figure = the number of digit characters in it
// (i.e. order of magnitude), which is the class the network predicts.
int getRank(char num_str[])
{
    int len = (int)strlen(num_str), ret = 0;
    for (int i = 0; i < len; i++)
        if (num_str[i] >= '0' && num_str[i] <= '9')
            ret++;
    return ret;
}

// Read training_data.txt, pad every feature vector to VEC_NUM with
// BASE_SCORE, skip samples whose sales field starts with '-', train for a
// fixed number of iterations, then persist the weights.
void part_train()
{
    sample_group.clear();
    char num_str[MX_STR];
    int tol, num;
    double tmp;
    Sample now;
    string infile_name = "training_data.txt";
    ifstream infile(infile_name.c_str(), ios::in);
    infile >> tol;
    for (int i = 0; i < tol; i++)
    {
        infile >> ex_str;   // sample identifier (unused)
        infile >> num;      // number of real features present
        now.in.clear(), now.out.clear();
        for (int j = 0; j < num; j++)
        {
            infile >> tmp;
            now.in.push_back(tmp);
        }
        for (int j = num; j < VEC_NUM; j++)
            now.in.push_back(BASE_SCORE);
        infile >> num_str;
        if (num_str[0] == '-') continue;  // sales unknown: skip sample
        num = getRank(num_str);
        for (int j = 0; j < OUT_NODE_NUM; j++)
            if (j == num)
                now.out.push_back(1);
            else now.out.push_back(0);
        sample_group.push_back(now);
    }
    bp.training(sample_group, 30000);
    bp.write();
}

// Read sample.txt, run the network, and emit the argmax class per sample
// to result.txt (and stdout).
void part_predict()
{
    sample_group.clear();
    char num_str[MX_STR];
    int tol, num;
    double tmp;
    Sample now;
    string infile_name = "sample.txt";
    ifstream infile(infile_name.c_str(), ios::in);
    infile >> tol;
    for (int i = 0; i < tol; i++)
    {
        infile >> ex_str;
        infile >> num;
        now.in.clear(), now.out.clear();
        for (int j = 0; j < num; j++)
        {
            infile >> tmp;
            now.in.push_back(tmp);
        }
        for (int j = num; j < VEC_NUM; j++)
            now.in.push_back(BASE_SCORE);
        infile >> num_str;  // sales field present in the file but unused here
        sample_group.push_back(now);
    }
    bp.predict(sample_group);
    string outfile_name = "result.txt";
    ofstream outfile(outfile_name.c_str(), ios::out);
    for (int i = 0; i < (int)sample_group.size(); i++)
    {
        // argmax over the output activations
        tmp = -INF;
        num = 0;
        now = sample_group[i];
        for (int j = 0; j < (int)now.out.size(); j++)
            if (now.out[j] > tmp)
                tmp = now.out[j], num = j;
        outfile << "id: " << i << " rank: " << num << endl;
        cout << "id: " << i << " rank: " << num << endl;
    }
}

int main()
{
    int tp;
    // FIX: corrected "trainning"/"tranning" typos in the prompt
    puts("input\n 0: load last training data\n 1: restart training\n 2: load last training data and continue training\n");
    scanf("%d", &tp);
    if (tp == 0) bp.load();
    else if (tp == 1) part_train();
    else if (tp == 2) bp.load(), part_train();
    else return puts("error"), 0;
    part_predict();
    system("pause");
    return 0;
}
View Code
转载于:https://www.cnblogs.com/FxxL/p/8410591.html
基于steam的游戏销量预测 — PART 3 — 基于BP神经网络的机器学习与预测相关推荐
- 基于steam的游戏销量预测 — PART 1 — 爬取steam游戏相关数据的爬虫
语言:python 环境:ubuntu 爬取内容:steam游戏标签,评论,以及在 steamspy 爬取对应游戏的销量 使用相关:urllib,lxml,selenium,chrome 解释: 流程 ...
- 基于steam的游戏销量预测 — PART 2 — 文本分析
不是我做的,OvO,略过 转载于:https://www.cnblogs.com/FxxL/p/8410579.html
- matlab耀输,基于 Matlab的BP神经网络在太阳耀斑级别预测中的应用
第 42卷 第 3期 2014年 5月 河南师范大学学报(自然科学版) Journal of Henan Normal University(Natural Science Edition) Z.42 ...
- (文章复现)5.基于BP神经网络的风电功率预测方法(MATLAB程序)
复现文章: 基于BP神经网络的风电功率预测方法--刘立群(2021年) 摘要: 风电功率预测结果的准确性,不仅关系到风力发电厂的综合运行效率,也与区域运行成本具备直接联系,为解决传统预测方法的不足,基 ...
- 【Matlab】基于多层前馈网络BP神经网络实现多分类预测(Excel可直接替换数据)
[Matlab]基于多层前馈网络BP神经网络实现多分类预测(Excel可直接替换数据) 1.算法简介 1.1 算法原理 1.2 算法流程 2.测试数据集 3.替换数据 4.混淆矩阵 5.对比结果 6. ...
- 基于BP神经网络的足球结果预测软件开发
基于BP神经网络的足球结果预测软件开发 一.问题描述 足球,被称为世界第一运动.足球比赛充满了对抗.力量.激情,比赛过程跌宕起伏,结果难以预测.随着人工智能时代的 到来,越来越多的人期望于将智能算法应 ...
- 【Matlab风电功率预测】遗传算法优化BP神经网络风电功率预测【含源码 760期】
一.代码运行视频(哔哩哔哩) [Matlab风电功率预测]遗传算法优化BP神经网络风电功率预测[含源码 760期] 二.matlab版本及参考文献 1 matlab版本 2014a 2 参考文献 [1 ...
- BP神经网络如何用历史数据预测未来数据
本文主要为了解决如何用BP神经网络由历史的目标数据与因素数据去预测未来的目标数据.Bp神经网络的具体算法步骤与代码在网络上已经有很多大佬写过了,本文提供了将其应用于预测的方法.(附简单直接可使用代码) ...
- 分类预测 | MATLAB实现CNN卷积神经网络多特征分类预测
分类预测 | MATLAB实现CNN卷积神经网络多特征分类预测 目录 分类预测 | MATLAB实现CNN卷积神经网络多特征分类预测 分类效果 基本介绍 程序设计 参考资料 致谢 分类效果 基本介绍 ...
- 【BP预测】基于差分进化算法优化BP神经网络实现汇率数据预测matlab代码
1 简介 镍镉电池组作为移动电源对于现代军民用品是必不可少的.为了能够有效的提高镍镉电池寿命的预测精准度,在建立BP神经网络的电池寿命预测模型的基础上,运用差分进化算法(DE)优化BP神经网络连接的初 ...
最新文章
- 赵本山说日本车。(笑死不偿命)
- angularjs 中的scope继承关系——(2)
- 理解ROS Navigation Stack,看完这篇你就知道啦!
- MDT2010-windows 7镜像捕获与模板镜像部署(二)(特别版)
- php小总结之 mysqli面向过程、面向对象和PDO在增删改查使用中的一些小区别
- 大数斐波那契数列(nyoj655)光棍的yy
- 大道至简,凯里亚德酒店成为酒店投资圈万众瞩目的“新”星
- 浏览器ocx控件安装 IE浏览器可用
- 关于ARM公司的cortex系列
- iPhone OS 4发布:支持多任务
- Windos系统使用webdav协议将阿里云盘挂载在本地,并实现Notability等支持webdav协议软件的自动备份
- 设置Jenkins语言为中文或英文
- WhatsApp 批量解封提交工具
- Android Switch和ToggleButton控件
- Android Studio 导出的apk安装时出现解析软件包错误(已解决)
- java远程打印机 文件_使用Java将文本文件打印到特定打印机
- 下载并安装MS office 365
- 那些你可能需要的图片处理工具
- 51-20210316华为海思Hi3516DV300的linux系统编译1(SPI模式)
- 无人机航测行业中常见名词解释,你都知道吗?