In the AOM 2.0 (libaom) source tree, aomdec.c implements the decoder-side tool: it parses the AV1 bitstream and writes out the decoded YUV. Its entry point is the main function in aomdec.c.

The work done in main is straightforward: it first scans the command line for the --loops argument to find out how many times the input file should be decoded, and then calls the decode loop, main_loop, that many times.

Its code is as follows:

int main(int argc, const char **argv_) {
  unsigned int loops = 1, i;
  char **argv, **argi, **argj;
  struct arg arg;
  int error = 0;

  argv = argv_dup(argc - 1, argv_ + 1);
  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;
    if (arg_match(&arg, &looparg, argi)) {
      loops = arg_parse_uint(&arg);
      break;
    }
  }
  free(argv);
  for (i = 0; !error && i < loops; i++) error = main_loop(argc, argv_);
  return error;
}

The main flow of main_loop is:

  1. Initialize variables, mainly the decoder context, frame counters, accumulated decode time, and so on
  2. Parse the command-line arguments
  3. Open the input file, detect its container type (IVF/OBU/WebM/RAW), and open the output YUV file
  4. Select and sanity-check the decoder interface
  5. Enter the decode loop (a minimal sketch of this inner loop is given right after this list)
    1. Read one frame of compressed data from the bitstream with read_frame
    2. Decode the current frame with aom_codec_decode
    3. Fetch the decoded frame with aom_codec_get_frame and write it out (scaling it first if requested)
  6. Release the resources
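
As a warm-up for the full annotated listing, the following is a minimal sketch of that decode loop written directly against the public libaom decoder API (aom_codec_dec_init, aom_codec_decode, aom_codec_get_frame). It is not aomdec itself: it assumes a plain IVF input (32-byte file header, then a 12-byte header per frame whose first four bytes give the little-endian payload size), skips all of the container detection, option handling and output logic, and keeps only the read/decode/fetch skeleton of step 5 above.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include "aom/aom_decoder.h"
#include "aom/aomdx.h"

// Read the next IVF frame payload into a growable heap buffer.
// Returns 0 on success, non-zero at end of stream (a simplified stand-in for
// aomdec's read_frame()).
static int read_ivf_frame(FILE *in, uint8_t **buf, size_t *bytes) {
  uint8_t hdr[12];  // 4-byte payload size + 8-byte timestamp
  if (fread(hdr, 1, sizeof(hdr), in) != sizeof(hdr)) return 1;
  const size_t sz = (size_t)hdr[0] | ((size_t)hdr[1] << 8) |
                    ((size_t)hdr[2] << 16) | ((size_t)hdr[3] << 24);
  uint8_t *p = (uint8_t *)realloc(*buf, sz ? sz : 1);
  if (!p) return 1;
  *buf = p;
  if (fread(p, 1, sz, in) != sz) return 1;
  *bytes = sz;
  return 0;
}

int main(int argc, char **argv) {
  if (argc < 2) return EXIT_FAILURE;
  FILE *in = fopen(argv[1], "rb");
  if (!in) return EXIT_FAILURE;

  uint8_t file_hdr[32];  // skip the fixed-size IVF file header
  if (fread(file_hdr, 1, sizeof(file_hdr), in) != sizeof(file_hdr))
    return EXIT_FAILURE;

  aom_codec_ctx_t decoder;
  aom_codec_dec_cfg_t cfg = { 0 };  // threads / w / h left at their defaults
  if (aom_codec_dec_init(&decoder, aom_codec_av1_dx(), &cfg, 0))
    return EXIT_FAILURE;

  uint8_t *buf = NULL;
  size_t bytes = 0;
  int frame_avail = 1, got_data = 0, frames = 0;
  while (frame_avail || got_data) {
    // Step 5.1: read one frame of compressed data from the bitstream.
    frame_avail = !read_ivf_frame(in, &buf, &bytes);

    // Step 5.2: decode it; a NULL/0 call flushes the decoder at end of stream.
    if (aom_codec_decode(&decoder, frame_avail ? buf : NULL,
                         frame_avail ? bytes : 0, NULL))
      break;

    // Step 5.3: drain every picture that became available after this call.
    aom_codec_iter_t iter = NULL;
    aom_image_t *img;
    got_data = 0;
    while ((img = aom_codec_get_frame(&decoder, &iter))) {
      got_data = 1;
      ++frames;  // a real tool would write img out here (see the YUV sketch at the end of this post)
    }
  }
  printf("decoded %d frames\n", frames);

  free(buf);
  aom_codec_destroy(&decoder);
  fclose(in);
  return EXIT_SUCCESS;
}

Passing NULL/0 to aom_codec_decode once the input is exhausted is the flush step; the outer loop keeps running until aom_codec_get_frame stops returning pictures, which is exactly the role that main_loop's frame_avail/got_data pair plays in the real tool.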

The annotated code is as follows:

static int main_loop(int argc, const char **argv_) {
  aom_codec_ctx_t decoder;
  char *fn = NULL;
  int i;
  int ret = EXIT_FAILURE;
  uint8_t *buf = NULL;
  size_t bytes_in_buffer = 0, buffer_size = 0;
  FILE *infile;
  int frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
  int do_md5 = 0, progress = 0;
  int stop_after = 0, summary = 0, quiet = 1;
  int arg_skip = 0;
  int keep_going = 0;
  uint64_t dx_time = 0;
  struct arg arg;
  char **argv, **argi, **argj;
  int single_file;
  int use_y4m = 1;
  int opt_yv12 = 0;
  int opt_i420 = 0;
  int opt_raw = 0;
  aom_codec_dec_cfg_t cfg = { 0, 0, 0, !FORCE_HIGHBITDEPTH_DECODING };
  unsigned int fixed_output_bit_depth = 0;
  unsigned int is_annexb = 0;
  int frames_corrupted = 0;
  int dec_flags = 0;
  int do_scale = 0;
  int operating_point = 0;
  int output_all_layers = 0;
  int skip_film_grain = 0;
  int enable_row_mt = 0;
  aom_image_t *scaled_img = NULL;
  aom_image_t *img_shifted = NULL;
  int frame_avail, got_data, flush_decoder = 0;
  int num_external_frame_buffers = 0;
  struct ExternalFrameBufferList ext_fb_list = { 0, NULL };
  const char *outfile_pattern = NULL;
  char outfile_name[PATH_MAX] = { 0 };
  FILE *outfile = NULL;
  FILE *framestats_file = NULL;
  MD5Context md5_ctx;
  unsigned char md5_digest[16];
  struct AvxDecInputContext input = { NULL, NULL, NULL };
  struct AvxInputContext aom_input_ctx;
  memset(&aom_input_ctx, 0, sizeof(aom_input_ctx));
#if CONFIG_WEBM_IO
  struct WebmInputContext webm_ctx;
  memset(&webm_ctx, 0, sizeof(webm_ctx));
  input.webm_ctx = &webm_ctx;
#endif
  struct ObuDecInputContext obu_ctx = { NULL, NULL, 0, 0, 0 };
  int is_ivf = 0;

  obu_ctx.avx_ctx = &aom_input_ctx;
  input.obu_ctx = &obu_ctx;
  input.aom_input_ctx = &aom_input_ctx;

  /* Parse command line */
  exec_name = argv_[0];
  argv = argv_dup(argc - 1, argv_ + 1);

  aom_codec_iface_t *interface = NULL;
  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;

    if (arg_match(&arg, &help, argi)) {
      show_help(stdout, 0);
      exit(EXIT_SUCCESS);
    } else if (arg_match(&arg, &codecarg, argi)) {
      interface = get_aom_decoder_by_short_name(arg.val);
      if (!interface)
        die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
    } else if (arg_match(&arg, &looparg, argi)) {
      // no-op
    } else if (arg_match(&arg, &outputfile, argi)) {  // output file pattern
      outfile_pattern = arg.val;
    } else if (arg_match(&arg, &use_yv12, argi)) {
      use_y4m = 0;
      flipuv = 1;
      opt_yv12 = 1;
      opt_i420 = 0;
      opt_raw = 0;
    } else if (arg_match(&arg, &use_i420, argi)) {
      use_y4m = 0;
      flipuv = 0;
      opt_yv12 = 0;
      opt_i420 = 1;
      opt_raw = 0;
    } else if (arg_match(&arg, &rawvideo, argi)) {
      use_y4m = 0;
      opt_yv12 = 0;
      opt_i420 = 0;
      opt_raw = 1;
    } else if (arg_match(&arg, &flipuvarg, argi)) {
      flipuv = 1;
    } else if (arg_match(&arg, &noblitarg, argi)) {
      noblit = 1;
    } else if (arg_match(&arg, &progressarg, argi)) {
      progress = 1;
    } else if (arg_match(&arg, &limitarg, argi)) {
      stop_after = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &skiparg, argi)) {
      arg_skip = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &md5arg, argi)) {
      do_md5 = 1;
    } else if (arg_match(&arg, &framestatsarg, argi)) {
      framestats_file = fopen(arg.val, "w");
      if (!framestats_file) {
        die("Error: Could not open --framestats file (%s) for writing.\n",
            arg.val);
      }
    } else if (arg_match(&arg, &summaryarg, argi)) {
      summary = 1;
    } else if (arg_match(&arg, &threadsarg, argi)) {
      cfg.threads = arg_parse_uint(&arg);
#if !CONFIG_MULTITHREAD
      if (cfg.threads > 1) {
        die("Error: --threads=%d is not supported when CONFIG_MULTITHREAD = "
            "0.\n",
            cfg.threads);
      }
#endif
    } else if (arg_match(&arg, &rowmtarg, argi)) {
      enable_row_mt = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &verbosearg, argi)) {
      quiet = 0;
    } else if (arg_match(&arg, &scalearg, argi)) {
      do_scale = 1;
    } else if (arg_match(&arg, &fb_arg, argi)) {
      num_external_frame_buffers = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &continuearg, argi)) {
      keep_going = 1;
    } else if (arg_match(&arg, &outbitdeptharg, argi)) {
      fixed_output_bit_depth = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &isannexb, argi)) {
      is_annexb = 1;
      input.obu_ctx->is_annexb = 1;
    } else if (arg_match(&arg, &oppointarg, argi)) {
      operating_point = arg_parse_int(&arg);
    } else if (arg_match(&arg, &outallarg, argi)) {
      output_all_layers = 1;
    } else if (arg_match(&arg, &skipfilmgrain, argi)) {
      skip_film_grain = 1;
    } else {
      argj++;
    }
  }

  /* Check for unrecognized options */
  for (argi = argv; *argi; argi++)
    if (argi[0][0] == '-' && strlen(argi[0]) > 1)
      die("Error: Unrecognized option %s\n", *argi);

  /* Handle non-option arguments */
  fn = argv[0];

  if (!fn) {
    free(argv);
    fprintf(stderr, "No input file specified!\n");
    usage_exit();
  }

  /* Open file */
  infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);

  if (!infile) {
    fatal("Failed to open input file '%s'", strcmp(fn, "-") ? fn : "stdin");
  }
#if CONFIG_OS_SUPPORT
  /* Make sure we don't dump to the terminal, unless forced to with -o - */
  if (!outfile_pattern && isatty(STDOUT_FILENO) && !do_md5 && !noblit) {
    fprintf(stderr,
            "Not dumping raw video to your terminal. Use '-o -' to "
            "override.\n");
    free(argv);
    return EXIT_FAILURE;
  }
#endif
  input.aom_input_ctx->filename = fn;
  input.aom_input_ctx->file = infile;

  // Detect the input container type
  if (file_is_ivf(input.aom_input_ctx)) {
    input.aom_input_ctx->file_type = FILE_TYPE_IVF;
    is_ivf = 1;
  }
#if CONFIG_WEBM_IO
  else if (file_is_webm(input.webm_ctx, input.aom_input_ctx))
    input.aom_input_ctx->file_type = FILE_TYPE_WEBM;
#endif
  else if (file_is_obu(&obu_ctx))
    input.aom_input_ctx->file_type = FILE_TYPE_OBU;
  else if (file_is_raw(input.aom_input_ctx))
    input.aom_input_ctx->file_type = FILE_TYPE_RAW;
  else {
    fprintf(stderr, "Unrecognized input file type.\n");
#if !CONFIG_WEBM_IO
    fprintf(stderr, "aomdec was built without WebM container support.\n");
#endif
    free(argv);
    return EXIT_FAILURE;
  }

  outfile_pattern = outfile_pattern ? outfile_pattern : "-";
  single_file = is_single_file(outfile_pattern);

  if (!noblit && single_file) {
    generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                      aom_input_ctx.width, aom_input_ctx.height, 0);
    if (do_md5)
      MD5Init(&md5_ctx);
    else
      outfile = open_outfile(outfile_name);
  }

  if (use_y4m && !noblit) {
    if (!single_file) {
      fprintf(stderr,
              "YUV4MPEG2 not supported with output patterns,"
              " try --i420 or --yv12 or --rawvideo.\n");
      return EXIT_FAILURE;
    }

#if CONFIG_WEBM_IO
    if (aom_input_ctx.file_type == FILE_TYPE_WEBM) {
      if (webm_guess_framerate(input.webm_ctx, input.aom_input_ctx)) {
        fprintf(stderr,
                "Failed to guess framerate -- error parsing "
                "webm file?\n");
        return EXIT_FAILURE;
      }
    }
#endif
  }

  aom_codec_iface_t *fourcc_interface =
      get_aom_decoder_by_fourcc(aom_input_ctx.fourcc);

  if (is_ivf && !fourcc_interface)
    fatal("Unsupported fourcc: %x\n", aom_input_ctx.fourcc);

  if (interface && fourcc_interface && interface != fourcc_interface)
    warn("Header indicates codec: %s\n",
         aom_codec_iface_name(fourcc_interface));
  else
    interface = fourcc_interface;
  if (!interface) interface = get_aom_decoder_by_index(0);

  dec_flags = 0;
  if (aom_codec_dec_init(&decoder, interface, &cfg, dec_flags)) {
    fprintf(stderr, "Failed to initialize decoder: %s\n",
            aom_codec_error(&decoder));
    goto fail2;
  }
  if (!quiet) fprintf(stderr, "%s\n", decoder.name);

  if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AV1D_SET_IS_ANNEXB, is_annexb)) {
    fprintf(stderr, "Failed to set is_annexb: %s\n", aom_codec_error(&decoder));
    goto fail;
  }

  if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AV1D_SET_OPERATING_POINT,
                                    operating_point)) {
    fprintf(stderr, "Failed to set operating_point: %s\n",
            aom_codec_error(&decoder));
    goto fail;
  }

  if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AV1D_SET_OUTPUT_ALL_LAYERS,
                                    output_all_layers)) {
    fprintf(stderr, "Failed to set output_all_layers: %s\n",
            aom_codec_error(&decoder));
    goto fail;
  }

  if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AV1D_SET_SKIP_FILM_GRAIN,
                                    skip_film_grain)) {
    fprintf(stderr, "Failed to set skip_film_grain: %s\n",
            aom_codec_error(&decoder));
    goto fail;
  }

  if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AV1D_SET_ROW_MT, enable_row_mt)) {
    fprintf(stderr, "Failed to set row multithreading mode: %s\n",
            aom_codec_error(&decoder));
    goto fail;
  }

  // Skip the first few frames if requested (--skip)
  if (arg_skip) fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
  while (arg_skip) {
    if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) break;
    arg_skip--;
  }

  if (num_external_frame_buffers > 0) {
    ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
    ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
        num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
    if (aom_codec_set_frame_buffer_functions(&decoder, get_av1_frame_buffer,
                                             release_av1_frame_buffer,
                                             &ext_fb_list)) {
      fprintf(stderr, "Failed to configure external frame buffers: %s\n",
              aom_codec_error(&decoder));
      goto fail;
    }
  }

  frame_avail = 1;
  got_data = 0;

  if (framestats_file) fprintf(framestats_file, "bytes,qp\r\n");

  /* Decode file */
  while (frame_avail || got_data) {
    aom_codec_iter_t iter = NULL;
    aom_image_t *img;
    struct aom_usec_timer timer;
    int corrupted = 0;

    frame_avail = 0;
    if (!stop_after || frame_in < stop_after) {
      if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
        // read one frame of compressed data from the bitstream
        frame_avail = 1;
        frame_in++;

        aom_usec_timer_start(&timer);  // start the decode timer

        if (aom_codec_decode(&decoder, buf, bytes_in_buffer, NULL)) {
          // decode the current frame
          const char *detail = aom_codec_error_detail(&decoder);
          warn("Failed to decode frame %d: %s", frame_in,
               aom_codec_error(&decoder));
          if (detail) warn("Additional information: %s", detail);
          if (!keep_going) goto fail;
        }

        if (framestats_file) {  // log per-frame statistics (bytes, QP)
          int qp;
          if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AOMD_GET_LAST_QUANTIZER,
                                            &qp)) {
            warn("Failed AOMD_GET_LAST_QUANTIZER: %s",
                 aom_codec_error(&decoder));
            if (!keep_going) goto fail;
          }
          fprintf(framestats_file, "%d,%d\r\n", (int)bytes_in_buffer, qp);
        }

        aom_usec_timer_mark(&timer);
        dx_time += aom_usec_timer_elapsed(&timer);  // accumulate decode time
      } else {
        flush_decoder = 1;  // no more data to read: flush the decoder
      }
    } else {
      flush_decoder = 1;  // stop reading frames and flush the decoder
    }

    aom_usec_timer_start(&timer);

    if (flush_decoder) {
      // Flush the decoder.
      if (aom_codec_decode(&decoder, NULL, 0, NULL)) {
        warn("Failed to flush decoder: %s", aom_codec_error(&decoder));
      }
    }

    aom_usec_timer_mark(&timer);
    dx_time += aom_usec_timer_elapsed(&timer);

    got_data = 0;
    while ((img = aom_codec_get_frame(&decoder, &iter))) {
      ++frame_out;
      got_data = 1;

      if (AOM_CODEC_CONTROL_TYPECHECKED(&decoder, AOMD_GET_FRAME_CORRUPTED,
                                        &corrupted)) {
        // check whether the frame is corrupted
        warn("Failed AOM_GET_FRAME_CORRUPTED: %s", aom_codec_error(&decoder));
        if (!keep_going) goto fail;
      }
      frames_corrupted += corrupted;

      if (progress)
        show_progress(frame_in, frame_out, dx_time);  // per-frame progress

      if (!noblit) {  // write the decoded frame out (unless --noblit)
        const int PLANES_YUV[] = { AOM_PLANE_Y, AOM_PLANE_U, AOM_PLANE_V };
        const int PLANES_YVU[] = { AOM_PLANE_Y, AOM_PLANE_V, AOM_PLANE_U };
        const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;

        if (do_scale) {  // scale the output frame if requested (--scale)
          if (frame_out == 1) {
            // If the output frames are to be scaled to a fixed display size
            // then use the width and height specified in the container. If
            // either of these is set to 0, use the display size set in the
            // first frame header. If that is unavailable, use the raw decoded
            // size of the first decoded frame.
            int render_width = aom_input_ctx.width;
            int render_height = aom_input_ctx.height;
            if (!render_width || !render_height) {
              int render_size[2];
              if (AOM_CODEC_CONTROL_TYPECHECKED(
                      &decoder, AV1D_GET_DISPLAY_SIZE, render_size)) {
                // As last resort use size of first frame as display size.
                render_width = img->d_w;
                render_height = img->d_h;
              } else {
                render_width = render_size[0];
                render_height = render_size[1];
              }
            }
            scaled_img =
                aom_img_alloc(NULL, img->fmt, render_width, render_height, 16);
            scaled_img->bit_depth = img->bit_depth;
            scaled_img->monochrome = img->monochrome;
            scaled_img->csp = img->csp;
          }

          if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
#if CONFIG_LIBYUV
            libyuv_scale(img, scaled_img, kFilterBox);
            img = scaled_img;
#else
            fprintf(stderr,
                    "Failed to scale output frame: %s.\n"
                    "libyuv is required for scaling but is currently "
                    "disabled.\n"
                    "Be sure to specify -DCONFIG_LIBYUV=1 when running "
                    "cmake.\n",
                    aom_codec_error(&decoder));
            goto fail;
#endif
          }
        }

        // Default to codec bit depth if output bit depth not set
        unsigned int output_bit_depth;
        if (!fixed_output_bit_depth && single_file) {
          output_bit_depth = img->bit_depth;
        } else {
          output_bit_depth = fixed_output_bit_depth;
        }
        // Shift up or down if necessary
        if (output_bit_depth != 0)
          aom_shift_img(output_bit_depth, &img, &img_shifted);

        aom_input_ctx.width = img->d_w;
        aom_input_ctx.height = img->d_h;

        int num_planes = (opt_raw && img->monochrome) ? 1 : 3;
        if (single_file) {
          if (use_y4m) {  // write the output as Y4M
            char y4m_buf[Y4M_BUFFER_SIZE] = { 0 };
            size_t len = 0;
            if (frame_out == 1) {
              // Y4M file header
              len = y4m_write_file_header(
                  y4m_buf, sizeof(y4m_buf), aom_input_ctx.width,
                  aom_input_ctx.height, &aom_input_ctx.framerate,
                  img->monochrome, img->csp, img->fmt, img->bit_depth);
              if (img->csp == AOM_CSP_COLOCATED) {
                fprintf(stderr,
                        "Warning: Y4M lacks a colorspace for colocated "
                        "chroma. Using a placeholder.\n");
              }
              if (do_md5) {
                MD5Update(&md5_ctx, (md5byte *)y4m_buf, (unsigned int)len);
              } else {
                fputs(y4m_buf, outfile);
              }
            }

            // Y4M frame header
            len = y4m_write_frame_header(y4m_buf, sizeof(y4m_buf));
            if (do_md5) {
              MD5Update(&md5_ctx, (md5byte *)y4m_buf, (unsigned int)len);
              y4m_update_image_md5(img, planes, &md5_ctx);
            } else {
              fputs(y4m_buf, outfile);
              y4m_write_image_file(img, planes, outfile);
            }
          } else {  // write raw YUV
            if (frame_out == 1) {
              // Check if --yv12 or --i420 options are consistent with the
              // bit-stream decoded
              if (opt_i420) {
                if (img->fmt != AOM_IMG_FMT_I420 &&
                    img->fmt != AOM_IMG_FMT_I42016) {
                  fprintf(stderr,
                          "Cannot produce i420 output for bit-stream.\n");
                  goto fail;
                }
              }
              if (opt_yv12) {
                if ((img->fmt != AOM_IMG_FMT_I420 &&
                     img->fmt != AOM_IMG_FMT_YV12) ||
                    img->bit_depth != 8) {
                  fprintf(stderr,
                          "Cannot produce yv12 output for bit-stream.\n");
                  goto fail;
                }
              }
            }
            if (do_md5) {
              raw_update_image_md5(img, planes, num_planes, &md5_ctx);
            } else {
              raw_write_image_file(img, planes, num_planes, outfile);
            }
          }
        } else {  // !single_file: one output file per frame
          generate_filename(outfile_pattern, outfile_name, PATH_MAX, img->d_w,
                            img->d_h, frame_in);
          if (do_md5) {
            MD5Init(&md5_ctx);
            if (use_y4m) {
              y4m_update_image_md5(img, planes, &md5_ctx);
            } else {
              raw_update_image_md5(img, planes, num_planes, &md5_ctx);
            }
            MD5Final(md5_digest, &md5_ctx);
            print_md5(md5_digest, outfile_name);
          } else {
            outfile = open_outfile(outfile_name);
            if (use_y4m) {
              y4m_write_image_file(img, planes, outfile);
            } else {
              raw_write_image_file(img, planes, num_planes, outfile);
            }
            fclose(outfile);
          }
        }
      }
    }  // end of the frame output loop
  }  // end of the decode loop

  if (summary || progress) {
    show_progress(frame_in, frame_out, dx_time);
    fprintf(stderr, "\n");
  }

  if (frames_corrupted) {
    fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);
  } else {
    ret = EXIT_SUCCESS;
  }

fail:

  if (aom_codec_destroy(&decoder)) {
    fprintf(stderr, "Failed to destroy decoder: %s\n",
            aom_codec_error(&decoder));
  }

fail2:

  if (!noblit && single_file) {
    if (do_md5) {
      MD5Final(md5_digest, &md5_ctx);
      print_md5(md5_digest, outfile_name);
    } else {
      fclose(outfile);
    }
  }

#if CONFIG_WEBM_IO
  if (input.aom_input_ctx->file_type == FILE_TYPE_WEBM)
    webm_free(input.webm_ctx);
#endif
  if (input.aom_input_ctx->file_type == FILE_TYPE_OBU)
    obudec_free(input.obu_ctx);

  if (input.aom_input_ctx->file_type != FILE_TYPE_WEBM) free(buf);

  if (scaled_img) aom_img_free(scaled_img);
  if (img_shifted) aom_img_free(img_shifted);

  for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
    free(ext_fb_list.ext_fb[i].data);
  }
  free(ext_fb_list.ext_fb);
  fclose(infile);
  if (framestats_file) fclose(framestats_file);

  free(argv);

  return ret;
}
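
To make step 5.3 above concrete, here is a minimal sketch of dumping one decoded aom_image_t as planar YUV, roughly what raw_write_image_file does in the tool. It uses only the public aom_image_t fields (planes, stride, d_w/d_h and the chroma shifts), assumes an 8-bit, non-monochrome frame, and write_yuv_frame is a hypothetical name rather than part of libaom; high-bit-depth frames store 16-bit samples per pixel, so their row size would need to be doubled.

#include <stdio.h>

#include "aom/aom_image.h"

// Minimal sketch: dump the Y, U and V planes of an 8-bit aom_image_t to 'out',
// row by row, honouring each plane's stride and the chroma subsampling.
static void write_yuv_frame(const aom_image_t *img, FILE *out) {
  const int planes[3] = { AOM_PLANE_Y, AOM_PLANE_U, AOM_PLANE_V };
  for (int p = 0; p < 3; ++p) {
    const int plane = planes[p];
    // Chroma planes are narrower/shorter according to the chroma shifts
    // (mirrors aom_img_plane_width()/aom_img_plane_height() in tools_common.c).
    const int w = (plane > 0 && img->x_chroma_shift > 0)
                      ? (int)((img->d_w + 1) >> img->x_chroma_shift)
                      : (int)img->d_w;
    const int h = (plane > 0 && img->y_chroma_shift > 0)
                      ? (int)((img->d_h + 1) >> img->y_chroma_shift)
                      : (int)img->d_h;
    const unsigned char *src = img->planes[plane];
    for (int y = 0; y < h; ++y) {
      fwrite(src, 1, (size_t)w, out);  // one row of 8-bit samples
      src += img->stride[plane];
    }
  }
}

aomdec itself goes through raw_write_image_file / y4m_write_image_file and additionally handles bit-depth shifting (aom_shift_img), monochrome streams and the --flipuv plane order, which this sketch deliberately leaves out.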
