一、环境

mac 10.12.2

cocos2dx-3.13.1

ffmpeg 3.0

二、新建项目和编译库

cocos2dx按照官网新建一个实例。

ffmpeg编译iOS库的方法可参考:http://blog.csdn.net/u013654125/article/details/73549132

ffmpeg编译完后,会有得到一个FFmpeg-iOS文件夹,文件夹里有include和lib这两个文件夹。这两个文件夹里就是ffmpeg编译好的头文件和库文件。

三、项目配置

如上图:

这三个地方需要做一些修改。

第一个方框的配置

LiveVideo.cpp

//
//  LiveVideo.cpp
//  Game
//
//  Created by zhufu on 2017/3/1.
//
#include "LiveVideo.h"        // FIX: this include was commented out in the original paste
#include "HelloWorldScene.h"

// ffmpeg's C headers rely on the C99 INT64_C/UINT64_C macros, which some C++
// toolchains do not define; provide fallbacks before pulling in ffmpeg.
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/avutil.h"
#include "libavfilter/avfilter.h"
#endif
}

// Builds the demo scene: a background sprite, the LiveVideo node, and the
// HelloWorld layer on top.
Scene* LiveVideo::createScene()
{
    auto scene = Scene::create();

    auto sprite = Sprite::create("HelloWorld.png");
    // position the sprite near the center of the screen (shifted left/up)
    sprite->setPosition(Vec2(Director::getInstance()->getVisibleSize().width / 2 - 140,
                             Director::getInstance()->getVisibleSize().height / 2 + 100));
    // add the sprite as a child to this layer
    scene->addChild(sprite, 0);

    LiveVideo* liveVideo = LiveVideo::create();
    scene->addChild(liveVideo);
    // NOTE(review): create() does not call init(); it is invoked manually here,
    // so Node::init() side effects happen after addChild().
    liveVideo->init();

    auto layer = HelloWorld::create();
    scene->addChild(layer);
    return scene;
}

// Standard cocos2d-x factory: allocate, autorelease, return (init() is NOT
// called here — see createScene()).
LiveVideo* LiveVideo::create()
{
    LiveVideo* liveVideo = new (std::nothrow) LiveVideo();
    if (liveVideo)
    {
        liveVideo->autorelease();
        return liveVideo;
    }
    CC_SAFE_DELETE(liveVideo);
    return nullptr;
}

// Wires up events, UI buttons and the GL pipeline, then immediately starts
// the ffmpeg decode thread via play().
bool LiveVideo::init()
{
    initEvents();
    initCommand();

    // start the ffmpeg decode thread
    play();

    addPlayButton();
    addPlayHKSButton();
    addStopButton();
    addRefreshButton();

    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    loadShader();
    loadTexture();
    loadRectangle();
    return true;
}

// "play" button: resumes the default stream.
void LiveVideo::addPlayButton()
{
    auto button = ui::Button::create("CloseNormal.png", "CloseSelected.png");
    button->setTitleText("play");
    button->setPosition(Vec2(170, 100));
    button->addClickEventListener(CC_CALLBACK_0(LiveVideo::play, this));
    button->setContentSize(Size(100, 100));
    addChild(button);
}

// "playHKS" button: plays the Hong Kong Satellite TV test stream.
void LiveVideo::addPlayHKSButton()
{
    auto button = ui::Button::create("CloseNormal.png", "CloseSelected.png");
    button->setTitleText("playHKS");
    button->setPosition(Vec2(240, 100));
    button->addClickEventListener(CC_CALLBACK_0(LiveVideo::playHKS, this));
    button->setContentSize(Size(100, 100));
    addChild(button);
}

// "stop" button: clears the play flag so the decode thread exits.
void LiveVideo::addStopButton()
{
    auto button = ui::Button::create("CloseNormal.png", "CloseSelected.png");
    button->setTitleText("stop");
    button->setPosition(Vec2(170, 50));
    button->addClickEventListener(CC_CALLBACK_0(LiveVideo::stop, this));
    button->setContentSize(Size(100, 100));
    addChild(button);
}

// "refresh" button: drops buffered frames (or restarts if stopped).
void LiveVideo::addRefreshButton()
{
    auto button = ui::Button::create("CloseNormal.png", "CloseSelected.png");
    button->setTitleText("refresh");
    button->setPosition(Vec2(240, 50));
    button->addClickEventListener(CC_CALLBACK_0(LiveVideo::refresh, this));
    button->setContentSize(Size(100, 100));
    addChild(button);
}

// Registers custom events so other code can stop/start decoding.
// NOTE(review): the "startFFmpegDecode" listener invokes ffmpegDecode()
// directly on the dispatching thread — unlike play(), it does NOT spawn a
// worker thread, so it would block the caller until decoding ends.
void LiveVideo::initEvents()
{
    EventListenerCustom* stopDecodeListener =
        EventListenerCustom::create("stopFFmpegDecode", CC_CALLBACK_0(LiveVideo::stop, this));
    Director::getInstance()->getEventDispatcher()->addEventListenerWithFixedPriority(stopDecodeListener, 1);

    EventListenerCustom* startDecodeListener =
        EventListenerCustom::create("startFFmpegDecode",
                                    CC_CALLBACK_0(LiveVideo::ffmpegDecode, this, currentLivePath));
    Director::getInstance()->getEventDispatcher()->addEventListenerWithFixedPriority(startDecodeListener, 1);
}

// Starts decoding the default RTMP stream on a detached worker thread.
void LiveVideo::play()
{
    if (!isPlay())
    {
        const char* filePath = "rtmp://113.10.194.251/live/mtable1";
        setPlay(true);
        std::thread t(&LiveVideo::ffmpegDecode, this, filePath);
        t.detach();   // NOTE(review): detached thread uses `this`; the node must outlive it
    }
}

// Starts decoding the HKS test stream on a detached worker thread.
void LiveVideo::playHKS()
{
    if (!isPlay())
    {
        const char* filePath = "rtmp://live.hkstv.hk.lxdns.com/live/hks";
        setPlay(true);
        std::thread t(&LiveVideo::ffmpegDecode, this, filePath);
        t.detach();
    }
}

// Signals the decode thread to stop (checked once per read-frame loop).
void LiveVideo::stop()
{
    setPlay(false);
}

// While playing: drop the buffered frames to resynchronize. While stopped:
// restart playback.
// NOTE(review): clearBuf() here runs on the UI thread while the decode thread
// may be pushing into _data — unsynchronized access, TODO add a mutex.
void LiveVideo::refresh()
{
    if (_playFlag)
    {
        clearBuf();
    }
    else
    {
        play();
    }
}

void LiveVideo::setPlay(bool playFlag)
{
    _playFlag = playFlag;
}

bool LiveVideo::isPlay()
{
    return _playFlag;
}

// Wall-clock time in milliseconds (rounded to the nearest ms).
long LiveVideo::getCurrentTime()
{
    struct timeval now;
    gettimeofday(&now, NULL);
    return now.tv_sec * 1000 + (int)(now.tv_usec / 1000 + 0.5);
}

// Flips one tightly-packed 8-bit plane (width x height bytes) upside down,
// in place, by swapping row i with row (height-1-i).
void LiveVideo::flipVertical(int width, int height, char* arr)
{
    int index = 0, f_index, cycle = height >> 1;
    char buf;
    for (int i = 0; i < cycle; i++)
    {
        for (int j = 0; j < width; j++)
        {
            // current pixel
            index = i * width + j;
            // pixel to swap with (mirrored row)
            f_index = (height - 1 - i) * width + j;
            // swap the two bytes through a temporary
            buf = arr[index];
            arr[index] = arr[f_index];
            arr[f_index] = buf;
        }
    }
}

// Decode loop (runs on the worker thread). Opens `filePath`, finds the video
// stream, and for every decoded frame packs a vertically-flipped YUV420p
// buffer (w*h*3/2 bytes) into _data for onDraw() to consume.
// Returns 0 always; errors just end the loop early.
// NOTE(review): _data and _playFlag are shared with the GL thread without any
// synchronization — TODO protect with a mutex/atomic.
int LiveVideo::ffmpegDecode(const char* filePath)
{
    // start decoding
    CCLOG("filePath %s", filePath);
//    strcpy(currentLivePath, filePath);
    av_register_all();
    avformat_network_init();

    AVFormatContext* pFormat = NULL;
    AVCodecContext* video_dec_ctx = NULL;
    AVCodec* video_dec = NULL;
    AVPacket* pkt = NULL;
    AVFrame* pFrame = NULL;

    do
    {
        if (avformat_open_input(&pFormat, filePath, NULL, NULL) < 0)
        {
            CCLOG("Couldn't open input stream.(无法打开输入流)\n");
            break;
        }
        CCLOG("%lld", pFormat->probesize);
        CCLOG("%lld", pFormat->max_analyze_duration);
        // keep probing minimal to reduce live-stream startup latency
        pFormat->probesize = 100;
        pFormat->max_analyze_duration = 0;
        if (avformat_find_stream_info(pFormat, NULL) < 0)
        {
            CCLOG("Couldn't find stream information.(无法获取流信息)\n");
            break;
        }

        // locate the first video stream
        int videoIndex = -1;
        for (unsigned int s = 0; s < pFormat->nb_streams; s++)   // FIX: nb_streams is unsigned
        {
            if (pFormat->streams[s]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoIndex = (int)s;
                break;
            }
        }
        if (videoIndex == -1)
        {
            printf("Didn't find a video stream.(没有找到视频流)\n");
            break;
        }

        video_dec_ctx = pFormat->streams[videoIndex]->codec;
        _frameRate = pFormat->streams[videoIndex]->r_frame_rate.num;
        if (_frameRate == 0)
        {
            _frameRate = 1;   // guard against divide-by-zero users of _frameRate
        }
        _system_start_time = getCurrentTime();
        _start_time = pFormat->streams[videoIndex]->start_time;
        _pixel_w = video_dec_ctx->width;
        _pixel_h = video_dec_ctx->height;
        int pixel_w2 = _pixel_w >> 1;   // chroma plane width  (YUV420p)
        int pixel_h2 = _pixel_h >> 1;   // chroma plane height (YUV420p)

        video_dec = avcodec_find_decoder(video_dec_ctx->codec_id);
        if (video_dec == NULL)
        {
            printf("Codec not found.(没有找到解码器)\n");
            break;
        }
        if (avcodec_open2(video_dec_ctx, video_dec, NULL) < 0)
        {
            printf("Could not open codec.(无法打开解码器)\n");
            break;
        }

        pkt = av_packet_alloc();
        av_init_packet(pkt);
        pFrame = av_frame_alloc();

        while (_playFlag)
        {
            if (av_read_frame(pFormat, pkt) < 0)
            {
                CCLOG("读取帧失败!!!!");
                break;
            }
            if (pkt->stream_index == videoIndex)
            {
                int got_picture = 0, ret = 0;
                ret = avcodec_decode_video2(video_dec_ctx, pFrame, &got_picture, pkt);
                if (ret < 0)
                {
                    printf("Decode Error.(解码错误)\n");
                    break;
                }
                if (got_picture)
                {
                    // pack Y, U, V planes contiguously; each plane row may be
                    // padded in pFrame->linesize, hence the row-by-row memcpy
                    char* buf = new char[video_dec_ctx->height * video_dec_ctx->width * 3 / 2];
                    int a = 0, i;
                    for (i = 0; i < _pixel_h; i++)
                    {
                        memcpy(buf + a, pFrame->data[0] + i * pFrame->linesize[0], _pixel_w);
                        a += _pixel_w;
                    }
                    flipVertical(_pixel_w, _pixel_h, buf);
                    for (i = 0; i < pixel_h2; i++)
                    {
                        memcpy(buf + a, pFrame->data[1] + i * pFrame->linesize[1], pixel_w2);
                        a += pixel_w2;
                    }
                    flipVertical(pixel_w2, pixel_h2, buf + _pixel_w * _pixel_h);
                    for (i = 0; i < pixel_h2; i++)
                    {
                        memcpy(buf + a, pFrame->data[2] + i * pFrame->linesize[2], pixel_w2);
                        a += pixel_w2;
                    }
                    flipVertical(pixel_w2, pixel_h2, buf + _pixel_w * _pixel_h + _pixel_w * _pixel_h / 4);

                    // hand ownership of buf to the frame queue
                    FrameData data;
                    data.pts = pFrame->pkt_pts;
                    data.buf = buf;
                    _data.push_back(data);
                    buf = NULL;

                    CCLOG("pts %lld", pkt->pts);
                    CCLOG("pts %lld", pFrame->pts);
                    AVRational test;
                    test = av_get_time_base_q();
                    CCLOG("test %d, %d", test.num, test.den);
                }
            }
            av_packet_unref(pkt);
        }
    } while (0);

    // teardown — FIX: the original double-freed here (av_free/delete on a
    // frame already released by av_frame_free, and delete on an
    // AVFormatContext already released by avformat_close_input).
    _playFlag = false;
    clearBuf();
    if (pFrame)
    {
        av_frame_free(&pFrame);        // frees the frame and nulls the pointer
    }
    if (pkt)
    {
        av_packet_free(&pkt);          // unrefs payload + side data and frees the packet
    }
    if (video_dec_ctx)
    {
        avcodec_close(video_dec_ctx);  // context itself is owned by pFormat
        video_dec_ctx = NULL;
    }
    if (pFormat)
    {
        avformat_close_input(&pFormat);   // closes and frees the context
    }
    return 0;
}

// Frees every queued frame buffer and empties the queue.
void LiveVideo::clearBuf()
{
    std::vector<FrameData>::iterator it;
    for (it = _data.begin(); it != _data.end(); )
    {
        if (it->buf)
        {
            delete[] it->buf;
            it->buf = NULL;
        }
        it = _data.erase(it);
    }
}

// Compiles and links the YUV->RGB shader pair from Resources/shader.
// NOTE(review): _glProgram is never released — leaks one GLProgram per node.
void LiveVideo::loadShader()
{
    _glProgram = new GLProgram();
    _glProgram->initWithFilenames("shader/vertexShader.vsh", "shader/fragmentShader.fsh");
    _glProgram->link();
}

// Creates the three single-channel textures (Y, U, V planes) with linear
// filtering and edge clamping; pixel data is uploaded each frame in onDraw().
void LiveVideo::loadTexture()
{
    glGenTextures(1, &_textureY);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenTextures(1, &_textureU);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenTextures(1, &_textureV);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

// Builds a full-screen quad (triangle strip) with interleaved XYZ + UV data
// and binds it to the shader's vertexIn / textureIn attributes via a VAO.
void LiveVideo::loadRectangle()
{
    // create the VAO
    glGenVertexArrays(1, &_gVAO);
    glBindVertexArray(_gVAO);

    // create the VBO
    glGenBuffers(1, &_gVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _gVBO);

    // interleaved vertex data
    GLfloat vertex[] = {
        //  X      Y     Z      U     V
        -1.0f,  1.0f, 0.0f,  0.0f, 1.0f,
         1.0f,  1.0f, 0.0f,  1.0f, 1.0f,
        -1.0f, -1.0f, 0.0f,  0.0f, 0.0f,
         1.0f, -1.0f, 0.0f,  1.0f, 0.0f,
    };
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);

    // bind positions to vertexIn (stride = 5 floats)
    glEnableVertexAttribArray(_glProgram->getAttribLocation("vertexIn"));
    glVertexAttribPointer(_glProgram->getAttribLocation("vertexIn"), 3, GL_FLOAT, GL_FALSE,
                          5 * sizeof(GLfloat), NULL);
    // bind texture coordinates to textureIn (offset = 3 floats)
    glEnableVertexAttribArray(_glProgram->getAttribLocation("textureIn"));
    glVertexAttribPointer(_glProgram->getAttribLocation("textureIn"), 2, GL_FLOAT, GL_TRUE,
                          5 * sizeof(GLfloat), (const GLvoid*)(3 * sizeof(GLfloat)));

    // unbind the VAO
    glBindVertexArray(0);
}

// Prepares the render command. NOTE(review): draw() below calls onDraw()
// directly and never submits _command to the renderer, so the command is
// currently unused.
void LiveVideo::initCommand()
{
    _command.init(_globalZOrder);
    _command.func = CC_CALLBACK_0(LiveVideo::onDraw, this);
}

void LiveVideo::draw(Renderer* render, const Mat4& transform, uint32_t flags)
{
    onDraw();
}

// Uploads the next due frame's Y/U/V planes and draws the full-screen quad.
// Skips the draw entirely when no frame is ready.
void LiveVideo::onDraw()
{
    now = getCurrentTime();
    char* buf = getBuff();
    if (buf == NULL)
    {
        return;   // nothing due yet
    }

    // clear everything to black
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    // bind the program (the shaders)
    _glProgram->use();

    /**
     * bindLiveVideoTexture2DN was added to cocos2d-x GL:: specifically for
     * this live-video demo (binds texture `id` on texture unit `unit`).
     * 0x1903 is GL_RED — NOTE(review): on OpenGL ES 2 the single-channel
     * format is usually GL_LUMINANCE (0x1909); confirm against the target GL
     * version.
     */
    GL::bindLiveVideoTexture2DN(0, _textureY);
    glTexImage2D(GL_TEXTURE_2D, 0, 0x1903, _pixel_w, _pixel_h, 0, 0x1903, GL_UNSIGNED_BYTE, buf);
    GLuint p = glGetUniformLocation(_glProgram->getProgram(), "tex_y");
    glUniform1i(p, 0);

    GL::bindLiveVideoTexture2DN(1, _textureU);
    glTexImage2D(GL_TEXTURE_2D, 0, 0x1903, _pixel_w / 2, _pixel_h / 2, 0, 0x1903, GL_UNSIGNED_BYTE,
                 buf + _pixel_w * _pixel_h);
    GLuint p1 = glGetUniformLocation(_glProgram->getProgram(), "tex_u");
    glUniform1i(p1, 1);

    GL::bindLiveVideoTexture2DN(2, _textureV);
    glTexImage2D(GL_TEXTURE_2D, 0, 0x1903, _pixel_w / 2, _pixel_h / 2, 0, 0x1903, GL_UNSIGNED_BYTE,
                 buf + _pixel_w * _pixel_h + _pixel_w * _pixel_h / 4);
    GLuint p2 = glGetUniformLocation(_glProgram->getProgram(), "tex_v");
    glUniform1i(p2, 2);

    buf = NULL;   // buffer is still owned by _data; getBuff() did not transfer it

    // bind the VAO (the quad) and draw it
    glBindVertexArray(_gVAO);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    // unbind the VAO, the texture and the program
    glBindVertexArray(0);
    glBindTexture(GL_TEXTURE_2D, 0);
    glUseProgram(0);
}

// Returns the buffer of the next frame that is due for display, dropping
// frames that are already late (per getDifferTime) as long as at least one
// frame remains. Also de-duplicates entries that alias the same buffer.
// The returned pointer stays owned by _data — callers must not free it.
char* LiveVideo::getBuff()
{
    char* buf = NULL;
    std::vector<FrameData>::iterator it;
    std::vector<FrameData>::iterator temp;
    for (it = _data.begin(); it != _data.end(); )
    {
        // defensive scan: erase any other entry pointing at the same buffer
        for (temp = _data.begin(); temp != _data.end(); )
        {
            if (temp != it && temp->buf == it->buf)
            {
                CCLOG("~!!!有两个指针指向同一个地址");
                temp = _data.erase(temp);
            }
            else
            {
                ++temp;
            }
        }
        if (getDifferTime(it) < 0 && _data.size() > 1)
        {
            // frame is late and not the last one — drop it
            if (it->buf)
            {
                delete[] it->buf;
                it->buf = NULL;
            }
            it = _data.erase(it);
        }
        else
        {
            buf = it->buf;
            ++it;
            break;
        }
    }
    return buf;
}

// How far in the future (ms, positive) or past (negative) frame `it` is
// relative to the wall clock, with ad-hoc drift correction: when the queue
// grows large the clock origin is nudged backwards (play faster); when it
// runs nearly empty it is nudged forwards (play slower). The thresholds
// (15/30/40/60 frames) and nudge sizes are empirical tuning values.
long LiveVideo::getDifferTime(std::vector<FrameData>::iterator it)
{
    int64_t differTime = (it->pts - _start_time) - (now - _system_start_time);
    if (_data.size() > 40)
    {
        if (_data.size() <= 60)
        {
            if (_system_start_time > now - (_data.at(_data.size() - 25).pts - _start_time))
            {
                _system_start_time -= 20;
            }
            differTime = (it->pts - _start_time) - (now - _system_start_time + (_data.size() - 30) * 20);
        }
        if (_data.size() > 60)
        {
            if (_system_start_time > now - (_data.at(_data.size() - 25).pts - _start_time))
            {
                _system_start_time -= 40;
            }
            differTime = (it->pts - _start_time) - (now - _system_start_time + (_data.size() - 30) * 40);
        }
    }
    if (_data.size() > 30 && _data.size() <= 40)
    {
        differTime = (it->pts - _start_time) - (now - _system_start_time + (_data.size() - 30) * 2);
    }
    if (_data.size() <= 15)
    {
        if (_system_start_time < now - (_data.begin()->pts - _start_time))
        {
            _system_start_time += 10;
        }
        differTime = (it->pts - _start_time) - (now - _system_start_time + (_data.size() - 15) * 40);
    }
    return differTime;
}

LiveVideo.h

//
//  LiveVideo.h
//  Game
//
//  Created by zhufu on 2017/3/1.
//
//#ifndef LiveVideo_h
#define LiveVideo_h#include <stdio.h>
#include "ui/cocosGUI.h"USING_NS_CC;#endif /* LiveVideo_h */
class LiveVideo : public Node
{
private:struct FrameData {int64_t pts;char* buf;};
public:static Scene* createScene();static LiveVideo* create();virtual bool init() override;void initEvents();int ffmpegDecode(const char* filePath);void flipVertical(int width, int height, char* arr);virtual void draw(Renderer* renderer, const Mat4 &transform, uint32_t flags) override;void loadShader();void loadTexture();void loadRectangle();void onDraw();void initCommand();long getCurrentTime();long getDifferTime(std::vector<FrameData>::iterator it);char* getBuff();void addPlayButton();void addPlayHKSButton();void addStopButton();void addRefreshButton();void play();void playHKS();void stop();void refresh();void setPlay(bool playFlag);bool isPlay();void clearBuf();
private:CustomCommand _command;GLProgram* _glProgram;bool _playFlag = false;GLuint _textureY;GLuint _textureU;GLuint _textureV;GLuint _gVAO = 0;GLuint _gVBO = 0;int _pixel_w = 320, _pixel_h = 180;std::vector<FrameData> _data;int _frameRate;int64_t _start_time;long _system_start_time;char* currentLivePath = new char[256];long now;
};

之后,再在AppDelegate.cpp里引用LiveVideo.h文件:

第二个框的配置:

fragmentShader.fsh

// Converts planar YUV420p input (one sampler per plane) to RGB.
varying vec2 textureOut;    // interpolated texture coordinate from the vertex shader

uniform sampler2D tex_y;    // luma plane
uniform sampler2D tex_u;    // chroma U plane (stored biased by +0.5)
uniform sampler2D tex_v;    // chroma V plane (stored biased by +0.5)

void main(void)
{
    // sample each plane; remove the 0.5 bias from the chroma channels
    vec3 yuv;
    yuv.x = texture2D(tex_y, textureOut).r;
    yuv.y = texture2D(tex_u, textureOut).r - 0.5;
    yuv.z = texture2D(tex_v, textureOut).r - 0.5;

    // YUV -> RGB conversion (column-major mat3, BT.601-style coefficients)
    vec3 rgb = mat3(1.0,      1.0,      1.0,
                    0.0,     -0.39465,  2.03211,
                    1.13983, -0.58060,  0.0) * yuv;

    gl_FragColor = vec4(rgb, 1.0);
}

vertexShader.vsh

// Pass-through vertex shader for the full-screen quad: positions are already
// in clip space, texture coordinates are forwarded unchanged.
attribute vec3 vertexIn;    // clip-space position
attribute vec2 textureIn;   // texture coordinate

varying vec2 textureOut;    // handed to the fragment shader

void main(void)
{
    textureOut  = textureIn;
    gl_Position = vec4(vertexIn, 1.0);
}

右击Resources,出现如图:

点击“Add Files to ‘Game’…”菜单项,出现如图:

选择fshader文件夹,再点Add,完成第二个框的配置。

第三个框的配置:

把开始编译好的FFmpeg-iOS文件夹复制到如图路径:

再:

选择FFmpeg-iOS文件夹加入到项目中:

。。。。。

完成配置,运行。。。。。

最后,大家如果配置的时候出现问题可以留言或者直接下载项目https://github.com/zhu12345618/ffmpeg_ios_Test

cocos2dx跨平台直播实例-ffmpeg-ios篇相关推荐

  1. (转)【如何快速的开发一个完整的iOS直播app】(原理篇)

    原文链接:https://www.jianshu.com/p/bd42bacbe4cc [如何快速的开发一个完整的iOS直播app](原理篇) [如何快速的开发一个完整的iOS直播app](原理篇) ...

  2. iOS动手做一个直播app开发(代码篇)

    iOS动手做一个直播app开发(代码篇) ###开篇 好久没写简书,因为好奇的我跑去学习直播了,今天就分享一下我的感慨. 目前为止直播还是比较热点的技术的,简书,git上有几篇阅读量和含金量都不错的文 ...

  3. 【如何快速的开发一个简单的iOS直播app】(代码篇)

    开篇([如何快速的开发一个完整的iOS直播app](原理篇) ) 好久没写简书,因为好奇的我跑去学习直播了,今天就分享一下我的感慨. 目前为止直播还是比较热点的技术的,简书,git上有几篇阅读量和含金 ...

  4. vue+axios+qs序列化 “三步解析”【含demo实例】- 代码篇

    文章目录 qs序列化:是什么?为什么?怎么办?`实例截图参考` 一.`(简单了解)` · `三步解析 ` 序列化是一种用来处理对象流的机制: 对象.文件.数据,有许多不同的格式,很难统一传输和保存 序 ...

  5. iOS面试准备 - ios篇

    iOS面试准备 - ios篇 ios面试准备 - objective-c篇 ios面试准备 - 网络篇 IOS面试准备 - C++篇 iOS面试准备 - 其他篇 运行时 https://juejin. ...

  6. GJM:移动App入侵与逆向破解技术-iOS篇 【转载】

    GJM:移动App入侵与逆向破解技术-iOS篇 [转载] 转载: URL http://dev.qq.com/topic/577e0acc896e9ebb6865f321 如果您有耐心看完这篇文章,您 ...

  7. 集成 jpush-react-native 常见问题汇总 ( iOS 篇)

    给 iOS 应用添加推送功能是一件比较麻烦的事情,本篇文章收集了集成 jpush-react-native 的常见问题,目的是为了帮助用户更好地排查问题 1.收不到推送 确保是在真机上测试,而不是在模 ...

  8. Cocos2d-x Lua中实例:帧动画使用

    Cocos2d-x Lua中实例:帧动画使用 下面我们通过一个实例介绍一下帧动画的使用,这个实例如下图所示,点击Go按钮开始播放动画,这时候播放按钮标题变为Stop,点击Stop按钮可以停止播放动画. ...

  9. 直播绿幕抠图的例子(绿幕抠图直播实例参考)

    阿酷TONY  / 2022-11-21 / 长沙 什么是绿幕抠图: 设定绿幕或绿布,做直播软件抠图,这时绿幕绿布就可以被实时的抠掉,绿色就变成透明了,只剩下绿幕外的人物,此时添加上直播的背景画质,就 ...

最新文章

  1. windows2003系列(之)搭建DHCP服务与中继代理图解
  2. 增强学习(一) ----- 基本概念
  3. 【Zabbix】使用dbforbbix 2.2-beta监控Redhat 7.0上的Oracle、Mysql
  4. Java工作笔记-AJAX实现整体不变,局部更新(与整体刷新比较)
  5. JEECG 版本更新日志
  6. Mac勿扰模式使用技巧
  7. FLEX 与JAVA的LCDS BLAZEDS配置.
  8. 本地通过secureCRT连接虚拟机中CentOS7
  9. 生活中的设计模式之状态(State)模式
  10. 【24】NumPy IO
  11. 小学计算机二课堂活动总结,康宁路学校“信息技术与课堂教学深度融合”活动总结...
  12. 关于VS2005中C#代码用F12转到定义时,总是显示从元数据的问题?
  13. python自然语言处理实战源代码下载_NLP学习:涂铭《Python自然语言处理实战核心技术与算法》PDF+源代码...
  14. 私有云服务器和公有云服务器的区别你真的知道吗?
  15. 吴军的《见识》书的核心内容
  16. armbian清理_N1盒子armbian/ubuntu/linux系统修改日志输出到内存
  17. 如何将照片从iPhone导入到Mac
  18. [原创] HBuildX,微信小程序模拟器报错(如若已在管理后台更新域名配置,请刷新项目配置后重新编译项目)
  19. Hostker云主机
  20. python爬虫——正则获取手机号

热门文章

  1. mingw-w64-dgn
  2. python3 if语句嵌套_3.2.3 if语句的嵌套
  3. 2023最新仿今日头条新闻网站源码+带自动采集API更新文章
  4. Python画樱花树 !
  5. React飞行日记(七) - 在React中使用DOM操作
  6. 微信 Ubuntu个人服务器环境搭建
  7. Gdiplus读取PND2
  8. 开机总显示SQL server服务器,sql server 2008启动服务是:提示请求失败或者服务无及时响应...
  9. 职场交往礼仪——会见
  10. Android单元测试网络请求问题