lvrs.h //接口头文件

// lvrs.h -- public C-callable interface to the live-video RTSP server.
//
// BUG FIX: the original had "#define _LVRS_H_#ifdef __cplusplus" and
// "EXTERN_END#endif" fused onto single lines, which breaks the preprocessor
// conditional structure (the #else/#endif pair becomes unmatched).
#ifndef _LVRS_H_
#define _LVRS_H_

#ifdef __cplusplus
#define EXTERN            extern "C"
#define EXTERN_BEGIN      extern "C" {
#define EXTERN_END        }
#else
#define EXTERN            extern
#define EXTERN_BEGIN
#define EXTERN_END
#endif

EXTERN_BEGIN

/*
 * Frame-fetch callback supplied by the application.
 *   chId  - channel index
 *   srcId - stream index within the channel (presumably 0 = main, 1 = sub;
 *           see createNewSMS() in liveVideoRTSPServer.cpp)
 *   buf   - destination buffer to copy one encoded frame into
 *   size  - capacity of buf in bytes
 * Returns the number of bytes written (0 is treated as end-of-stream).
 */
typedef int (*myGetFrameCB)(int chId, int srcId, char* buf, int size);

/*
 * Starts the RTSP server and enters the live555 event loop.
 * NOTE: this call blocks forever (doEventLoop() does not return).
 */
EXTERN void* liveVideoServerStart(myGetFrameCB cb);

EXTERN_END

#endif /* _LVRS_H_ */

推流类 liveVideoRTSPServer.h

// liveVideoRTSPServer.h -- RTSP server subclass that serves live H.264 video
// fetched through an application-supplied GetFrameCB callback.
//
// BUG FIX: the original fused "#define ... #ifndef ..." and
// "#include <liveMedia.hh>class ..." onto single lines (broken preprocessor /
// syntax), and used GetFrameCB without including the header that declares it.
#ifndef _LIVE_VIDEO_RTSP_SERVER_H
#define _LIVE_VIDEO_RTSP_SERVER_H

#ifndef _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH
#include "RTSPServerSupportingHTTPStreaming.hh"
#endif
#include <liveMedia.hh>
#include "ByteFrameLiveVideoSource.hh" // declares GetFrameCB

class liveVideoRTSPServer : public RTSPServerSupportingHTTPStreaming {
public:
  // Factory: binds the RTSP port and remembers the frame-fetch callback.
  // Returns NULL if the port cannot be bound.
  static liveVideoRTSPServer* createNew(Port ourPort,
                                        UserAuthenticationDatabase* authDatabase,
                                        GetFrameCB cb,
                                        unsigned reclamationTestSeconds = 65);

protected:
  liveVideoRTSPServer(UsageEnvironment& env, int ourSocket, Port ourPort,
                      UserAuthenticationDatabase* authDatabase,
                      unsigned reclamationTestSeconds);
  // called only by createNew()
  virtual ~liveVideoRTSPServer();

protected: // redefined virtual functions
  // Creates (on first request) a ServerMediaSession for "chN/main|sub" names.
  virtual ServerMediaSession* lookupServerMediaSession(char const* streamName);

private:
  // NOTE(review): name contains a typo ("Fream"); kept unchanged because the
  // .cpp implementation refers to it by this spelling.
  GetFrameCB readFreamCb;

public:
  // Lazily-created process-wide usage environment (see getEnv()).
  static UsageEnvironment* s_env;
  static UsageEnvironment* getEnv();
};

#endif

liveVideoRTSPServer.cpp liveVideoRTSPServer实现

#include "liveVideoRTSPServer.h"
#include "BasicUsageEnvironment.hh"
#include "lvrs.h"
#include <string.h>
#include <iostream>
using namespace std;

UsageEnvironment* liveVideoRTSPServer::s_env = NULL;

// Lazily creates the process-wide UsageEnvironment singleton (and its
// TaskScheduler).  Never freed: it lives for the duration of the process.
UsageEnvironment* liveVideoRTSPServer::getEnv() {
  if (s_env == NULL) {
    cout << "create s_env" << endl;
    s_env = BasicUsageEnvironment::createNew(*BasicTaskScheduler::createNew());
    cout << "create s_env OK!" << endl;
  }
  return s_env;
}

// Factory: binds the RTSP port, constructs the server and stores the
// application's frame-fetch callback.  Returns NULL if the socket setup fails.
liveVideoRTSPServer*
liveVideoRTSPServer::createNew(Port ourPort, UserAuthenticationDatabase* authDatabase,
                               GetFrameCB cb, unsigned reclamationTestSeconds) {
  UsageEnvironment& env = *getEnv();
  int ourSocket = setUpOurSocket(env, ourPort);
  if (ourSocket == -1) return NULL;

  liveVideoRTSPServer* lvrs = new liveVideoRTSPServer(env, ourSocket, ourPort,
                                                      authDatabase, reclamationTestSeconds);
  lvrs->readFreamCb = cb;
  return lvrs;
}

liveVideoRTSPServer::liveVideoRTSPServer(UsageEnvironment& env, int ourSocket,
                                         Port ourPort,
                                         UserAuthenticationDatabase* authDatabase,
                                         unsigned reclamationTestSeconds)
  : RTSPServerSupportingHTTPStreaming(env, ourSocket, ourPort, authDatabase,
                                      reclamationTestSeconds) {
}

liveVideoRTSPServer::~liveVideoRTSPServer() {
}

static ServerMediaSession* createNewSMS(UsageEnvironment& env,
                                        char const* streamName, GetFrameCB cb); // forward

// Returns the ServerMediaSession for "streamName", creating and registering it
// on first use.
//
// BUG FIX: the original created and registered a brand-new session on EVERY
// lookup (the "else if (!smsExists)" guard was commented out), so each client
// request added a duplicate session with the same name.  Only create when the
// lookup misses.
ServerMediaSession*
liveVideoRTSPServer::lookupServerMediaSession(char const* streamName) {
  cout << "liveVideoRTSPServer::lookupServerMediaSession: " << streamName << endl;

  ServerMediaSession* sms = RTSPServer::lookupServerMediaSession(streamName);
  if (sms == NULL) {
    printf("cread sms--------------------> \r\n");
    // Create a new "ServerMediaSession" object for streaming from the encoder.
    sms = createNewSMS(envir(), streamName, readFreamCb);
    addServerMediaSession(sms);
  }
  return sms;
}
// Special code for handling Matroska files:
// NOTE(review): these Matroska helpers are copied from the LIVE555 media
// server.  Nothing else in this file creates a Matroska demux, so they appear
// to be unused leftovers -- confirm before removing.
static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
// Completion callback: records the newly created demux and sets the watch
// variable so a doEventLoop(&watchVariable) caller can resume.
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
  demux = newDemux;
  newMatroskaDemuxWatchVariable = 1;
}
// END Special code for handling Matroska files:#define NEW_SMS(description) do {\
char const* descStr = description\", streamed by the LIVE555 Media Server";\
sms = ServerMediaSession::createNew(env, streamName, streamName, descStr);\
} while(0)static ServerMediaSession* createNewSMS(UsageEnvironment& env,char const* streamName, GetFrameCB cb) {// Use the file name extension to determine the type of "ServerMediaSession":int chId,SrcId;int i;ServerMediaSession* sms = NULL;Boolean const reuseSource = False;char const* extension = strrchr(streamName, '/');char const* pstr = streamName;char chStr[10]={0} ;//pstr = streamName;if (extension == NULL) return NULL;for(i=0;i<strlen(streamName);i++){if(*pstr == '/'){break;    }chStr[i] = *pstr;pstr++;}chStr[i]='\0';if(strcmp(chStr,"ch0")){chId =0;}else if(strcmp(chStr,"ch1")){chId =0;}else return NULL;if (strcmp(extension, "/main") == 0) {SrcId = 0;   } else if (strcmp(extension, "/sub") == 0){SrcId = 1;}else return NULL;cout<<"create H264LiveVideoServerMediaSubsession"<<endl;NEW_SMS("H.264 Live Video");OutPacketBuffer::maxSize = 1920*1080*3/2; //6000000;//HIGH*WIDTH *3 /2 -> YUV4:2:0 // allow for some possibly large H.264 framessms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(env,cb, chId,SrcId, reuseSource));return sms;
}void* liveVideoServerStart(myGetFrameCB cb){RTSPServer* rtspServer;portNumBits rtspServerPortNum = 554;// Begin by setting up our usage environment://TaskScheduler* scheduler = BasicTaskScheduler::createNew();//UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum,NULL,(GetFrameCB)cb);if (rtspServer == NULL) {rtspServerPortNum = 8554;rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL,(GetFrameCB)cb);}if (rtspServer == NULL) {*liveVideoRTSPServer::getEnv() << "Failed to create RTSP server: " << liveVideoRTSPServer::getEnv()->getResultMsg() << "\n";exit(1);}char* urlPrefix = rtspServer->rtspURLPrefix();fprintf(stdout, "use like this:%s", urlPrefix);fprintf(stdout, "channel/srcch \n");liveVideoRTSPServer::getEnv()->taskScheduler().doEventLoop(); // does not returnreturn NULL;
}

H264LiveVideoServerMediaSubsession类实现

// H264LiveVideoServerMediaSubsession.hh -- on-demand subsession that frames
// live H.264 video pulled from a ByteFrameLiveVideoSource.
//
// BUG FIX: the original fused "#define ..._HH_#include ..." and
// "#endifclass ..." onto single lines, breaking the include guard.
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_

#include "ByteFrameLiveVideoSource.hh" // declares GetFrameCB
#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif

class H264LiveVideoServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
  // Factory: mchId selects the channel, msrcId the stream (0 main, 1 sub).
  static H264LiveVideoServerMediaSubsession*
  createNew(UsageEnvironment& env, GetFrameCB cb, int mchId, int msrcId,
            Boolean reuseFirstSource);

  // Used to implement "getAuxSDPLine()":
  void checkForAuxSDPLine1();
  void afterPlayingDummy1();

private: // redefined virtual functions
  virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                              unsigned& estBitrate);
  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                    unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* inputSource);

protected:
  H264LiveVideoServerMediaSubsession(UsageEnvironment& env, GetFrameCB cb,
                                     int mchId, int msrcId,
                                     Boolean reuseFirstSource);
  // called only by createNew()
  virtual ~H264LiveVideoServerMediaSubsession();

  // Signals the nested event loop run inside getAuxSDPLine().
  void setDoneFlag() { fDoneFlag = ~0; }

protected: // redefined virtual functions
  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);

protected:
  //virtual char const* sdpLines();
  GetFrameCB tempCb;      // application frame-fetch callback
  int chId;               // 0-1
  int srcId;              // 0 = main, 1 = sub
  char* fAuxSDPLine;      // cached aux SDP line (strDup'd)
  char fDoneFlag;         // used when setting up "fAuxSDPLine"
  RTPSink* fDummyRTPSink; // ditto
};

#endif
#include "H264LiveVideoServerMediaSubsession.hh"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include <iostream>
using namespace std;
// Trampolines used while probing for the aux SDP line (defined below).
static void afterPlayingDummy(void* clientData);
static void checkForAuxSDPLine(void* clientData);

// Factory: simply forwards to the protected constructor.
H264LiveVideoServerMediaSubsession*
H264LiveVideoServerMediaSubsession::createNew(UsageEnvironment& env,
                                              GetFrameCB cb,
                                              int mchId, int msrcId,
                                              Boolean reuseFirstSource) {
  return new H264LiveVideoServerMediaSubsession(env, cb, mchId, msrcId,
                                                reuseFirstSource);
}

// Builds the per-client source chain: a live byte-frame source wrapped in an
// H.264 stream framer.
FramedSource*
H264LiveVideoServerMediaSubsession::createNewStreamSource(unsigned clientSessionId,
                                                          unsigned& estBitrate) {
  estBitrate = 500; // kbps, estimate

  if (tempCb != NULL) {
    cout << "create new stream source------------------>" << endl;
  }
  printf("createNewStreamSource--------------====> \r\n");

  ByteFrameLiveVideoSource* src =
      ByteFrameLiveVideoSource::createNew(envir(), tempCb, chId, srcId);
  return H264VideoStreamFramer::createNew(envir(), src);
}
// Builds the per-client RTP sink for dynamically-typed H.264 payloads.
RTPSink*
H264LiveVideoServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
                                                     unsigned char rtpPayloadTypeIfDynamic,
                                                     FramedSource* inputSource) {
  cout << "H264LiveVideoServerMediaSubsession :: createNewRTPSink --------------->" << endl;
  return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
// Constructor: records the callback and channel/stream selectors; the SDP
// probing state (fAuxSDPLine/fDoneFlag/fDummyRTPSink) starts cleared.
H264LiveVideoServerMediaSubsession::H264LiveVideoServerMediaSubsession(
    UsageEnvironment& env, GetFrameCB cb, int mchId, int msrcId,
    Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource),
    tempCb(cb), chId(mchId), srcId(msrcId),
    fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}

H264LiveVideoServerMediaSubsession::~H264LiveVideoServerMediaSubsession() {
  // BUG FIX: fAuxSDPLine is allocated with strDup() (new char[]) in
  // checkForAuxSDPLine1(); the original empty destructor leaked it.
  delete[] fAuxSDPLine;
}
// Returns the H.264 aux SDP line ("profile-level-id" / "sprop-parameter-sets").
// These values are only known after the framer has consumed SPS/PPS from the
// stream, so we start a dummy playback into "rtpSink" and spin a NESTED event
// loop (keyed on fDoneFlag) until checkForAuxSDPLine1() captures the line.
char const* H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and
    // "sprop-parameter-sets") isn't known until we start reading the stream.  This means
    // that "rtpSink"s "auxSDPLine()" will be NULL initially, and we need to start reading
    // data until this changes.
    cout<<"getAuxSDPLine: "<<endl;
    fDummyRTPSink = rtpSink;

    // Start reading the stream.
    // NOTE(review): the original author flagged this call as problematic
    // ("this have probrem!!!") -- behavior kept as-is, confirm against the
    // live555 on-demand examples.
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
    cout<<"getAuxSDPLine: ok ? "<<endl;
  }

  // Block here (re-entering the scheduler) until setDoneFlag() fires.
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}

// C-style trampoline passed to startPlaying(); forwards to the member function.
static void afterPlayingDummy(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}

// Dummy playback finished (stream ended before the SDP line appeared):
// cancel the polling task and release the nested event loop.
void H264LiveVideoServerMediaSubsession::afterPlayingDummy1() {
  // Unschedule any pending 'checking' task:
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
  // Signal the event loop that we're done:
  setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData) {H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;subsess->checkForAuxSDPLine1();
}
void H264LiveVideoServerMediaSubsession::checkForAuxSDPLine1() {char const* dasl;if (fAuxSDPLine != NULL) {// Signal the event loop that we're done:setDoneFlag();} else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {fAuxSDPLine = strDup(dasl);fDummyRTPSink = NULL;// Signal the event loop that we're done:setDoneFlag();} else {// try again after a brief delay:int uSecsToDelay = 100000; // 100 msnextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,(TaskFunc*)checkForAuxSDPLine, this);}
}

ByteFrameLiveVideoSource实现

// ByteFrameLiveVideoSource.hh -- a FramedSource that pulls whole encoded
// frames from the application through a callback instead of reading a file.
//
// BUG FIX: the original fused "#define ..._HH_#ifndef ..." and
// "#endiftypedef ..." onto single lines, breaking the include guard.
#ifndef _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_
#define _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

// Application callback: copy one encoded frame for (chId, srcId) into "buf"
// (capacity "size" bytes) and return the number of bytes written; returning 0
// is treated as end-of-stream.
typedef int (*GetFrameCB)(int chId, int srcId, unsigned char* buf, int size);

class ByteFrameLiveVideoSource : public FramedSource {
public:
  static ByteFrameLiveVideoSource* createNew(UsageEnvironment& env,
                                             GetFrameCB funcCb,
                                             int chId = 0, int srcId = 0,
                                             unsigned preferredFrameSize = 0,
                                             unsigned playTimePerFrame = 0);
  //void seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0);
  // if "numBytesToStream" is >0, then we limit the stream to that number of bytes,
  // before treating it as EOF

protected:
  ByteFrameLiveVideoSource(UsageEnvironment& env, int mchId, int msrcId,
                           unsigned preferredFrameSize,
                           unsigned playTimePerFrame);
  // called only by createNew()
  virtual ~ByteFrameLiveVideoSource();

  // Background-read handler (used only in the asynchronous build).
  static void getFrameableHandler(ByteFrameLiveVideoSource* source, int mask);
  // Pulls one frame via "getFrame" and delivers it downstream.
  void doGetNextFrameFormEncoder();

private:
  // redefined virtual functions:
  virtual void doGetNextFrame();
  virtual void doStopGettingFrames();

  GetFrameCB getFrame; // application frame-fetch callback

private:
  int chId;
  int srcId;
  unsigned fPreferredFrameSize;
  unsigned fPlayTimePerFrame;
  Boolean fFidIsSeekable;      // NOTE(review): never initialized or read here
  unsigned fLastPlayTime;
  Boolean fHaveStartedReading;
  Boolean fLimitNumBytesToStream;
  u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True
};

#endif

#include "ByteFrameLiveVideoSource.hh"
#include "GroupsockHelper.hh"
#include <iostream>
using namespace std;

#define GETFRAME_HANDLER_ID 5006
// BUG FIX: in the original this #define was fused with the following return
// type ("...SYNCHRONOUSLYByteFrameLiveVideoSource*"), so the macro swallowed
// it and nothing compiled.  When defined, frames are pulled synchronously
// from doGetNextFrame(); otherwise a background read handler is used.
#define READ_FROM_FILES_SYNCHRONOUSLY

// Factory: constructs the source and attaches the frame-fetch callback.
ByteFrameLiveVideoSource*
ByteFrameLiveVideoSource::createNew(UsageEnvironment& env, GetFrameCB funcCb,
                                    int chId, int srcId,
                                    unsigned preferredFrameSize,
                                    unsigned playTimePerFrame) {
  ByteFrameLiveVideoSource* newSource =
      new ByteFrameLiveVideoSource(env, chId, srcId, preferredFrameSize,
                                   playTimePerFrame);
  newSource->getFrame = funcCb;
  return newSource;
}

ByteFrameLiveVideoSource::ByteFrameLiveVideoSource(UsageEnvironment& env,
                                                   int mchId, int msrcId,
                                                   unsigned preferredFrameSize,
                                                   unsigned playTimePerFrame)
  : FramedSource(env), chId(mchId), srcId(msrcId),
    fPreferredFrameSize(preferredFrameSize), fPlayTimePerFrame(playTimePerFrame),
    fLastPlayTime(0), fHaveStartedReading(False),
    fLimitNumBytesToStream(False), fNumBytesToStream(0) {
  // NOTE(review): fMaxSize is normally set by the framework before each
  // doGetNextFrame() call; pre-sizing it here to a worst-case 1080p YUV 4:2:0
  // frame looks like a workaround -- confirm intent.
  fMaxSize = 1920*1080*3/2;
}

ByteFrameLiveVideoSource::~ByteFrameLiveVideoSource() {
}

// Background-read handler (asynchronous build only): deliver a frame if the
// downstream reader is waiting, otherwise pause reading.
void ByteFrameLiveVideoSource::getFrameableHandler(ByteFrameLiveVideoSource* source,
                                                   int /*mask*/) {
  if (!source->isCurrentlyAwaitingData()) {
    source->doStopGettingFrames(); // we're not ready for the data yet
    return;
  }
  source->doGetNextFrameFormEncoder();
}
// Pulls one encoded frame from the application via "getFrame", stamps a
// presentation time on it, and hands it to the downstream reader.  A callback
// return of 0 bytes is treated as end-of-stream (handleClosure).
void ByteFrameLiveVideoSource:: doGetNextFrameFormEncoder(){
  // Try to read as many bytes as will fit in the buffer provided
  // (or "fPreferredFrameSize" if less):
  if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
    fMaxSize = (unsigned)fNumBytesToStream;
  }
  if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
    fMaxSize = fPreferredFrameSize;
  }
  // NOTE(review): per-frame logging on the delivery path -- consider removing.
  cout<<"famxSize: "<<fMaxSize<<endl;
  cout<<"fPreferredFrameSize: " <<fPreferredFrameSize<<endl;
  cout<<"fNumBytesToStream: " <<fNumBytesToStream<<endl;

  fFrameSize =0;
  if(getFrame != NULL){
    cout<<"fMaxSize: "<<fMaxSize<<endl;
    // Callback copies one encoder frame into fTo (at most fMaxSize bytes).
    fFrameSize = getFrame(chId,srcId,fTo,fMaxSize);
  }
  if (fFrameSize == 0) {
    // No data: treat as end-of-stream.
    handleClosure(this);
    return;
  }
  // NOTE(review): decremented even when fLimitNumBytesToStream is False
  // (value is then unused) -- same as the live555 ByteStreamFileSource.
  fNumBytesToStream -= fFrameSize;

  // Set the 'presentation time':
  if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0) {
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
      // This is the first frame, so use the current time:
      gettimeofday(&fPresentationTime, NULL);
    } else {
      // Increment by the play time of the previous data:
      unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
      fPresentationTime.tv_sec += uSeconds/1000000;
      fPresentationTime.tv_usec = uSeconds%1000000;
    }
    // Remember the play time of this data:
    fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
    fDurationInMicroseconds = fLastPlayTime;
  } else {
    // We don't know a specific play time duration for this data,
    // so just record the current time as being the 'presentation time':
    gettimeofday(&fPresentationTime, NULL);
  }

  // Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  // To avoid possible infinite recursion, we need to return to the event loop
  // to do this.
  // NOTE(review): the first argument of scheduleDelayedTask() is a delay in
  // MICROSECONDS; passing the handler-id constant (5006) as the delay looks
  // accidental -- confirm (0 is the usual value here).
  nextTask() = envir().taskScheduler().scheduleDelayedTask(GETFRAME_HANDLER_ID,
      (TaskFunc*)FramedSource::afterGetting, this);
#else
  // Because the read was done from the event loop, we can call the
  // 'after getting' function directly, without risk of infinite recursion:
  FramedSource::afterGetting(this);
#endif
}

// Framework entry point: called whenever the downstream reader wants the next
// frame.
void ByteFrameLiveVideoSource:: doGetNextFrame()
{
  if(fLimitNumBytesToStream && fNumBytesToStream == 0) {
    // Byte budget exhausted: signal end-of-stream.
    handleClosure(this);
    return;
  }
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
  // Synchronous build: fetch the frame right now.
  doGetNextFrameFormEncoder();
#else
  if (!fHaveStartedReading) {
    // Await readable data via a background handler:
    envir().taskScheduler().turnOnBackgroundReadHandling(GETFRAME_HANDLER_ID,
        (TaskScheduler::BackgroundHandlerProc*)&getFrameableHandler, this);
    fHaveStartedReading = True;
  }
#endif
}

// Framework entry point: stop delivering frames; cancel any pending delayed
// task and (asynchronous build) the background read handler.
void ByteFrameLiveVideoSource:: doStopGettingFrames() {
  envir().taskScheduler().unscheduleDelayedTask(nextTask());
#ifndef READ_FROM_FILES_SYNCHRONOUSLY
  envir().taskScheduler().turnOffBackgroundReadHandling(GETFRAME_HANDLER_ID);
  fHaveStartedReading = False;
#endif
}

测试

#include "lvrs.h"
#include <stdio.h>

// Dummy frame-fetch callback used to exercise the server.
//
// BUG FIX: the original always returned 1024 without ever writing "buf" and
// without checking "size" -- the streamer would then frame up to 1024 bytes of
// uninitialized memory (and read past the buffer if size < 1024).  Now the
// claimed byte count is clamped to the buffer and those bytes are zero-filled.
int readFrame(int chId, int srcId, char* buf, int size) {
  printf("get frame fafaslfjljslfs------------ \n");
  int n = (size < 1024) ? size : 1024;
  if (n < 0) n = 0;
  if (buf != NULL) {
    for (int i = 0; i < n; i++) buf[i] = 0; // placeholder payload
  }
  return n;
}
// Test driver: start the RTSP server with the dummy callback.
// Blocks forever inside liveVideoServerStart() (live555 event loop).
// BUG FIX: "void main" is ill-formed in standard C++; main must return int.
int main() {
  liveVideoServerStart(readFrame);
  return 0;
}

补充Live555推实时流相关推荐

  1. rtsp实时流通过rtmp推送到服务端

    rtsp实时流通过rtmp推送到服务端 很多朋友都会问到rtsp如何通过rtmp协议推送到服务端,正好前段时间开发了这个功能写在这里,和大家分享下. 首先我想说的是:ffmpeg可以实现这个功能.ff ...

  2. Nginx-rtmp 直播媒体实时流实现

    0. 前言 这段时间在搭建一个IPCamera项目服务器.视频点对点通话,客户端会查看设备端的音视频实时流.为了省流量,是通过P2P进行穿透.但是由于NAT设备的原因和IPV4的枯竭.有些设备是无法进 ...

  3. flink实时流遇到的问题排查——部分数据未落库redis问题

    flink实时流遇到的问题排查 1.技术和环境 2.问题表述 3.简化的代码 4.问题排查思路 5.结论 6.后续补充 1.技术和环境 技术:kafka.zookeeper.DataStream.re ...

  4. 基于Spark机器学习和实时流计算的智能推荐系统

    概要: 随着电子商务的高速发展和普及应用,个性化推荐的推荐系统已成为一个重要研究领域. 个性化推荐算法是推荐系统中最核心的技术,在很大程度上决定了电子商务推荐系统性能的优劣,决定着是否能够推荐用户真正 ...

  5. power bi 实时_Power BI中的实时流

    power bi 实时 The concept of the IOT (Internet of Things) is that every object that you might think of ...

  6. 国标MPEG-PS实时流播放器开发(附例子)

    公安部制定的GBT 28181标准广泛应用于安防领域,这个标准规定了传输的视音频数据要封装成PS流格式.PS格式(原名叫MPEG-PS)在很多领域已经应用了很长一段时间,特别是在安防.广播电视.影音制 ...

  7. 开源流媒体解决方案,流媒体服务器,推拉流,直播平台,SRS,WebRTC,移动端流媒体,网络会议,优秀博客资源等分享

    开源流媒体解决方案,流媒体服务器,推拉流,直播平台,SRS,WebRTC,移动端流媒体,网络会议,优秀博客资源等分享 一.优秀的流媒体博客资源 1.1 EasyNVR:专注于安防视频互联网化的技术 1 ...

  8. java web 流媒体播放_实时流(直播流)播放、上墙(大屏播放)解决方案

    场景描述 将实时流采集终端的视频数据实时推送到另外一个(多个)播放终端,完成远距离实时视频播放的功能.典型场景: (1)远程查看监控摄像头.选择指定摄像头,将该摄像头采集到的实时数据推送到指定播放终端 ...

  9. 视频直播技术干货:一文读懂主流视频直播系统的推拉流架构、传输协议等

    1.引言 随着移动网络网速的提升与资费的降低,视频直播作为一个新的娱乐方式已经被越来越多的用户逐渐接受.特别是最近这几年,视频直播已经不仅仅被运用在传统的秀场.游戏类板块,更是作为电商的一种新模式得到 ...

最新文章

  1. 聚合复合_【专家视觉】聚合物接枝多壁碳纳米管及其聚氨酯复合材料
  2. AI程序员,所有码农的梦想
  3. (DOM艺术) 实用的动画
  4. java 覆盖和隐藏_Java覆盖和隐藏2
  5. boot数据加解密 spring_springboot项目使用druid对数据库密码的加解密
  6. Python黑科技,教你学会Django系统错误监控
  7. 【数论】能量采集(P1447)
  8. UVA12542 LA6149 Prime Substring【筛选法+贪心】
  9. 创建第二个 vlan network vlan101 - 每天5分钟玩转 OpenStack(96)
  10. 【引用】关于close-on-exec标志
  11. sniffer抓包工具(中文版+视频教程)
  12. 简易抽奖软件逻辑实现
  13. 谱曲软件-MuseScore
  14. 微积分:2.2泰勒公式函数极值定积分
  15. 第二人生的源码分析(十九)人物组合显示
  16. 寻找发帖水王java_SWUST_OJ 水王发帖排序
  17. 华为:链路聚合配置LACP实现提高带宽
  18. 数学分析笔记-有限覆盖定理
  19. bootstrap 4 在VS中的使用
  20. 删除姓名、年龄重复的记录——数据库

热门文章

  1. UDT长度的含义是什么?
  2. 安装和删除Ubuntu双系统
  3. Ubuntu 18.04 安装RealSense D435教程
  4. intel realsense获取8位深度图
  5. SetFocus 方法
  6. 【unity】 untiy中如何导出FBX
  7. 数字签名(代码签名)流程和数字签名的验证
  8. CAD如何调整线形比例?
  9. 基于matlab的SMO实现
  10. 点击a标签,返回上一页