刚开始接触live555,在live555\testProgs中有很多很好的例子来讲解live555各方面的应用;

但是这些例子都是以文件形式提供给服务器,再广播出来的;而很多情况下我们需要的是直播,就不能再用文件的形式来操作了,

也试过用命名管道的方式,在linux上是可以的,但是在安卓上兼容性就不是很好了,像权限问题,以及命名管道不能在某种格式的内存中使用等,

所以本文就是描述怎样实现将已经成流的H264数据发送到网络(根据示例testH264VideoStreamer.cpp修改)。

核心部分是对类 FramedFileSource 的继承,即自定义的 MyByteStreamFileSource 类。

流程:

  1. 一个线程将H264数据从文件中读取出来放到缓存
  2. live555服务器不停地从缓存中获取数据
  3. live555广播出去
  1. 本地缓存需要的数据结构:

    #include <pthread.h>
    #include <semaphore.h>
    #include <string.h>
    #include <sys/types.h>
    #include <stdlib.h>
    #include <stdio.h>#define STATIC_MEMORY#define FRAME_MEM_SIZE (1920*1080*2)
    #define MAX_QUENE_LENGTH 10typedef struct NODE{void *msg;unsigned int msgsize;struct NODE *next;
    }Node_t,*pNode_t;typedef struct{//depend on Linux OSpthread_mutex_t qlock;sem_t sem;int q_num;pNode_t q_head;
    }Quene_t,*pQuene_t;static Quene_t ListData;
    static unsigned char availibleNode[MAX_QUENE_LENGTH]={0,0,0,0,0,0,0,0,0,0};
    static Node_t nodeMemStore[MAX_QUENE_LENGTH];
    static unsigned char availibleFrame[MAX_QUENE_LENGTH]={0,0,0,0,0,0,0,0,0,0};
    static char frameMemStore[MAX_QUENE_LENGTH][FRAME_MEM_SIZE];
    
  2. 缓存申请和释放:
    static void *mallocNode(void)
    {unsigned char i = 0;for(i=0;i<MAX_QUENE_LENGTH;i++){if(availibleNode[i] == 0){availibleNode[i] = 1;return &nodeMemStore[i];}}return NULL;
    }static void freeNode(pNode_t node)
    {unsigned char i = 0;for(i=0;i<MAX_QUENE_LENGTH;i++){if(&nodeMemStore[i] == node)availibleNode[i] = 0;}
    }static void *mallocFrame(void)
    {unsigned char i = 0;for(i=0;i<MAX_QUENE_LENGTH;i++){if(availibleFrame[i] == 0){availibleFrame[i] = 1;return &frameMemStore[i];}}return NULL;
    }static void freeFrame(void* frame)
    {unsigned char i = 0;for(i=0;i<MAX_QUENE_LENGTH;i++){if(&frameMemStore[i] == frame)availibleFrame[i] = 0;}
    }
  3. 缓存中数据的出入:
    static int PutData(void * msg , unsigned int * size)
    {int ret = -1;dataLock();if(ListData.q_num < MAX_QUENE_LENGTH){
    #ifndef STATIC_MEMORYpNode_t node = malloc(sizeof(Node_t));
    #elsepNode_t node = (pNode_t)mallocNode();
    #endifif(node == NULL){ret = -1;}else{
    #ifndef STATIC_MEMORYnode->msg = malloc(*size);
    #elsenode->msg = mallocFrame();
    #endifif(node->msg == NULL){
    #ifndef STATIC_MEMORYfree(node);
    #elsefreeNode(node);
    #endifret = -1;}else{int i = ListData.q_num;pNode_t n = ListData.q_head;memcpy(node->msg,msg,*size);node->msgsize = *size;node->next = NULL;if(i == 0){ListData.q_head = node;}else{while(i-1){n = n->next;i--;}n->next = node;}ListData.q_num += 1;ret = 0;}}}dataUnlock();//emitDataSignal();return ret;
    }static int RemoveData(void)
    {int ret = -1;dataLock();if(0 == ListData.q_num)ret = -1;else{pNode_t n = ListData.q_head;ListData.q_head = ListData.q_head->next;ListData.q_num -= 1;
    #ifndef STATIC_MEMORYfree(n->msg);free(n);
    #elsefreeFrame(n->msg);n->msgsize = 0;freeNode(n);
    #endifret = 0;}dataUnlock();return ret;
    }
    static unsigned int GetData(unsigned char * framebuf , unsigned int size)
    {unsigned int ret = 0;dataLock();if(ListData.q_num == 0){ret = 0;dataUnlock();return ret;}else{if(size < ListData.q_head->msgsize){dataUnlock();printf("error:%s line:%d %d %d\n",__func__,__LINE__,size,ListData.q_head->msgsize);RemoveData();return ret;}memcpy(framebuf,ListData.q_head->msg,ListData.q_head->msgsize);ret = ListData.q_head->msgsize;dataUnlock();RemoveData();return ret;}
    }
    
  4. 初始化缓存池:
    //depend on Linux OS
    static int InitDataList(void)
    {if(pthread_mutex_init(&ListData.qlock,NULL) != 0)return -1;memset(availibleNode,0,sizeof(availibleNode));memset(nodeMemStore,0,sizeof(nodeMemStore));memset(availibleFrame,0,sizeof(availibleFrame));memset(frameMemStore,0,sizeof(frameMemStore));return 0;
    }
  5. 缓存池同步操作:
    //depend on Linux OS
    static int dataLock(void)
    {return pthread_mutex_lock(&ListData.qlock);
    }
    //depend on Linux OS
    static int dataUnlock(void)
    {return pthread_mutex_unlock(&ListData.qlock);
    }
    
  6. 获取缓存中下一个数据的大小:
    static unsigned int GetNextFrameSize(void)
    {unsigned int szie = 0;dataLock();if(ListData.q_num != 0)szie = ListData.q_head->msgsize;dataUnlock();return szie;
    }
  7. live555服务器需要的资源结构:
    #include <liveMedia.hh>
    #include <BasicUsageEnvironment.hh>
    #include <GroupsockHelper.hh>#include <sys/types.h>
    #include <sys/stat.h>
    #include <unistd.h>
    #include <fcntl.h>
    #include <errno.h>typedef enum{server_idle = 0,server_inited,server_pause
    }service_status_t;typedef struct{service_status_t inited;UsageEnvironment* env;TaskScheduler* scheduler;unsigned short rtpPortNum;unsigned short rtcpPortNum;unsigned char ttl;Port *rtpPort;Port *rtcpPort;Groupsock *rtpGroupsock;Groupsock *rtcpGroupsock;H264VideoStreamFramer* videoSource;RTPSink* videoSink;RTCPInstance* rtcp;RTSPServer* rtspServer;ServerMediaSession* sms;PassiveServerMediaSubsession *passiveSubsession;MyByteStreamFileSource* fileSource;pthread_t putdata_pid;pthread_t loop_pid;
    }LocalServer_t;#define SIZES    2048
    #define BANDWIDTH   (512)
    #define MAX_CNAME_LEN   100static const char *scrpath = "test1.h264";
    static FILE* fd = NULL;
    static unsigned char running = 0;
    static unsigned char CNAME[MAX_CNAME_LEN+1];
    static LocalServer_t SeverSetting = {.inited = server_idle,.env = NULL,.scheduler = NULL,.rtpPortNum = 0,.rtcpPortNum = 0,.ttl = 0,.rtpPort = NULL,.rtcpPort = NULL,.rtpGroupsock = NULL,.rtcpGroupsock = NULL,.videoSource = NULL,.videoSink = NULL,.rtcp = NULL,.rtspServer = NULL,.sms = NULL,.passiveSubsession = NULL,.fileSource = NULL,.putdata_pid = 0,.loop_pid = 0,
    };
    
  8. live555服务器的注册和开启:
    static int start_play(void)
    {// Open the input file as a 'byte-stream file source':SeverSetting.fileSource = MyByteStreamFileSource::createNew(*SeverSetting.env, NULL);if(!SeverSetting.fileSource){//*SeverSetting.env << "Unable to open file \"" << inputFileName<< "\" as a byte-stream file source\n";return -1;}// Create a framer for the Video Elementary Stream:SeverSetting.videoSource = H264VideoStreamFramer::createNew(*SeverSetting.env,(FramedSource*)SeverSetting.fileSource);if(!SeverSetting.videoSource){printf("%s %d\r\n",__func__,__LINE__);return -1;}// Finally, start playing:*SeverSetting.env << "start playing..."<<__LINE__<<"\n";SeverSetting.videoSink->startPlaying(*SeverSetting.videoSource, NULL, NULL);return 0;
    }int StartRtspServer(char *url_suffix , int port)
    {printf("%s %d[%d]\r\n",__func__,__LINE__,SeverSetting.inited);if(SeverSetting.inited == server_pause){char* url = SeverSetting.rtspServer->rtspURL(SeverSetting.sms);*SeverSetting.env << "Play this stream using the URL \"" << url << "\"\n";delete[] url;*SeverSetting.env << "start playing..."<<__LINE__<<"\n";//SeverSetting.videoSink->startPlaying(*SeverSetting.videoSource, NULL, NULL);SeverSetting.inited = server_inited;return 0;}else if(SeverSetting.inited == server_inited)return -1;if(InitDataList()){printf("InitDataLock fail\n");return -1;}SeverSetting.scheduler = BasicTaskScheduler::createNew();if(!SeverSetting.scheduler){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.env = BasicUsageEnvironment::createNew(*SeverSetting.scheduler);if(!SeverSetting.env){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.rtpPortNum = 18888;SeverSetting.rtcpPortNum = SeverSetting.rtpPortNum+1;SeverSetting.ttl = 255;SeverSetting.rtpPort = new Port(SeverSetting.rtpPortNum);if(!SeverSetting.rtpPort){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.rtcpPort = new Port(SeverSetting.rtcpPortNum);if(!SeverSetting.rtcpPort){printf("%s %d\r\n",__func__,__LINE__);return -1;}struct in_addr destinationAddress;destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*SeverSetting.env);SeverSetting.rtpGroupsock = new Groupsock(*SeverSetting.env,destinationAddress,*SeverSetting.rtpPort,SeverSetting.ttl);if(!SeverSetting.rtpGroupsock){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.rtpGroupsock->multicastSendOnly(); // we're a SSM sourceSeverSetting.rtcpGroupsock = new Groupsock(*SeverSetting.env,destinationAddress,*SeverSetting.rtcpPort,SeverSetting.ttl);if(!SeverSetting.rtcpGroupsock){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.rtcpGroupsock->multicastSendOnly(); // we're a SSM source// Create a 'H264 Video RTP' sink from the RTP 'groupsock':OutPacketBuffer::maxSize = 1024*1024;SeverSetting.videoSink = 
H264VideoRTPSink::createNew(*SeverSetting.env,SeverSetting.rtpGroupsock,96);if(!SeverSetting.videoSink){printf("%s %d\r\n",__func__,__LINE__);return -1;}// Create (and start) a 'RTCP instance' for this RTP sink:memset(CNAME,0,MAX_CNAME_LEN);gethostname((char*)CNAME, MAX_CNAME_LEN);CNAME[MAX_CNAME_LEN] = '\0'; // just in caseSeverSetting.rtcp = RTCPInstance::createNew(*SeverSetting.env,SeverSetting.rtcpGroupsock,BANDWIDTH,// in kbps; for RTCP b/w shareCNAME,SeverSetting.videoSink, NULL /* we're a server */,True /* we're a SSM source */);if(!SeverSetting.rtcp){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.rtspServer = RTSPServer::createNew(*SeverSetting.env, port);if(SeverSetting.rtspServer == NULL){*SeverSetting.env << "Failed to create RTSP server: " << SeverSetting.env->getResultMsg() << "\n";return -1;}SeverSetting.sms = ServerMediaSession::createNew(*SeverSetting.env,url_suffix,NULL,"Session streamed by \"testH264VideoStreamer\"",True);if(!SeverSetting.sms){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.passiveSubsession = PassiveServerMediaSubsession::createNew(*SeverSetting.videoSink,SeverSetting.rtcp);if(!SeverSetting.passiveSubsession){printf("%s %d\r\n",__func__,__LINE__);return -1;}SeverSetting.sms->addSubsession(SeverSetting.passiveSubsession);SeverSetting.rtspServer->addServerMediaSession(SeverSetting.sms);char* url = SeverSetting.rtspServer->rtspURL(SeverSetting.sms);*SeverSetting.env << "Play this stream using the URL \"" << url << "\"\n";delete[] url;// Start the streaming:*SeverSetting.env << "Beginning streaming...\n";if(start_play()){printf("%s %d\r\n",__func__,__LINE__);return -1;}run_tasks();SeverSetting.inited = server_inited;return 0;
    }
  9. 读取数据放到缓存中:
    int InsertFrame(char * buf ,int len)
    {if(SeverSetting.inited == server_inited)return PutData((void *)buf, (unsigned int*)&len);elsereturn -1;
    }static void* receive_thread(void* param)
    {unsigned int len = 0;unsigned char buf[FRAME_MEM_SIZE];printf("%s %d\r\n",__func__,__LINE__);fd = fopen(scrpath,"rb");if(fd)printf("open ok\n");else{printf("open \"%s\" fail\n",scrpath);_exit(1);}printf("%s %d\n",__func__,__LINE__);running = 1;while(running){if((len=fread(buf,1,SIZES,fd))>0){
    f_try:if(InsertFrame((char*)buf,(int)len) < 0){usleep(100000);goto f_try;}}if(len < SIZES || len == 0)fseek(fd,0,SEEK_SET);}fclose(fd);printf("%s %d\n",__func__,__LINE__);pthread_exit(NULL);return NULL;
    }static void* loop_thread(void* param)
    {printf("task schedule start...[%d]\n",__LINE__);SeverSetting.env->taskScheduler().doEventLoop();printf("task schedule start...[%d]\n",__LINE__);pthread_exit(NULL);return NULL;
    }static void run_tasks(void)
    {pthread_create(&SeverSetting.putdata_pid,NULL,receive_thread,NULL);pthread_detach(SeverSetting.putdata_pid);pthread_create(&SeverSetting.loop_pid,NULL,loop_thread,NULL);pthread_detach(SeverSetting.loop_pid);
    }
    
  10. 停止服务器:
    void StopRtspServer(void)
    {printf("%s relese\n",__func__);if(SeverSetting.inited == server_inited)SeverSetting.inited = server_pause;//SeverSetting.videoSink->stopPlaying();//Medium::close(SeverSetting.videoSource);   printf("%s relese all\n",__func__);
    }
    
  11. 主函数入口:
    int main(int argc , char *argv[])
    {StartRtspServer((char*)"xxx",8554);while(1){sleep(10);}return 0;
    }
  12. 主要部分:对FramedFileSource的继承
    class MyByteStreamFileSource: public FramedFileSource {
    public:static MyByteStreamFileSource* createNew(UsageEnvironment& env,char const* fileName,unsigned preferredFrameSize = 0,unsigned playTimePerFrame = 0){MyByteStreamFileSource* newSource= new MyByteStreamFileSource(env, 0, preferredFrameSize, playTimePerFrame);newSource->fFileSize = 0;//GetFileSize(fileName, fid);printf(">fFileSize %d<:%llu\n",__LINE__,newSource->fFileSize);return newSource;}u_int64_t fileSize() const { return fFileSize; }// 0 means zero-length, unbounded, or unknownvoid seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0){printf("%s %d\n",__func__,__LINE__);fNumBytesToStream = numBytesToStream;fLimitNumBytesToStream = fNumBytesToStream > 0;}// if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOFvoid seekToByteRelative(int64_t offset, u_int64_t numBytesToStream = 0){printf("%s %d\n",__func__,__LINE__);fNumBytesToStream = numBytesToStream;fLimitNumBytesToStream = fNumBytesToStream > 0;}void seekToEnd()// to force EOF handling on the next read{printf("%s %d\n",__func__,__LINE__);}protected:MyByteStreamFileSource(UsageEnvironment& env, FILE* fid,unsigned preferredFrameSize,unsigned playTimePerFrame): FramedFileSource(env, fid), fFileSize(0), fPreferredFrameSize(preferredFrameSize),fPlayTimePerFrame(playTimePerFrame), fLastPlayTime(0),fHaveStartedReading(False), fLimitNumBytesToStream(False), fNumBytesToStream(0){printf(">%d<\n",__LINE__);overFlowNum = 0;overFlowIndex = 0;// Test whether the file is seekablefFidIsSeekable = 0;printf(">fFidIsSeekable %d<:%d\n",__LINE__,fFidIsSeekable);}virtual ~MyByteStreamFileSource(){if (fFid == NULL)return;CloseInputFile(fFid);}static void fileReadableHandler(void* source){MyByteStreamFileSource* pThis = (MyByteStreamFileSource*)source;if (!pThis->isCurrentlyAwaitingData()){pThis->doStopGettingFrames(); // we're not ready for the data yetreturn;}pThis->doReadFromFile();}void doReadFromFile(){//printf("%s %d\n",__func__,__LINE__);// Try to 
read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize){printf("fNumBytesToStream:%llu fMaxSize:%u fPreferredFrameSize:%u\n",fNumBytesToStream,fMaxSize,fPreferredFrameSize);fMaxSize = (unsigned)fNumBytesToStream;}if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize){printf("fPreferredFrameSize:%llu fMaxSize:%u fPreferredFrameSize:%u\n",fNumBytesToStream,fMaxSize,fPreferredFrameSize);fMaxSize = fPreferredFrameSize;}
    f_again:if(overFlowNum){if(overFlowNum - overFlowIndex > fMaxSize){memcpy(fTo,&overFlow[overFlowIndex],fMaxSize);fFrameSize = fMaxSize;overFlowIndex += fFrameSize;printf("%s %d>\n",__func__,__LINE__);}else{memcpy(fTo,&overFlow[overFlowIndex],overFlowNum - overFlowIndex);fFrameSize = overFlowNum - overFlowIndex;overFlowIndex += fFrameSize;printf("%s %d<\n",__func__,__LINE__);}if(overFlowNum == overFlowIndex){overFlowNum = 0;overFlowIndex = 0;printf("%s %d reset\n",__func__,__LINE__);}}else if(overFlowNum == 0 && GetNextFrameSize() > fMaxSize){if((overFlowNum=GetData(overFlow,FRAME_MEM_SIZE)) > 0){printf("%s %d max\n",__func__,__LINE__);memcpy(fTo,overFlow,fMaxSize);fFrameSize = fMaxSize;overFlowIndex = fMaxSize;if(overFlowNum == overFlowIndex){overFlowNum = 0;overFlowIndex = 0;}}else{printf("%s sleep %d\n",__func__,__LINE__);usleep(50000);goto f_again;}}else if((fFrameSize = GetData(fTo,fMaxSize)) == 0){printf("%s no src data %d\n",__func__,__LINE__);usleep(50000);goto f_again;}if (fFrameSize == 0){handleClosure();return;}fNumBytesToStream -= fFrameSize;// Set the 'presentation time':if (fPlayTimePerFrame > 0 && fPreferredFrameSize > 0){if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0){// This is the first frame, so use the current time:gettimeofday(&fPresentationTime, NULL);}else{// Increment by the play time of the previous data:unsigned uSeconds    = fPresentationTime.tv_usec + fLastPlayTime;fPresentationTime.tv_sec += uSeconds/1000000;fPresentationTime.tv_usec = uSeconds%1000000;}// Remember the play time of this data:fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;fDurationInMicroseconds = fLastPlayTime;}else{// We don't know a specific play time duration for this data,// so just record the current time as being the 'presentation time':gettimeofday(&fPresentationTime, NULL);}// Because the file read was done from the event loop, we can call the// 'after getting' function directly, without risk of infinite 
recursion:FramedSource::afterGetting(this);}private:// redefined virtual functions:virtual void doGetNextFrame(){//printf("%s %d\n",__func__,__LINE__);envir().taskScheduler().scheduleDelayedTask(0,fileReadableHandler, this);}virtual void doStopGettingFrames(){//printf("%s %d\n",__func__,__LINE__);envir().taskScheduler().unscheduleDelayedTask(nextTask());}protected:u_int64_t fFileSize;u_int64_t overFlowNum;u_int64_t overFlowIndex;unsigned char overFlow[FRAME_MEM_SIZE];private:unsigned fPreferredFrameSize;unsigned fPlayTimePerFrame;Boolean fFidIsSeekable;unsigned fLastPlayTime;Boolean fHaveStartedReading;Boolean fLimitNumBytesToStream;u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True
    };
    

值得注意的是:在类 MyByteStreamFileSource 的 doReadFromFile 中,往数据区 fTo 拷贝数据时,当前一帧数据可能会超过 fMaxSize 的大小;如果往 fTo 中拷入超过 fMaxSize 大小的数据,会导致 live555 报错。所以我们在这里采用一个中间缓存:当 fMaxSize 小于下一帧的大小时,先将下一帧的数据拷到中间缓存中,再从中间缓存中拷贝 fMaxSize 大小的数据到 fTo 中,下次调用时再把缓存中剩余的数据继续放入 fTo 中。

此处有源码下载:http://download.csdn.net/download/xushan239/10221824

live555直播h264视频流相关推荐

  1. 基于hi3531的live555直播h264视频流

    刚开始接触live555,在live555\testProgs中有很多很好的例子来讲解live555各方面的应用: 但是都是以文件形式给到服务器中,然后广播出来的,但是很多情况下我们是需要直播的,那就 ...

  2. 通过live555实现H264 RTSP直播(Windows版)

    为何标明"Windows版",因为firehood大神已经实现了linux版:通过live555实现H264 RTSP直播 相关文章: [1]Win7(Windows 7)下用VS ...

  3. 通过live555实现H264 RTSP直播

    转载自:http://blog.csdn.net/firehood_/article/details/16844397 前面的文章中介绍了<H264视频通过RTMP流直播>,下面将介绍一下 ...

  4. live555 android 直播,通过live555实现H264 RTSP直播

    前面的文章中介绍了<H264视频通过RTMP流直播>,下面将介绍一下如何将H264实时视频通过RTSP直播. 实现思路是将视频流发送给live555, 由live555来实现H264数据流 ...

  5. OpenCV实时美颜摄像并生成H264视频流

    为什么美颜摄像这么简单的功能,OpenCV这个开源项目网上很少有代码呢?对于在windows平台下,生成h264视频流也比价麻烦,没有现成的api可以使用,需要借助MinGw编译libx264,或者f ...

  6. Live555接收h264使用ffmpeg解码为YUV420

    本文概要: 本文介绍了一种常用成熟的多媒体解码方案.使用live555作为流媒体数据源,建立rtsp会话请求h264数据流.后端使用ffmpeg解码h264流并保存为yuv420格式. 该方案比较成熟 ...

  7. 使用live555 直播来自v4l2的摄像头数据

    使用live555 直播来自v4l2的摄像头数据,在我的这个工程中,基本思路是:使用V4L2采集摄像头数据,然后使用x264库对摄像头数据进行编码,编成H264数据格式,然后把数据写入到命名管道中.最 ...

  8. 【网络通信 -- 直播】视频流编码 -- H.264 编码的一般概念

    [网络通信 -- 直播]视频流编码 -- H.264 编码的一般概念 [1]色彩空间 -- RGB YUV YCbCr RGB 依据人眼识别的颜色定义出的空间,可表示大部分颜色: YUV " ...

  9. 【网络通信 -- 直播】视频流编码 -- H.264 相关属性总结

    [网络通信 -- 直播]视频流编码 -- H.264 相关属性总结 [1]H.264 的 Profile 说明 H.264 Profile 的分类 Baseline 支持 I/P 帧,只支持无交错(P ...

最新文章

  1. Spark的transformation和action算子简介
  2. JAVA数据结构 线性表的链式存储及其实现
  3. JQ-CSS-实现导航菜单效果
  4. 四川大学计算机学院 期末考试安排,四川大学《计算机组成原理》2018期末考试B卷.doc...
  5. 【QGIS入门实战精品教程】9.1:QGIS构建泰森多边形(Thiessen Polygon)实例精解
  6. MiniDao_1.6.4 版本发布,轻量级Java持久化框架,Hibernate项目辅助利器
  7. Automatic Updates服务无法启动
  8. java进程CPU飙高
  9. PPT绘图保存为PDF的三种方式
  10. Thrift搭建分布式微服务(四)
  11. mysql 主从ppt_MYSQL主从复制和读写分离.ppt
  12. win7开机加速怎么设置|win7开机加速的方法
  13. axios+springboot 报错 POST http://localhost:8080/Login 404 (Not Found)
  14. ACL访问控制列表(访问控制、抓取感兴趣流)详解及基本ACL和高级ACL的配置。
  15. 词汇课程——导论(2)
  16. LWN: 华为EROFS能应对好损坏的文件系统数据吗?
  17. 【meng_项目】python+flask+html+css制作一个简单的生日祝福语网页
  18. webrtc部分手机黑屏的原因排查
  19. TCP/IP五层模型基本协议及相关报文知识
  20. 把深山红叶装进U盘的方法[转载]

热门文章

  1. Google的云计算
  2. 荣耀magic4.0系统是鸿蒙系统吗,快,不依赖于高刷!荣耀30 Pro+升级Magic UI 4.0后体验...
  3. 了解前端工程化之组件化——Vue组件
  4. python实现矩阵共轭和共轭转置
  5. Docker 安装 MySQL(借鉴菜鸟教程)
  6. Linux系统InfluxDB数据和日志目录迁移教程
  7. SpringBoot 自动配置原理(超级无敌详细)-2
  8. 假设你是个妹子,你敢这样谈恋爱吗?
  9. 使用tcpdump抓包实例
  10. 自制N6030便携收音机插头