Streaming live webcam video from v4l2 with live555
2020-11-09 07:21:27  Editor: 小采



Building on the earlier post about capturing video with v4l2, this one uses live555 to publish the live stream over RTSP. capture.h, capture.cpp, vcompress.h and vcompress.cpp are covered in the previous articles; only v4l2_x264_service.cpp is listed here.
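For orientation, the interfaces this file assumes from those wrappers look roughly as follows. This is a sketch inferred from how the functions are called in the listing below, not a copy of the original headers; the exact signatures and in particular the field layout of Picture are assumptions.

// Sketch of the wrapper interfaces assumed by v4l2_x264_service.cpp
// (inferred from the calls below; the real declarations are in the
// capture.h / vcompress.h of the earlier articles).

// capture.h -- V4L2 capture with conversion to the requested pixel format
struct Picture {
    unsigned char *data[4];   // plane pointers (Y, U, V for YUV420P) -- layout assumed
    int stride[4];            // bytes per line of each plane -- layout assumed
};
void *capture_open (const char *dev, int width, int height, int pix_fmt);  // e.g. PIX_FMT_YUV420P
int   capture_get_picture (void *cap, Picture *pic);   // returns < 0 on error
int   capture_close (void *cap);

// vcompress.h -- x264 encoder wrapper
void *vc_open (int width, int height, double fps);
int   vc_compress (void *vc, unsigned char *data[4], int stride[4],
                   const void **outbuf, int *outlen);  // returns < 0 on error
int   vc_get_last_frame_info (void *vc, int *key, int64_t *pts, int64_t *dts);
int   vc_close (void *vc);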


#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <sys/time.h>
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include "capture.h"
#include "vcompress.h"

static UsageEnvironment *_env = 0;

#define SINK_PORT 3030
#define VIDEO_WIDTH 320
#define VIDEO_HEIGHT 240
#define FRAME_PER_SEC 5.0

pid_t gettid()
{
    return syscall(SYS_gettid);
}

// webcam + x264
class WebcamFrameSource : public FramedSource
{
    void *mp_capture, *mp_compress;     // v4l2 capture + x264 encoder
    int m_started;
    void *mp_token;

public:
    WebcamFrameSource (UsageEnvironment &env)
        : FramedSource(env)
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);

        mp_capture = capture_open("/dev/video0", VIDEO_WIDTH, VIDEO_HEIGHT, PIX_FMT_YUV420P);
        if (!mp_capture) {
            fprintf(stderr, "%s: open /dev/video0 err\n", __func__);
            exit(-1);
        }

        mp_compress = vc_open(VIDEO_WIDTH, VIDEO_HEIGHT, FRAME_PER_SEC);
        if (!mp_compress) {
            fprintf(stderr, "%s: open x264 err\n", __func__);
            exit(-1);
        }

        m_started = 0;
        mp_token = 0;
    }

    ~WebcamFrameSource ()
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);

        if (m_started) {
            envir().taskScheduler().unscheduleDelayedTask(mp_token);
        }
        if (mp_compress)
            vc_close(mp_compress);
        if (mp_capture)
            capture_close(mp_capture);
    }

protected:
    virtual void doGetNextFrame ()
    {
        if (m_started) return;
        m_started = 1;

        // compute the wait time between frames from the fps
        double delay = 1000.0 / FRAME_PER_SEC;
        int to_delay = delay * 1000;    // us
        mp_token = envir().taskScheduler().scheduleDelayedTask(to_delay,
                getNextFrame, this);
    }

    // Important: if maxFrameSize() is not overridden, getNextFrame() may be
    // called with fMaxSize smaller than the actual encoded frame, and the
    // picture comes out incomplete.
    virtual unsigned maxFrameSize() const
    { return 100*1024; }

private:
    static void getNextFrame (void *ptr)
    {
        ((WebcamFrameSource*)ptr)->getNextFrame1();
    }

    void getNextFrame1 ()
    {
        // capture
        Picture pic;
        if (capture_get_picture(mp_capture, &pic) < 0) {
            fprintf(stderr, "==== %s: capture_get_picture err\n", __func__);
            m_started = 0;
            return;
        }

        // compress
        const void *outbuf;
        int outlen;
        if (vc_compress(mp_compress, pic.data, pic.stride, &outbuf, &outlen) < 0) {
            fprintf(stderr, "==== %s: vc_compress err\n", __func__);
            m_started = 0;
            return;
        }

        int64_t pts, dts;
        int key;
        vc_get_last_frame_info(mp_compress, &key, &pts, &dts);

        // save outbuf
        gettimeofday(&fPresentationTime, 0);
        fFrameSize = outlen;
        if (fFrameSize > fMaxSize) {
            fNumTruncatedBytes = fFrameSize - fMaxSize;
            fFrameSize = fMaxSize;
        }
        else {
            fNumTruncatedBytes = 0;
        }
        memmove(fTo, outbuf, fFrameSize);

        // notify
        afterGetting(this);

        m_started = 0;
    }
};

class WebcamOndemandMediaSubsession : public OnDemandServerMediaSubsession
{
public:
    static WebcamOndemandMediaSubsession *createNew (UsageEnvironment &env, FramedSource *source)
    {
        return new WebcamOndemandMediaSubsession(env, source);
    }

protected:
    WebcamOndemandMediaSubsession (UsageEnvironment &env, FramedSource *source)
        : OnDemandServerMediaSubsession(env, True)  // reuse the first source
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        mp_source = source;
        mp_sdp_line = 0;
    }

    ~WebcamOndemandMediaSubsession ()
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        if (mp_sdp_line) free(mp_sdp_line);
    }

private:
    static void afterPlayingDummy (void *ptr)
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        // ok
        WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession*)ptr;
        This->m_done = 0xff;
    }

    static void chkForAuxSDPLine (void *ptr)
    {
        WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession *)ptr;
        This->chkForAuxSDPLine1();
    }

    void chkForAuxSDPLine1 ()
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        if (mp_dummy_rtpsink->auxSDPLine())
            m_done = 0xff;
        else {
            int delay = 100*1000;   // 100ms
            nextTask() = envir().taskScheduler().scheduleDelayedTask(delay,
                    chkForAuxSDPLine, this);
        }
    }

protected:
    virtual const char *getAuxSDPLine (RTPSink *sink, FramedSource *source)
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        if (mp_sdp_line) return mp_sdp_line;

        mp_dummy_rtpsink = sink;
        mp_dummy_rtpsink->startPlaying(*source, 0, 0);
        //mp_dummy_rtpsink->startPlaying(*source, afterPlayingDummy, this);
        chkForAuxSDPLine(this);

        m_done = 0;
        envir().taskScheduler().doEventLoop(&m_done);

        mp_sdp_line = strdup(mp_dummy_rtpsink->auxSDPLine());
        mp_dummy_rtpsink->stopPlaying();

        return mp_sdp_line;
    }

    virtual RTPSink *createNewRTPSink (Groupsock *rtpsock, unsigned char type, FramedSource *source)
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        return H264VideoRTPSink::createNew(envir(), rtpsock, type);
    }

    virtual FramedSource *createNewStreamSource (unsigned sid, unsigned &bitrate)
    {
        fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
        bitrate = 500;
        return H264VideoStreamFramer::createNew(envir(), new WebcamFrameSource(envir()));
    }

private:
    FramedSource *mp_source;    // the WebcamFrameSource
    char *mp_sdp_line;
    RTPSink *mp_dummy_rtpsink;
    char m_done;
};

static void test_task (void *ptr)
{
    fprintf(stderr, "test: task ....\n");
    _env->taskScheduler().scheduleDelayedTask(100000, test_task, 0);
}

static void test (UsageEnvironment &env)
{
    fprintf(stderr, "test: begin...\n");

    char done = 0;
    int delay = 100 * 1000;
    env.taskScheduler().scheduleDelayedTask(delay, test_task, 0);
    env.taskScheduler().doEventLoop(&done);

    fprintf(stderr, "test: end..\n");
}

int main (int argc, char **argv)
{
    // env
    TaskScheduler *scheduler = BasicTaskScheduler::createNew();
    _env = BasicUsageEnvironment::createNew(*scheduler);

    // test
    //test(*_env);

    // rtsp server
    RTSPServer *rtspServer = RTSPServer::createNew(*_env, 8554);
    if (!rtspServer) {
        fprintf(stderr, "ERR: create RTSPServer err\n");
        ::exit(-1);
    }

    // add live stream
    do {
        WebcamFrameSource *webcam_source = 0;
        ServerMediaSession *sms = ServerMediaSession::createNew(*_env, "webcam", 0, "Session from /dev/video0");
        sms->addSubsession(WebcamOndemandMediaSubsession::createNew(*_env, webcam_source));
        rtspServer->addServerMediaSession(sms);

        char *url = rtspServer->rtspURL(sms);
        *_env << "using url \"" << url << "\"\n";
        delete [] url;
    } while (0);

    // run loop
    _env->taskScheduler().doEventLoop();

    return 1;
}


Building this requires live555 + libavcodec + libswscale + libx264. Any standard RTSP client can play the stream, e.g. VLC, MPlayer or QuickTime; with the settings above it is published at rtsp://<server-ip>:8554/webcam (the exact URL is also printed at startup).
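Since vcompress.cpp itself is only referenced here, the following is a rough, hypothetical sketch of how a vc_* wrapper could be built directly on libx264. It is not the author's implementation (see the earlier articles for that); the VcCtx struct, the preset/tune and the "baseline" profile are illustrative assumptions.

// Hypothetical minimal x264 wrapper matching the vc_* calls used above.
#include <stdint.h>
#include <string.h>
extern "C" {
#include <x264.h>
}

struct VcCtx {
    x264_t *enc;
    x264_picture_t pic_in;
    int width, height;
    int64_t next_pts;
    int last_key;
    int64_t last_pts, last_dts;
};

void *vc_open (int width, int height, double fps)
{
    x264_param_t param;
    if (x264_param_default_preset(&param, "veryfast", "zerolatency") < 0)
        return 0;
    param.i_width = width;
    param.i_height = height;
    param.i_fps_num = (int)(fps * 1000);   // e.g. 5.0 fps -> 5000/1000
    param.i_fps_den = 1000;
    x264_param_apply_profile(&param, "baseline");

    VcCtx *ctx = new VcCtx();
    ctx->enc = x264_encoder_open(&param);
    if (!ctx->enc) { delete ctx; return 0; }
    x264_picture_alloc(&ctx->pic_in, X264_CSP_I420, width, height);
    ctx->width = width;
    ctx->height = height;
    ctx->next_pts = 0;
    return ctx;
}

int vc_compress (void *p, unsigned char *data[4], int stride[4],
                 const void **outbuf, int *outlen)
{
    VcCtx *ctx = (VcCtx*)p;

    // copy the YUV420P planes delivered by capture_get_picture()
    for (int i = 0; i < 3; ++i) {
        int rows = (i == 0) ? ctx->height : ctx->height / 2;
        int cols = (i == 0) ? ctx->width  : ctx->width  / 2;
        for (int r = 0; r < rows; ++r)
            memcpy(ctx->pic_in.img.plane[i] + r * ctx->pic_in.img.i_stride[i],
                   data[i] + r * stride[i], cols);
    }
    ctx->pic_in.i_pts = ctx->next_pts++;

    x264_nal_t *nals;
    int n_nals;
    x264_picture_t pic_out;
    int size = x264_encoder_encode(ctx->enc, &nals, &n_nals, &ctx->pic_in, &pic_out);
    if (size <= 0) return -1;              // error, or no output for this frame

    // x264 lays the returned NAL units out contiguously (Annex-B by default),
    // which is the byte stream H264VideoStreamFramer expects.
    *outbuf = nals[0].p_payload;
    *outlen = size;
    ctx->last_key = pic_out.b_keyframe;
    ctx->last_pts = pic_out.i_pts;
    ctx->last_dts = pic_out.i_dts;
    return 0;
}

int vc_get_last_frame_info (void *p, int *key, int64_t *pts, int64_t *dts)
{
    VcCtx *ctx = (VcCtx*)p;
    *key = ctx->last_key;
    *pts = ctx->last_pts;
    *dts = ctx->last_dts;
    return 0;
}

int vc_close (void *p)
{
    VcCtx *ctx = (VcCtx*)p;
    x264_picture_clean(&ctx->pic_in);
    x264_encoder_close(ctx->enc);
    delete ctx;
    return 0;
}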
