1/* ---------------------------------------------------------------------------
2** This software is in the public domain, furnished "as is", without technical
3** support, and with no warranty, express or implied, as to its usefulness for
4** any purpose.
5**
6** main.cpp
7**
8** V4L2 RTSP streamer
9**
10** H264 capture using V4L2
11** RTSP using live555
12**
13** -------------------------------------------------------------------------*/
14
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <signal.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <dirent.h>

#include <sstream>

// libv4l2
#include <linux/videodev2.h>

// live555
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>

// project
#include "logger.h"

#include "V4l2Device.h"
#include "V4l2Capture.h"
#include "V4l2Output.h"

#include "H264_V4l2DeviceSource.h"
#include "ServerMediaSubsession.h"
#include "UnicastServerMediaSubsession.h"
#include "MulticastServerMediaSubsession.h"
#include "TSServerMediaSubsession.h"
#include "HTTPServer.h"

#ifdef HAVE_ALSA
#include "ALSACapture.h"
#endif
49
50// -----------------------------------------
51// signal handler
52// -----------------------------------------
// watch variable polled by the live555 event loop (doEventLoop(&quit))
char quit = 0;

/**
 * SIGINT handler: request the live555 event loop to exit.
 * Only async-signal-safe calls are made here: write() is on the POSIX
 * async-signal-safe list, printf() is not and may deadlock on stdio locks.
 */
void sighandler(int n)
{
	(void)n;
	const char msg[] = "SIGINT\n";
	ssize_t ret = write(STDOUT_FILENO, msg, sizeof(msg)-1);
	(void)ret; // best effort: nothing useful to do on error inside a signal handler
	quit = 1;
}
59
60
61// -----------------------------------------
62// create UserAuthenticationDatabase for RTSP server
63// -----------------------------------------
64UserAuthenticationDatabase* createUserAuthenticationDatabase(const std::list<std::string> & userPasswordList, const char* realm)
65{
66 UserAuthenticationDatabase* auth = NULL;
67 if (userPasswordList.size() > 0)
68 {
69 auth = new UserAuthenticationDatabase(realm, (realm != NULL) );
70
71 std::list<std::string>::const_iterator it;
72 for (it = userPasswordList.begin(); it != userPasswordList.end(); ++it)
73 {
74 std::istringstream is(*it);
75 std::string user;
76 getline(is, user, ':');
77 std::string password;
78 getline(is, password);
79 auth->addUserRecord(user.c_str(), password.c_str());
80 }
81 }
82
83 return auth;
84}
85
86// -----------------------------------------
87// create RTSP server
88// -----------------------------------------
89RTSPServer* createRTSPServer(UsageEnvironment& env, unsigned short rtspPort, unsigned short rtspOverHTTPPort, int timeout, unsigned int hlsSegment, const std::list<std::string> & userPasswordList, const char* realm, const std::string & webroot)
90{
91 UserAuthenticationDatabase* auth = createUserAuthenticationDatabase(userPasswordList, realm);
92 RTSPServer* rtspServer = HTTPServer::createNew(env, rtspPort, auth, timeout, hlsSegment, webroot);
93 if (rtspServer != NULL)
94 {
95 // set http tunneling
96 if (rtspOverHTTPPort)
97 {
98 rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
99 }
100 }
101 return rtspServer;
102}
103
104
105// -----------------------------------------
106// create FramedSource server
107// -----------------------------------------
108FramedSource* createFramedSource(UsageEnvironment* env, int format, DeviceInterface* videoCapture, int outfd, int queueSize, bool useThread, bool repeatConfig)
109{
110 FramedSource* source = NULL;
111 if (format == V4L2_PIX_FMT_H264)
112 {
113 source = H264_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, false);
114 }
115 else if (format == V4L2_PIX_FMT_HEVC)
116 {
117 source = H265_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, false);
118 }
119 else
120 {
121 source = V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread);
122 }
123 return source;
124}
125
126// -----------------------------------------
127// add an RTSP session
128// -----------------------------------------
129int addSession(RTSPServer* rtspServer, const std::string & sessionName, const std::list<ServerMediaSubsession*> & subSession)
130{
131 int nbSubsession = 0;
132 if (subSession.empty() == false)
133 {
134 UsageEnvironment& env(rtspServer->envir());
135 ServerMediaSession* sms = ServerMediaSession::createNew(env, sessionName.c_str());
136 if (sms != NULL)
137 {
138 std::list<ServerMediaSubsession*>::const_iterator subIt;
139 for (subIt = subSession.begin(); subIt != subSession.end(); ++subIt)
140 {
141 sms->addSubsession(*subIt);
142 nbSubsession++;
143 }
144
145 rtspServer->addServerMediaSession(sms);
146
147 char* url = rtspServer->rtspURL(sms);
148 if (url != NULL)
149 {
150 LOG(NOTICE) << "Play this stream using the URL \"" << url << "\"";
151 delete[] url;
152 }
153 }
154 }
155 return nbSubsession;
156}
157
158// -----------------------------------------
159// convert V4L2 pix format to RTP mime
160// -----------------------------------------
/**
 * Map a V4L2 pixel format (fourcc) to its RTP mime type.
 * @return the mime type, or an empty string for unsupported formats
 */
std::string getVideoRtpFormat(int format)
{
	switch(format)
	{
		case V4L2_PIX_FMT_HEVC : return "video/H265";
		case V4L2_PIX_FMT_H264 : return "video/H264";
		case V4L2_PIX_FMT_MJPEG:
		case V4L2_PIX_FMT_JPEG : return "video/JPEG";
		case V4L2_PIX_FMT_VP8  : return "video/VP8";
		case V4L2_PIX_FMT_VP9  : return "video/VP9";
		case V4L2_PIX_FMT_YUYV :
		case V4L2_PIX_FMT_UYVY : return "video/RAW";
		default                : return "";
	}
}
178
179// -----------------------------------------
180// convert string video format to fourcc
181// -----------------------------------------
182int decodeVideoFormat(const char* fmt)
183{
184 char fourcc[4];
185 memset(&fourcc, 0, sizeof(fourcc));
186 if (fmt != NULL)
187 {
188 strncpy(fourcc, fmt, 4);
189 }
190 return v4l2_fourcc(fourcc[0], fourcc[1], fourcc[2], fourcc[3]);
191}
192
193// -----------------------------------------
194// convert string audio format to pcm
195// -----------------------------------------
196#ifdef HAVE_ALSA
197snd_pcm_format_t decodeAudioFormat(const std::string& fmt)
198{
199 snd_pcm_format_t audioFmt = SND_PCM_FORMAT_UNKNOWN;
200 if (fmt == "S16_BE") {
201 audioFmt = SND_PCM_FORMAT_S16_BE;
202 } else if (fmt == "S16_LE") {
203 audioFmt = SND_PCM_FORMAT_S16_LE;
204 } else if (fmt == "S24_BE") {
205 audioFmt = SND_PCM_FORMAT_S24_BE;
206 } else if (fmt == "S24_LE") {
207 audioFmt = SND_PCM_FORMAT_S24_LE;
208 } else if (fmt == "S32_BE") {
209 audioFmt = SND_PCM_FORMAT_S32_BE;
210 } else if (fmt == "S32_LE") {
211 audioFmt = SND_PCM_FORMAT_S32_LE;
212 } else if (fmt == "ALAW") {
213 audioFmt = SND_PCM_FORMAT_A_LAW;
214 } else if (fmt == "MULAW") {
215 audioFmt = SND_PCM_FORMAT_MU_LAW;
216 } else if (fmt == "S8") {
217 audioFmt = SND_PCM_FORMAT_S8;
218 } else if (fmt == "MPEG") {
219 audioFmt = SND_PCM_FORMAT_MPEG;
220 }
221 return audioFmt;
222}
223std::string getAudioRtpFormat(snd_pcm_format_t format, int sampleRate, int channels)
224{
225 std::ostringstream os;
226 os << "audio/";
227 switch (format) {
228 case SND_PCM_FORMAT_A_LAW:
229 os << "PCMA";
230 break;
231 case SND_PCM_FORMAT_MU_LAW:
232 os << "PCMU";
233 break;
234 case SND_PCM_FORMAT_S8:
235 os << "L8";
236 break;
237 case SND_PCM_FORMAT_S24_BE:
238 case SND_PCM_FORMAT_S24_LE:
239 os << "L24";
240 break;
241 case SND_PCM_FORMAT_S32_BE:
242 case SND_PCM_FORMAT_S32_LE:
243 os << "L32";
244 break;
245 case SND_PCM_FORMAT_MPEG:
246 os << "MPEG";
247 break;
248 default:
249 os << "L16";
250 break;
251 }
252 os << "/" << sampleRate << "/" << channels;
253 return os.str();
254}
255#endif
256
257// -------------------------------------------------------
258// decode multicast url <group>:<rtp_port>:<rtcp_port>
259// -------------------------------------------------------
/**
 * Decode a multicast url of the form <group>:<rtp_port>:<rtcp_port>.
 * Every field is optional: a missing group leaves destinationAddress
 * unchanged, a missing rtp_port defaults to 20000, and a missing rtcp_port
 * defaults to rtp_port+1.
 */
void decodeMulticastUrl(const std::string & maddr, in_addr & destinationAddress, unsigned short & rtpPortNum, unsigned short & rtcpPortNum)
{
	std::istringstream is(maddr);
	std::string ip;
	getline(is, ip, ':');
	if (!ip.empty())
	{
		destinationAddress.s_addr = inet_addr(ip.c_str());
	}

	std::string port;
	getline(is, port, ':');
	rtpPortNum = 20000;
	if (!port.empty())
	{
		rtpPortNum = atoi(port.c_str());
	}

	// honor an explicit <rtcp_port> field (the documented syntax accepted it
	// but it was previously ignored); otherwise default to rtp_port+1
	rtcpPortNum = rtpPortNum+1;
	std::string rtcpPort;
	getline(is, rtcpPort, ':');
	if (!rtcpPort.empty())
	{
		rtcpPortNum = atoi(rtcpPort.c_str());
	}
}
279
280// -------------------------------------------------------
281// split video,audio device
282// -------------------------------------------------------
/**
 * Split a "video[,audio]" device argument into its two parts.
 * Without a comma, videoDev gets the whole string and audioDev is emptied.
 */
void decodeDevice(const std::string & device, std::string & videoDev, std::string & audioDev)
{
	size_t sep = device.find(',');
	if (sep == std::string::npos)
	{
		videoDev = device;
		audioDev.clear();
	}
	else
	{
		videoDev = device.substr(0, sep);
		audioDev = device.substr(sep + 1);
	}
}
289
/**
 * Return the last path component of a device path
 * (e.g. "/dev/video0" -> "video0"); paths without '/' are returned as-is.
 */
std::string getDeviceName(const std::string & devicePath)
{
	size_t slash = devicePath.find_last_of('/');
	return (slash == std::string::npos) ? devicePath : devicePath.substr(slash + 1);
}
299
300
301/* ---------------------------------------------------------------------------
302** get a "deviceid" from uevent sys file
303** -------------------------------------------------------------------------*/
304#ifdef HAVE_ALSA
/**
 * Extract a device identifier from the content of a sysfs uevent file.
 * Scans KEY=VALUE lines and returns the value of the first PRODUCT or
 * PCI_SUBSYS_ID key; an empty string when neither is present.
 */
std::string getDeviceId(const std::string& evt) {
	std::string deviceid;
	std::istringstream lines(evt);
	std::string key;
	std::string value;
	// each iteration consumes one "KEY=VALUE\n" pair
	while (getline(lines, key, '=') && getline(lines, value)) {
		if ( (key == "PRODUCT") || (key == "PCI_SUBSYS_ID") ) {
			deviceid = value;
			break;
		}
	}
	return deviceid;
}
320
321std::string getV4l2Alsa(const std::string& v4l2device) {
322 std::string audioDevice(v4l2device);
323
324 std::map<std::string,std::string> videodevices;
325 std::string video4linuxPath("/sys/class/video4linux");
326 DIR *dp = opendir(video4linuxPath.c_str());
327 if (dp != NULL) {
328 struct dirent *entry = NULL;
329 while((entry = readdir(dp))) {
330 std::string devicename;
331 std::string deviceid;
332 if (strstr(entry->d_name,"video") == entry->d_name) {
333 std::string ueventPath(video4linuxPath);
334 ueventPath.append("/").append(entry->d_name).append("/device/uevent");
335 std::ifstream ifsd(ueventPath.c_str());
336 deviceid = std::string(std::istreambuf_iterator<char>{ifsd}, {});
337 deviceid.erase(deviceid.find_last_not_of("\n")+1);
338 }
339
340 if (!deviceid.empty()) {
341 videodevices[entry->d_name] = getDeviceId(deviceid);
342 }
343 }
344 closedir(dp);
345 }
346
347 std::map<std::string,std::string> audiodevices;
348 int rcard = -1;
349 while ( (snd_card_next(&rcard) == 0) && (rcard>=0) ) {
350 void **hints = NULL;
351 if (snd_device_name_hint(rcard, "pcm", &hints) >= 0) {
352 void **str = hints;
353 while (*str) {
354 std::ostringstream os;
355 os << "/sys/class/sound/card" << rcard << "/device/uevent";
356
357 std::ifstream ifs(os.str().c_str());
358 std::string deviceid = std::string(std::istreambuf_iterator<char>{ifs}, {});
359 deviceid.erase(deviceid.find_last_not_of("\n")+1);
360 deviceid = getDeviceId(deviceid);
361
362 if (!deviceid.empty()) {
363 if (audiodevices.find(deviceid) == audiodevices.end()) {
364 std::string audioname = snd_device_name_get_hint(*str, "NAME");
365 audiodevices[deviceid] = audioname;
366 }
367 }
368
369 str++;
370 }
371
372 snd_device_name_free_hint(hints);
373 }
374 }
375
376 auto deviceId = videodevices.find(getDeviceName(v4l2device));
377 if (deviceId != videodevices.end()) {
378 auto audioDeviceIt = audiodevices.find(deviceId->second);
379
380 if (audioDeviceIt != audiodevices.end()) {
381 audioDevice = audioDeviceIt->second;
382 std::cout << v4l2device << "=>" << audioDevice << std::endl;
383 }
384 }
385
386
387 return audioDevice;
388}
389#endif
390
391// -----------------------------------------
392// entry point
393// -----------------------------------------
/**
 * Program entry point.
 * Parses the command line, opens the V4L2 capture device(s) (and, when built
 * with HAVE_ALSA, the matching ALSA capture device), wraps each capture in a
 * live555 StreamReplicator, and publishes unicast / multicast / HLS sessions
 * on an RTSP server until SIGINT sets the 'quit' watch variable.
 */
int main(int argc, char** argv)
{
	// default parameters
	const char *dev_name = "/dev/video0,/dev/video0";
	// ~0 => try the default format list below; 0 => '-f' without value, keep the device's current format
	unsigned int format = ~0;
	std::list<unsigned int> videoformatList;
	int width = 0;
	int height = 0;
	int queueSize = 10;
	int fps = 25;
	unsigned short rtspPort = 8554;
	unsigned short rtspOverHTTPPort = 0;
	bool multicast = false;
	int verbose = 0;
	std::string outputFile;
	V4l2Access::IoType ioTypeIn = V4l2Access::IOTYPE_MMAP;
	V4l2Access::IoType ioTypeOut = V4l2Access::IOTYPE_MMAP;
	int openflags = O_RDWR | O_NONBLOCK;
	std::string url = "unicast";
	std::string murl = "multicast";
	std::string tsurl = "ts";
	bool useThread = true;
	std::string maddr;
	bool repeatConfig = true;
	int timeout = 65;
	int defaultHlsSegment = 2;
	unsigned int hlsSegment = 0;
	const char* realm = NULL;
	std::list<std::string> userPasswordList;
	std::string webroot;
#ifdef HAVE_ALSA
	int audioFreq = 44100;
	int audioNbChannels = 2;
	std::list<snd_pcm_format_t> audioFmtList;
	snd_pcm_format_t audioFmt = SND_PCM_FORMAT_UNKNOWN;
#endif
	// the PORT environment variable overrides the default RTSP port (still overridable with -P)
	const char* defaultPort = getenv("PORT");
	if (defaultPort != NULL) {
		rtspPort = atoi(defaultPort);
	}

	// decode parameters
	int c = 0;
	while ((c = getopt (argc, argv, "v::Q:O:b:" "I:P:p:m:u:M:ct:S::" "R:U:" "rwBsf::F:W:H:G:" "A:C:a:" "Vh")) != -1)
	{
		switch (c)
		{
			case 'v': verbose = 1; if (optarg && *optarg=='v') verbose++; break;
			case 'Q': queueSize = atoi(optarg); break;
			case 'O': outputFile = optarg; break;
			case 'b': webroot = optarg; break;

			// RTSP/RTP
			case 'I': ReceivingInterfaceAddr = inet_addr(optarg); break;
			case 'P': rtspPort = atoi(optarg); break;
			case 'p': rtspOverHTTPPort = atoi(optarg); break;
			case 'u': url = optarg; break;
			case 'm': multicast = true; murl = optarg; break;
			case 'M': multicast = true; maddr = optarg; break;
			case 'c': repeatConfig = false; break;
			case 't': timeout = atoi(optarg); break;
			case 'S': hlsSegment = optarg ? atoi(optarg) : defaultHlsSegment; break;

			// users
			case 'R': realm = optarg; break;
			case 'U': userPasswordList.push_back(optarg); break;

			// V4L2
			case 'r': ioTypeIn = V4l2Access::IOTYPE_READWRITE; break;
			case 'w': ioTypeOut = V4l2Access::IOTYPE_READWRITE; break;
			case 'B': openflags = O_RDWR; break;
			case 's': useThread = false; break;
			case 'f': format = decodeVideoFormat(optarg); if (format) {videoformatList.push_back(format);}; break;
			case 'F': fps = atoi(optarg); break;
			case 'W': width = atoi(optarg); break;
			case 'H': height = atoi(optarg); break;
			case 'G': sscanf(optarg,"%dx%dx%d", &width, &height, &fps); break;

			// ALSA
#ifdef HAVE_ALSA
			case 'A': audioFreq = atoi(optarg); break;
			case 'C': audioNbChannels = atoi(optarg); break;
			case 'a': audioFmt = decodeAudioFormat(optarg); if (audioFmt != SND_PCM_FORMAT_UNKNOWN) {audioFmtList.push_back(audioFmt);} ; break;
#endif

			// version
			case 'V':
				std::cout << VERSION << std::endl;
				exit(0);
			break;

			// help
			case 'h':
			default:
			{
				std::cout << argv[0] << " [-v[v]] [-Q queueSize] [-O file]" << std::endl;
				std::cout << "\t [-I interface] [-P RTSP port] [-p RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout] [-T] [-S[duration]]" << std::endl;
				std::cout << "\t [-r] [-w] [-s] [-f[format] [-W width] [-H height] [-F fps] [device] [device]" << std::endl;
				std::cout << "\t -v : verbose" << std::endl;
				std::cout << "\t -vv : very verbose" << std::endl;
				std::cout << "\t -Q <length> : Number of frame queue (default "<< queueSize << ")" << std::endl;
				std::cout << "\t -O <output> : Copy captured frame to a file or a V4L2 device" << std::endl;
				std::cout << "\t -b <webroot> : path to webroot" << std::endl;

				std::cout << "\t RTSP/RTP options" << std::endl;
				std::cout << "\t -I <addr> : RTSP interface (default autodetect)" << std::endl;
				std::cout << "\t -P <port> : RTSP port (default "<< rtspPort << ")" << std::endl;
				std::cout << "\t -p <port> : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
				std::cout << "\t -U <user>:<pass> : RTSP user and password" << std::endl;
				std::cout << "\t -R <realm> : use md5 password 'md5(<username>:<realm>:<password>')" << std::endl;
				std::cout << "\t -u <url> : unicast url (default " << url << ")" << std::endl;
				std::cout << "\t -m <url> : multicast url (default " << murl << ")" << std::endl;
				std::cout << "\t -M <addr> : multicast group:port (default is random_address:20000)" << std::endl;
				std::cout << "\t -c : don't repeat config (default repeat config before IDR frame)" << std::endl;
				std::cout << "\t -t <timeout> : RTCP expiration timeout in seconds (default " << timeout << ")" << std::endl;
				std::cout << "\t -S[<duration>] : enable HLS & MPEG-DASH with segment duration in seconds (default " << defaultHlsSegment << ")" << std::endl;

				std::cout << "\t V4L2 options" << std::endl;
				std::cout << "\t -r : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
				std::cout << "\t -w : V4L2 capture using write interface (default use memory mapped buffers)" << std::endl;
				std::cout << "\t -B : V4L2 capture using blocking mode (default use non-blocking mode)" << std::endl;
				std::cout << "\t -s : V4L2 capture using live555 mainloop (default use a reader thread)" << std::endl;
				std::cout << "\t -f : V4L2 capture using current capture format (-W,-H,-F are ignored)" << std::endl;
				std::cout << "\t -f<format> : V4L2 capture using format (-W,-H,-F are used)" << std::endl;
				std::cout << "\t -W <width> : V4L2 capture width (default "<< width << ")" << std::endl;
				std::cout << "\t -H <height> : V4L2 capture height (default "<< height << ")" << std::endl;
				std::cout << "\t -F <fps> : V4L2 capture framerate (default "<< fps << ")" << std::endl;
				std::cout << "\t -G <w>x<h>[x<f>] : V4L2 capture format (default "<< width << "x" << height << "x" << fps << ")" << std::endl;

#ifdef HAVE_ALSA
				std::cout << "\t ALSA options" << std::endl;
				std::cout << "\t -A freq : ALSA capture frequency and channel (default " << audioFreq << ")" << std::endl;
				std::cout << "\t -C channels : ALSA capture channels (default " << audioNbChannels << ")" << std::endl;
				std::cout << "\t -a fmt : ALSA capture audio format (default S16_BE)" << std::endl;
#endif

				std::cout << "\t Devices :" << std::endl;
				std::cout << "\t [V4L2 device][,ALSA device] : V4L2 capture device or/and ALSA capture device (default "<< dev_name << ")" << std::endl;
				exit(0);
			}
		}
	}
	// remaining arguments are device paths; fall back to the default device
	std::list<std::string> devList;
	while (optind<argc)
	{
		devList.push_back(argv[optind]);
		optind++;
	}
	if (devList.empty())
	{
		devList.push_back(dev_name);
	}

	// default format tries
	// (skipped when -f was given without a value, i.e. format==0: keep the device's current format)
	if ((videoformatList.empty()) && (format!=0)) {
		videoformatList.push_back(V4L2_PIX_FMT_H264);
		videoformatList.push_back(V4L2_PIX_FMT_MJPEG);
		videoformatList.push_back(V4L2_PIX_FMT_JPEG);
	}

#ifdef HAVE_ALSA
	// default audio format tries
	if (audioFmtList.empty()) {
		audioFmtList.push_back(SND_PCM_FORMAT_S16_LE);
		audioFmtList.push_back(SND_PCM_FORMAT_S16_BE);
	}
#endif

	// init logger
	initLogger(verbose);

	// create live555 environment
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

	// split multicast info (defaults: random SSM group, rtp port 20000, rtcp 20001)
	struct in_addr destinationAddress;
	destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
	unsigned short rtpPortNum = 20000;
	unsigned short rtcpPortNum = rtpPortNum+1;
	unsigned char ttl = 5;
	decodeMulticastUrl(maddr, destinationAddress, rtpPortNum, rtcpPortNum);

	// create RTSP server
	RTSPServer* rtspServer = createRTSPServer(*env, rtspPort, rtspOverHTTPPort, timeout, hlsSegment, userPasswordList, realm, webroot);
	if (rtspServer == NULL)
	{
		LOG(ERROR) << "Failed to create RTSP server: " << env->getResultMsg();
	}
	else
	{
		// NOTE(review): 'out' is reassigned for every device when -O is used;
		// with several devices only the last V4l2Output is deleted below — confirm intended
		V4l2Output* out = NULL;
		int nbSource = 0;
		std::list<std::string>::iterator devIt;
		for ( devIt=devList.begin() ; devIt!=devList.end() ; ++devIt)
		{
			std::string deviceName(*devIt);

			std::string videoDev;
			std::string audioDev;
			decodeDevice(deviceName, videoDev, audioDev);

			std::string baseUrl;
			if (devList.size() > 1)
			{
				// several devices: prefix each session url with the device name
				baseUrl = getDeviceName(videoDev);
				baseUrl.append("/");
			}
			StreamReplicator* videoReplicator = NULL;
			std::string rtpFormat;
			if (!videoDev.empty())
			{
				// Init video capture
				LOG(NOTICE) << "Create V4L2 Source..." << videoDev;

				V4L2DeviceParameters param(videoDev.c_str(), videoformatList, width, height, fps, verbose, openflags);
				V4l2Capture* videoCapture = V4l2Capture::create(param, ioTypeIn);
				if (videoCapture)
				{
					int outfd = -1;

					// optionally duplicate the captured frames to a file or V4L2 output device (-O)
					if (!outputFile.empty())
					{
						V4L2DeviceParameters outparam(outputFile.c_str(), videoCapture->getFormat(), videoCapture->getWidth(), videoCapture->getHeight(), 0,verbose);
						out = V4l2Output::create(outparam, ioTypeOut);
						if (out != NULL)
						{
							outfd = out->getFd();
						}
					}

					rtpFormat.assign(getVideoRtpFormat(videoCapture->getFormat()));
					if (rtpFormat.empty()) {
						LOG(FATAL) << "No Streaming format supported for device " << videoDev;
						delete videoCapture;
					} else {
						LOG(NOTICE) << "Create Source ..." << videoDev;
						FramedSource* videoSource = createFramedSource(env, videoCapture->getFormat(), new DeviceCaptureAccess<V4l2Capture>(videoCapture), outfd, queueSize, useThread, repeatConfig);
						if (videoSource == NULL)
						{
							LOG(FATAL) << "Unable to create source for device " << videoDev;
							delete videoCapture;
						}
						else
						{
							// extend buffer size if needed
							if (videoCapture->getBufferSize() > OutPacketBuffer::maxSize)
							{
								OutPacketBuffer::maxSize = videoCapture->getBufferSize();
							}
							// the replicator lets several sessions (unicast/multicast/HLS) share one capture
							videoReplicator = StreamReplicator::createNew(*env, videoSource, false);
						}
					}
				}
			}

			// Init Audio Capture
			StreamReplicator* audioReplicator = NULL;
			std::string rtpAudioFormat;
#ifdef HAVE_ALSA
			if (!audioDev.empty())
			{
				// find the ALSA device associated with the V4L2 device
				audioDev = getV4l2Alsa(audioDev);

				// Init audio capture
				LOG(NOTICE) << "Create ALSA Source..." << audioDev;

				ALSACaptureParameters param(audioDev.c_str(), audioFmtList, audioFreq, audioNbChannels, verbose);
				ALSACapture* audioCapture = ALSACapture::createNew(param);
				if (audioCapture)
				{
					FramedSource* audioSource = V4L2DeviceSource::createNew(*env, new DeviceCaptureAccess<ALSACapture>(audioCapture), -1, queueSize, useThread);
					if (audioSource == NULL)
					{
						LOG(FATAL) << "Unable to create source for device " << audioDev;
						delete audioCapture;
					}
					else
					{
						rtpAudioFormat.assign(getAudioRtpFormat(audioCapture->getFormat(),audioCapture->getSampleRate(), audioCapture->getChannels()));

						// extend buffer size if needed
						if (audioCapture->getBufferSize() > OutPacketBuffer::maxSize)
						{
							OutPacketBuffer::maxSize = audioCapture->getBufferSize();
						}
						audioReplicator = StreamReplicator::createNew(*env, audioSource, false);
					}
				}
			}
#endif


			// Create Multicast Session
			if (multicast)
			{
				LOG(NOTICE) << "RTP address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
				LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;

				std::list<ServerMediaSubsession*> subSession;
				if (videoReplicator)
				{
					subSession.push_back(MulticastServerMediaSubsession::createNew(*env, destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, videoReplicator, rtpFormat));
					// increment ports for next sessions
					rtpPortNum+=2;
					rtcpPortNum+=2;
				}

				if (audioReplicator)
				{
					subSession.push_back(MulticastServerMediaSubsession::createNew(*env, destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, audioReplicator, rtpAudioFormat));

					// increment ports for next sessions
					rtpPortNum+=2;
					rtcpPortNum+=2;
				}
				nbSource += addSession(rtspServer, baseUrl+murl, subSession);
			}

			// Create HLS Session
			if (hlsSegment > 0)
			{
				std::list<ServerMediaSubsession*> subSession;
				if (videoReplicator)
				{
					subSession.push_back(TSServerMediaSubsession::createNew(*env, videoReplicator, rtpFormat, audioReplicator, rtpAudioFormat, hlsSegment));
				}
				nbSource += addSession(rtspServer, baseUrl+tsurl, subSession);

				struct in_addr ip;
				ip.s_addr = ourIPAddress(*env);
				LOG(NOTICE) << "HLS http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+tsurl << ".m3u8";
				LOG(NOTICE) << "MPEG-DASH http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+tsurl << ".mpd";
			}

			// Create Unicast Session
			std::list<ServerMediaSubsession*> subSession;
			if (videoReplicator)
			{
				subSession.push_back(UnicastServerMediaSubsession::createNew(*env, videoReplicator, rtpFormat));
			}
			if (audioReplicator)
			{
				subSession.push_back(UnicastServerMediaSubsession::createNew(*env, audioReplicator, rtpAudioFormat));
			}
			nbSource += addSession(rtspServer, baseUrl+url, subSession);
		}

		// run only when at least one session was published
		if (nbSource>0)
		{
			// main loop: blocks until sighandler() sets 'quit'
			signal(SIGINT,sighandler);
			env->taskScheduler().doEventLoop(&quit);
			LOG(NOTICE) << "Exiting....";
		}

		Medium::close(rtspServer);

		if (out)
		{
			delete out;
		}
	}

	env->reclaim();
	delete scheduler;

	return 0;
}
764
765
766
767