/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2020 Live Networks, Inc. All rights reserved.
// A server demultiplexer for an MPEG 1 or 2 Program Stream
// Implementation

#include "MPEG1or2FileServerDemux.hh"
#include "MPEG1or2DemuxedServerMediaSubsession.hh"
#include "ByteStreamFileSource.hh"

MPEG1or2FileServerDemux*
MPEG1or2FileServerDemux::createNew(UsageEnvironment& env, char const* fileName,
                                   Boolean reuseFirstSource) {
  return new MPEG1or2FileServerDemux(env, fileName, reuseFirstSource);
}

static float MPEG1or2ProgramStreamFileDuration(UsageEnvironment& env,
                                               char const* fileName,
                                               unsigned& fileSize); // forward

MPEG1or2FileServerDemux
::MPEG1or2FileServerDemux(UsageEnvironment& env, char const* fileName,
                          Boolean reuseFirstSource)
  : Medium(env),
    fReuseFirstSource(reuseFirstSource),
    fSession0Demux(NULL), fLastCreatedDemux(NULL), fLastClientSessionId(~0) {
  fFileName = strDup(fileName);
  fFileDuration = MPEG1or2ProgramStreamFileDuration(env, fileName, fFileSize);
}

MPEG1or2FileServerDemux::~MPEG1or2FileServerDemux() {
  Medium::close(fSession0Demux);
  delete[] (char*)fFileName;
}

ServerMediaSubsession*
MPEG1or2FileServerDemux::newAudioServerMediaSubsession() {
  return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xC0, fReuseFirstSource);
  // because 0xC0 is the stream id of the first MPEG audio stream in a Program Stream
}

ServerMediaSubsession*
MPEG1or2FileServerDemux::newVideoServerMediaSubsession(Boolean iFramesOnly,
                                                       double vshPeriod) {
  return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xE0, fReuseFirstSource,
                                                         iFramesOnly, vshPeriod);
  // because 0xE0 is the stream id of the first MPEG video stream in a Program Stream
}

ServerMediaSubsession*
MPEG1or2FileServerDemux::newAC3AudioServerMediaSubsession() {
  return MPEG1or2DemuxedServerMediaSubsession::createNew(*this, 0xBD, fReuseFirstSource);
  // because, in a VOB file, the AC3 audio has stream id 0xBD
}

MPEG1or2DemuxedElementaryStream*
MPEG1or2FileServerDemux::newElementaryStream(unsigned clientSessionId,
                                             u_int8_t streamIdTag) {
  MPEG1or2Demux* demuxToUse;
  if (clientSessionId == 0) {
    // 'Session 0' is treated specially, because its audio & video streams
    // are created and destroyed one-at-a-time, rather than both streams being
    // created, and then (later) both streams being destroyed (as is the case
    // for other ('real') session ids).  Because of this, a separate demux is
    // used for session 0, and its deletion is managed by us, rather than
    // happening automatically.
    if (fSession0Demux == NULL) {
      // Open our input file as a 'byte-stream file source':
      ByteStreamFileSource* fileSource
        = ByteStreamFileSource::createNew(envir(), fFileName);
      if (fileSource == NULL) return NULL;
      fSession0Demux = MPEG1or2Demux::createNew(envir(), fileSource, False/*note!*/);
    }
    demuxToUse = fSession0Demux;
  } else {
    // First, check whether this is a new client session.  If so, create a new
    // demux for it:
    if (clientSessionId != fLastClientSessionId) {
      // Open our input file as a 'byte-stream file source':
      ByteStreamFileSource* fileSource
        = ByteStreamFileSource::createNew(envir(), fFileName);
      if (fileSource == NULL) return NULL;

      fLastCreatedDemux = MPEG1or2Demux::createNew(envir(), fileSource, True);
      // Note: We tell the demux to delete itself when its last
      // elementary stream is deleted.
      fLastClientSessionId = clientSessionId;
      // Note: This code relies upon the fact that the creation of streams for
      // different client sessions does not overlap - so only one "MPEG1or2Demux"
      // is used at a time.
    }
    demuxToUse = fLastCreatedDemux;
  }

  if (demuxToUse == NULL) return NULL; // shouldn't happen

  return demuxToUse->newElementaryStream(streamIdTag);
}


static Boolean getMPEG1or2TimeCode(FramedSource* dataSource,
                                   MPEG1or2Demux& parentDemux,
                                   Boolean returnFirstSeenCode,
                                   float& timeCode); // forward

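// Estimate the file's duration by taking the difference between the first SCR
// (System Clock Reference) seen in the file and the last SCR seen near the end
// of the file.  (We seek to within ~100000 bytes of the end, rather than
// reading through the entire file.)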
static float MPEG1or2ProgramStreamFileDuration(UsageEnvironment& env,
                                               char const* fileName,
                                               unsigned& fileSize) {
  FramedSource* dataSource = NULL;
  float duration = 0.0; // until we learn otherwise
  fileSize = 0; // ditto

  do {
    // Open the input file as a 'byte-stream file source':
    ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(env, fileName);
    if (fileSource == NULL) break;
    dataSource = fileSource;

    fileSize = (unsigned)(fileSource->fileSize());
    if (fileSize == 0) break;

    // Create an MPEG demultiplexer that reads from that source:
    MPEG1or2Demux* baseDemux = MPEG1or2Demux::createNew(env, dataSource, True);
    if (baseDemux == NULL) break;

    // Create, from this, a source that returns raw PES packets:
    dataSource = baseDemux->newRawPESStream();

    // Read the first time code from the file:
    float firstTimeCode;
    if (!getMPEG1or2TimeCode(dataSource, *baseDemux, True, firstTimeCode)) break;

    // Then, read the last time code from the file.
    // (Before doing this, flush the demux's input buffers,
    // and seek towards the end of the file, for efficiency.)
    baseDemux->flushInput();
    unsigned const startByteFromEnd = 100000;
    unsigned newFilePosition
      = fileSize < startByteFromEnd ? 0 : fileSize - startByteFromEnd;
    if (newFilePosition > 0) fileSource->seekToByteAbsolute(newFilePosition);

    float lastTimeCode;
    if (!getMPEG1or2TimeCode(dataSource, *baseDemux, False, lastTimeCode)) break;

    // Take the difference between these time codes as the file's duration:
    float timeCodeDiff = lastTimeCode - firstTimeCode;
    if (timeCodeDiff < 0) break;
    duration = timeCodeDiff;
  } while (0);

  Medium::close(dataSource);
  return duration;
}

// A PES packet is at most 6 header bytes plus 65535 payload bytes:
#define MFSD_DUMMY_SINK_BUFFER_SIZE (6+65535) /* large enough for a PES packet */

// A sink that merely reads (and discards) frames from its source.  It is used
// (by "getMPEG1or2TimeCode()", below) to pull data through a "MPEG1or2Demux",
// so that the demux gets to see - and record - SCR time codes:
class MFSD_DummySink: public MediaSink {
public:
  MFSD_DummySink(MPEG1or2Demux& demux, Boolean returnFirstSeenCode);
  virtual ~MFSD_DummySink();

  char watchVariable;

private:
  // redefined virtual function:
  virtual Boolean continuePlaying();

private:
  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
                                struct timeval presentationTime,
                                unsigned durationInMicroseconds);
  void afterGettingFrame1();

private:
  MPEG1or2Demux& fOurDemux;
  Boolean fReturnFirstSeenCode;
  unsigned char fBuf[MFSD_DUMMY_SINK_BUFFER_SIZE];
};

static void afterPlayingMFSD_DummySink(MFSD_DummySink* sink); // forward
static float computeSCRTimeCode(MPEG1or2Demux::SCR const& scr); // forward

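// Read from "dataSource" (via a dummy sink) until "parentDemux" has recorded an
// SCR - the first one seen if "returnFirstSeenCode" is True, otherwise the last
// one seen before the source closes.  We run our own event loop here;
// "afterPlayingMFSD_DummySink()" (below) sets the sink's "watchVariable", which
// makes "doEventLoop()" return.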
static Boolean getMPEG1or2TimeCode(FramedSource* dataSource,
                                   MPEG1or2Demux& parentDemux,
                                   Boolean returnFirstSeenCode,
                                   float& timeCode) {
  // Start reading through "dataSource", until we see an SCR time code:
  parentDemux.lastSeenSCR().isValid = False;
  UsageEnvironment& env = dataSource->envir(); // alias
  MFSD_DummySink sink(parentDemux, returnFirstSeenCode);
  sink.startPlaying(*dataSource,
                    (MediaSink::afterPlayingFunc*)afterPlayingMFSD_DummySink, &sink);
  env.taskScheduler().doEventLoop(&sink.watchVariable);

  timeCode = computeSCRTimeCode(parentDemux.lastSeenSCR());
  return parentDemux.lastSeenSCR().isValid;
}


////////// MFSD_DummySink implementation //////////

MFSD_DummySink::MFSD_DummySink(MPEG1or2Demux& demux, Boolean returnFirstSeenCode)
  : MediaSink(demux.envir()),
    watchVariable(0), fOurDemux(demux), fReturnFirstSeenCode(returnFirstSeenCode) {
}

MFSD_DummySink::~MFSD_DummySink() {
}

Boolean MFSD_DummySink::continuePlaying() {
  if (fSource == NULL) return False; // sanity check

  fSource->getNextFrame(fBuf, sizeof fBuf,
                        afterGettingFrame, this,
                        onSourceClosure, this);
  return True;
}

void MFSD_DummySink::afterGettingFrame(void* clientData, unsigned /*frameSize*/,
                                       unsigned /*numTruncatedBytes*/,
                                       struct timeval /*presentationTime*/,
                                       unsigned /*durationInMicroseconds*/) {
  MFSD_DummySink* sink = (MFSD_DummySink*)clientData;
  sink->afterGettingFrame1();
}

void MFSD_DummySink::afterGettingFrame1() {
  if (fReturnFirstSeenCode && fOurDemux.lastSeenSCR().isValid) {
    // We were asked to return the first SCR that we saw, and we've seen one,
    // so we're done.  (Handle this as if the input source had closed.)
    onSourceClosure();
    return;
  }

  continuePlaying();
}

static void afterPlayingMFSD_DummySink(MFSD_DummySink* sink) {
  // Return from the "doEventLoop()" call:
  sink->watchVariable = ~0;
}

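// Convert an SCR to a time (in seconds).  An SCR consists of a 33-bit 'base',
// counted at 90 kHz (stored here as "highBit" plus the 32-bit "remainingBits"),
// and - for MPEG-2 - a 9-bit 'extension', counted at 27 MHz.  So:
//   seconds = base/90000 + extension/27000000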
static float computeSCRTimeCode(MPEG1or2Demux::SCR const& scr) {
  double result = scr.remainingBits/90000.0 + scr.extension/27000000.0;
  if (scr.highBit) {
    // Add (2^32)/90000 == (2^28)/5625
    double const highBitValue = (256*1024*1024)/5625.0;
    result += highBitValue;
  }

  return (float)result;
}