/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2020 Live Networks, Inc. All rights reserved.
// A filter that breaks up an MPEG video elementary stream into
//   headers and frames
// Implementation

#include "MPEGVideoStreamParser.hh"
#include <GroupsockHelper.hh>
////////// TimeCode implementation //////////

TimeCode::TimeCode()
  : days(0), hours(0), minutes(0), seconds(0), pictures(0) {
}

TimeCode::~TimeCode() {
}

int TimeCode::operator==(TimeCode const& arg2) {
  return pictures == arg2.pictures && seconds == arg2.seconds
    && minutes == arg2.minutes && hours == arg2.hours && days == arg2.days;
}

////////// MPEGVideoStreamFramer implementation //////////

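// This base class does the timing bookkeeping and drives the read/parse loop;
// the actual splitting of the stream into headers and frames is done by
// "fParser".  (We only initialize "fParser" to NULL and delete it in our
// destructor; a concrete subclass is expected to create the parser itself.)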
MPEGVideoStreamFramer::MPEGVideoStreamFramer(UsageEnvironment& env,
                                             FramedSource* inputSource)
  : FramedFilter(env, inputSource),
    fFrameRate(0.0) /* until we learn otherwise */,
    fParser(NULL) {
  reset();
}

MPEGVideoStreamFramer::~MPEGVideoStreamFramer() {
  delete fParser;
}

void MPEGVideoStreamFramer::flushInput() {
  reset();
  if (fParser != NULL) fParser->flushInput();
}

void MPEGVideoStreamFramer::reset() {
  fPictureCount = 0;
  fPictureEndMarker = True; // So that we start looking as if we'd just ended an 'access unit'
  fPicturesAdjustment = 0;
  fPictureTimeBase = 0.0;
  fTcSecsBase = 0;
  fHaveSeenFirstTimeCode = False;

  // Use the current wallclock time as the base 'presentation time':
  gettimeofday(&fPresentationTimeBase, NULL);
}

#ifdef DEBUG
static struct timeval firstPT;
#endif
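// Each frame's presentation time is derived from the most recent GOP header's
// time_code: we take the stream time elapsed since the first time_code that
// we saw (its whole seconds are "fTcSecsBase"; its within-second picture
// offset is "fPictureTimeBase"), and add that to the wallclock base time that
// was recorded in "reset()".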
void MPEGVideoStreamFramer
::computePresentationTime(unsigned numAdditionalPictures) {
  // Computes "fPresentationTime" from the most recent GOP's
  // time_code, along with the "numAdditionalPictures" parameter:
  TimeCode& tc = fCurGOPTimeCode;

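  // "tcSecs" is the number of whole seconds of stream time since the first
  // GOP time_code that we saw; "pictureTime" is the additional offset (in
  // seconds) contributed by the picture count within the current time_code,
  // plus any pictures accumulated from repeated time_codes
  // ("fPicturesAdjustment") and the caller's "numAdditionalPictures":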
  unsigned tcSecs
    = (((tc.days*24)+tc.hours)*60+tc.minutes)*60+tc.seconds - fTcSecsBase;
  double pictureTime = fFrameRate == 0.0 ? 0.0
    : (tc.pictures + fPicturesAdjustment + numAdditionalPictures)/fFrameRate;
  while (pictureTime < fPictureTimeBase) { // "if" should be enough, but just in case
    if (tcSecs > 0) tcSecs -= 1;
    pictureTime += 1.0;
  }
  pictureTime -= fPictureTimeBase;
  if (pictureTime < 0.0) pictureTime = 0.0; // sanity check
  unsigned pictureSeconds = (unsigned)pictureTime;
  double pictureFractionOfSecond = pictureTime - (double)pictureSeconds;

  fPresentationTime = fPresentationTimeBase;
  fPresentationTime.tv_sec += tcSecs + pictureSeconds;
  fPresentationTime.tv_usec += (long)(pictureFractionOfSecond*1000000.0);
  if (fPresentationTime.tv_usec >= 1000000) {
    fPresentationTime.tv_usec -= 1000000;
    ++fPresentationTime.tv_sec;
  }
#ifdef DEBUG
  if (firstPT.tv_sec == 0 && firstPT.tv_usec == 0) firstPT = fPresentationTime;
  struct timeval diffPT;
  diffPT.tv_sec = fPresentationTime.tv_sec - firstPT.tv_sec;
  diffPT.tv_usec = fPresentationTime.tv_usec - firstPT.tv_usec;
  if (fPresentationTime.tv_usec < firstPT.tv_usec) {
    --diffPT.tv_sec;
    diffPT.tv_usec += 1000000;
  }
  fprintf(stderr, "MPEGVideoStreamFramer::computePresentationTime(%d) -> %lu.%06ld [%lu.%06ld]\n",
          numAdditionalPictures, fPresentationTime.tv_sec, fPresentationTime.tv_usec,
          diffPT.tv_sec, diffPT.tv_usec);
#endif
}
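
// Called when a new GOP time_code is seen in the stream.  We handle 'day'
// wraparound here, and also the case where consecutive GOPs carry an
// unchanged time_code; in that case the intervening pictures are accumulated
// in "fPicturesAdjustment" instead.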
void MPEGVideoStreamFramer
::setTimeCode(unsigned hours, unsigned minutes, unsigned seconds,
              unsigned pictures, unsigned picturesSinceLastGOP) {
  TimeCode& tc = fCurGOPTimeCode; // abbrev
  unsigned days = tc.days;
  if (hours < tc.hours) {
    // Assume that the 'day' has wrapped around:
    ++days;
  }
  tc.days = days;
  tc.hours = hours;
  tc.minutes = minutes;
  tc.seconds = seconds;
  tc.pictures = pictures;
  if (!fHaveSeenFirstTimeCode) {
    fPictureTimeBase = fFrameRate == 0.0 ? 0.0 : tc.pictures/fFrameRate;
    fTcSecsBase = (((tc.days*24)+tc.hours)*60+tc.minutes)*60+tc.seconds;
    fHaveSeenFirstTimeCode = True;
  } else if (fCurGOPTimeCode == fPrevGOPTimeCode) {
    // The time code has not changed since last time. Adjust for this:
    fPicturesAdjustment += picturesSinceLastGOP;
  } else {
    // Normal case: The time code changed since last time.
    fPrevGOPTimeCode = tc;
    fPicturesAdjustment = 0;
  }
}
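
// The downstream reader's buffer ("fTo", of size "fMaxSize") is handed
// directly to the parser, so each parsed frame is copied straight into the
// reader's space; "continueReadProcessing()" then tries to parse (and
// deliver) a frame immediately: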
void MPEGVideoStreamFramer::doGetNextFrame() {
  fParser->registerReadInterest(fTo, fMaxSize);
  continueReadProcessing();
}

void MPEGVideoStreamFramer::doStopGettingFrames() {
  flushInput();
  FramedFilter::doStopGettingFrames();
}
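
// This static version is a 'continue' callback - presumably invoked via the
// parser once more data has arrived from the upstream source - that simply
// resumes parsing on the framer that was passed as "clientData":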
void MPEGVideoStreamFramer
::continueReadProcessing(void* clientData,
                         unsigned char* /*ptr*/, unsigned /*size*/,
                         struct timeval /*presentationTime*/) {
  MPEGVideoStreamFramer* framer = (MPEGVideoStreamFramer*)clientData;
  framer->continueReadProcessing();
}

void MPEGVideoStreamFramer::continueReadProcessing() {
  unsigned acquiredFrameSize = fParser->parse();
  if (acquiredFrameSize > 0) {
    // We were able to acquire a frame from the input.
    // It has already been copied to the reader's space.
    fFrameSize = acquiredFrameSize;
    fNumTruncatedBytes = fParser->numTruncatedBytes();

    // "fPresentationTime" should have already been computed.

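    // "fPictureCount" counts the pictures in the data that was just delivered
    // (it gets incremented elsewhere, as each picture is parsed, and is reset
    // to 0 below), so the frame's duration is "fPictureCount"/"fFrameRate"
    // seconds; the "(int)fPictureCount < 0" test is just a sanity check
    // against a bogus count.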
    // Compute "fDurationInMicroseconds" now:
    fDurationInMicroseconds
      = (fFrameRate == 0.0 || ((int)fPictureCount) < 0) ? 0
      : (unsigned)((fPictureCount*1000000)/fFrameRate);
#ifdef DEBUG
    fprintf(stderr, "%u bytes @%ld.%06ld, fDurationInMicroseconds: %u ((%u*1000000)/%f)\n",
            acquiredFrameSize, (long)fPresentationTime.tv_sec,
            (long)fPresentationTime.tv_usec, fDurationInMicroseconds,
            fPictureCount, fFrameRate);
#endif
    fPictureCount = 0;

    // Call our own 'after getting' function. Because we're not a 'leaf'
    // source, we can call this directly, without risking infinite recursion.
    afterGetting(this);
  } else {
    // We were unable to parse a complete frame from the input, because:
    // - we had to read more data from the source stream, or
    // - the source stream has ended.
  }
}