/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2020 Live Networks, Inc.  All rights reserved.
// A simplified version of "MPEG1or2VideoStreamFramer" that takes only
// complete, discrete frames (rather than an arbitrary byte stream) as input.
// This avoids the parsing and data copying overhead of the full
// "MPEG1or2VideoStreamFramer".
// Implementation

#include "MPEG1or2VideoStreamDiscreteFramer.hh"

MPEG1or2VideoStreamDiscreteFramer*
MPEG1or2VideoStreamDiscreteFramer::createNew(UsageEnvironment& env,
                                             FramedSource* inputSource,
                                             Boolean iFramesOnly,
                                             double vshPeriod,
                                             Boolean leavePresentationTimesUnmodified) {
  // Need to add source type checking here???  #####
  return new MPEG1or2VideoStreamDiscreteFramer(env, inputSource,
                                               iFramesOnly, vshPeriod, leavePresentationTimesUnmodified);
}
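
// Typical use (a hedged sketch, not part of this library's code; "env" and
// "frameSource" are assumed to be set up by the caller, and the argument
// values below are illustrative only):
//
//   FramedSource* frameSource = ...; // a source that delivers one complete
//                                    // MPEG-1 or 2 video frame per delivery
//   MPEG1or2VideoStreamDiscreteFramer* framer
//     = MPEG1or2VideoStreamDiscreteFramer::createNew(env, frameSource,
//                                                    False /*iFramesOnly*/,
//                                                    5.0 /*vshPeriod, in seconds*/,
//                                                    False /*leavePresentationTimesUnmodified*/);
//
// The resulting "framer" can then be fed to a sink such as "MPEG1or2VideoRTPSink".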

MPEG1or2VideoStreamDiscreteFramer
::MPEG1or2VideoStreamDiscreteFramer(UsageEnvironment& env,
                                    FramedSource* inputSource,
                                    Boolean iFramesOnly, double vshPeriod, Boolean leavePresentationTimesUnmodified)
  : MPEG1or2VideoStreamFramer(env, inputSource, iFramesOnly, vshPeriod,
                              False/*don't create a parser*/),
    fLeavePresentationTimesUnmodified(leavePresentationTimesUnmodified),
    fLastNonBFrameTemporal_reference(0),
    fSavedVSHSize(0), fSavedVSHTimestamp(0.0),
    fIFramesOnly(iFramesOnly), fVSHPeriod(vshPeriod) {
  fLastNonBFramePresentationTime.tv_sec = 0;
  fLastNonBFramePresentationTime.tv_usec = 0;
}

MPEG1or2VideoStreamDiscreteFramer::~MPEG1or2VideoStreamDiscreteFramer() {
}

void MPEG1or2VideoStreamDiscreteFramer::doGetNextFrame() {
  // Arrange to read data (which should be a complete MPEG-1 or 2 video frame)
  // from our data source, directly into the client's input buffer.
  // After reading this, we'll do some parsing on the frame.
  fInputSource->getNextFrame(fTo, fMaxSize,
                             afterGettingFrame, this,
                             FramedSource::handleClosure, this);
}

void MPEG1or2VideoStreamDiscreteFramer
::afterGettingFrame(void* clientData, unsigned frameSize,
                    unsigned numTruncatedBytes,
                    struct timeval presentationTime,
                    unsigned durationInMicroseconds) {
  MPEG1or2VideoStreamDiscreteFramer* source
    = (MPEG1or2VideoStreamDiscreteFramer*)clientData;
  source->afterGettingFrame1(frameSize, numTruncatedBytes,
                             presentationTime, durationInMicroseconds);
}

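// This table maps the 4-bit 'frame_rate_code' field of the MPEG video sequence
// header to a frame rate in frames/second (codes 9-15 are 'reserved'):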
static double const frameRateFromCode[] = {
  0.0,          // forbidden
  24000/1001.0, // approx 23.976
  24.0,
  25.0,
  30000/1001.0, // approx 29.97
  30.0,
  50.0,
  60000/1001.0, // approx 59.94
  60.0,
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0,          // reserved
  0.0           // reserved
};

#define MILLION 1000000

void MPEG1or2VideoStreamDiscreteFramer
::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes,
                     struct timeval presentationTime,
                     unsigned durationInMicroseconds) {
  // Check that the frame begins with an MPEG start-code prefix (0x000001):
  if (frameSize >= 4 && fTo[0] == 0 && fTo[1] == 0 && fTo[2] == 1) {
    fPictureEndMarker = True; // Assume that we have a complete 'picture' here

    u_int8_t nextCode = fTo[3];
    if (nextCode == 0xB3) { // VIDEO_SEQUENCE_HEADER_START_CODE
      // Note the following 'frame rate' code:
      if (frameSize >= 8) {
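        // ("fTo[7]" is the 8th byte of the sequence header; its high nibble
        // holds 'aspect_ratio_information' and its low nibble holds
        // 'frame_rate_code'.)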
        u_int8_t frame_rate_code = fTo[7]&0x0F;
        fFrameRate = frameRateFromCode[frame_rate_code];
      }

      // Also, save away this Video Sequence Header, in case we need it later:
      // First, figure out how big it is:
      unsigned vshSize;
      for (vshSize = 4; vshSize < frameSize-3; ++vshSize) {
        if (fTo[vshSize] == 0 && fTo[vshSize+1] == 0 && fTo[vshSize+2] == 1 &&
            (fTo[vshSize+3] == 0xB8 || fTo[vshSize+3] == 0x00)) break;
      }
      if (vshSize == frameSize-3) vshSize = frameSize; // There was nothing else following it
      if (vshSize <= sizeof fSavedVSHBuffer) {
        memmove(fSavedVSHBuffer, fTo, vshSize);
        fSavedVSHSize = vshSize;
        fSavedVSHTimestamp
          = presentationTime.tv_sec + presentationTime.tv_usec/(double)MILLION;
      }
    } else if (nextCode == 0xB8) { // GROUP_START_CODE
      // If necessary, insert a saved Video Sequence Header in front of this:
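      // (The saved header is re-inserted at most once every "fVSHPeriod"
      // seconds; this is typically done so that receivers which join the
      // stream late still get the sequence parameters that they need before
      // they can start decoding.)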
      double pts = presentationTime.tv_sec + presentationTime.tv_usec/(double)MILLION;
      if (pts > fSavedVSHTimestamp + fVSHPeriod &&
          fSavedVSHSize + frameSize <= fMaxSize) {
        memmove(&fTo[fSavedVSHSize], &fTo[0], frameSize); // make room for the header
        memmove(&fTo[0], fSavedVSHBuffer, fSavedVSHSize); // insert it
        frameSize += fSavedVSHSize;
        fSavedVSHTimestamp = pts;
      }
    }

    unsigned i = 3;
    if (nextCode == 0xB3 /*VIDEO_SEQUENCE_HEADER_START_CODE*/ ||
        nextCode == 0xB8 /*GROUP_START_CODE*/) {
      // Skip to the following PICTURE_START_CODE (if any):
      for (i += 4; i < frameSize; ++i) {
        if (fTo[i] == 0x00 /*PICTURE_START_CODE*/
            && fTo[i-1] == 1 && fTo[i-2] == 0 && fTo[i-3] == 0) {
          nextCode = fTo[i];
          break;
        }
      }
    }

    if (nextCode == 0x00 /*PICTURE_START_CODE*/ && i+2 < frameSize) {
      // Get the 'temporal_reference' and 'picture_coding_type' from the
      // following 2 bytes:
      ++i;
      unsigned short temporal_reference = (fTo[i]<<2)|(fTo[i+1]>>6);
      unsigned char picture_coding_type = (fTo[i+1]&0x38)>>3;
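      // ('temporal_reference' is a 10-bit field giving the picture's position,
      // in display order, within its group of pictures; 'picture_coding_type'
      // is the 3-bit field that follows it: 1 => I frame, 2 => P frame,
      // 3 => B frame.)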

      // If this is not an "I" frame, but we were asked for "I" frames only, then try again:
      if (fIFramesOnly && picture_coding_type != 1) {
        doGetNextFrame();
        return;
      }

      // If this is a "B" frame, then we have to tweak "presentationTime":
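      // (B frames are transmitted after the reference frame that follows them
      // in display order, so their presentation time is derived by stepping
      // backwards from the last non-B frame's presentation time, using the
      // difference in 'temporal_reference' values and the current frame rate.
      // Non-B frames instead record their presentation time and
      // 'temporal_reference' - in the "else" branch below - for use by
      // subsequent B frames.)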
      if (!fLeavePresentationTimesUnmodified && picture_coding_type == 3/*B*/
          && (fLastNonBFramePresentationTime.tv_usec > 0 ||
              fLastNonBFramePresentationTime.tv_sec > 0)) {
        int trIncrement
          = fLastNonBFrameTemporal_reference - temporal_reference;
        if (trIncrement < 0) trIncrement += 1024; // field is 10 bits in size

        unsigned usIncrement = fFrameRate == 0.0 ? 0
          : (unsigned)((trIncrement*MILLION)/fFrameRate);
        unsigned secondsToSubtract = usIncrement/MILLION;
        unsigned uSecondsToSubtract = usIncrement%MILLION;

        presentationTime = fLastNonBFramePresentationTime;
        if ((unsigned)presentationTime.tv_usec < uSecondsToSubtract) {
          presentationTime.tv_usec += MILLION;
          if (presentationTime.tv_sec > 0) --presentationTime.tv_sec;
        }
        presentationTime.tv_usec -= uSecondsToSubtract;
        if ((unsigned)presentationTime.tv_sec > secondsToSubtract) {
          presentationTime.tv_sec -= secondsToSubtract;
        } else {
          presentationTime.tv_sec = presentationTime.tv_usec = 0;
        }
      } else {
        fLastNonBFramePresentationTime = presentationTime;
        fLastNonBFrameTemporal_reference = temporal_reference;
      }
    }
  }

  // Complete delivery to the client:
  fFrameSize = frameSize;
  fNumTruncatedBytes = numTruncatedBytes;
  fPresentationTime = presentationTime;
  fDurationInMicroseconds = durationInMicroseconds;
  afterGetting(this);
}