/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2020 Live Networks, Inc. All rights reserved.
// RTP sink for Raw video
// Implementation
#include "RawVideoRTPSink.hh"

RawVideoRTPSink* RawVideoRTPSink
::createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat,
            unsigned height, unsigned width, unsigned depth,
            char const* sampling, char const* colorimetry) {
  return new RawVideoRTPSink(env, RTPgs,
                             rtpPayloadFormat,
                             height, width, depth,
                             sampling, colorimetry);
}
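
// Illustrative usage sketch (an assumption, not taken from this file): a caller that has
// already set up a UsageEnvironment ("env") and a Groupsock ("rtpGroupsock") might create
// a sink for 8-bit YCbCr-4:2:2 video roughly as follows:
//   RawVideoRTPSink* videoSink
//     = RawVideoRTPSink::createNew(env, &rtpGroupsock, 96 /*dynamic payload type*/,
//                                  720 /*height*/, 1280 /*width*/, 8 /*depth*/,
//                                  "YCbCr-4:2:2", "BT709-2");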

RawVideoRTPSink
::RawVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat,
                  unsigned height, unsigned width, unsigned depth,
                  char const* sampling, char const* colorimetry)
  : VideoRTPSink(env, RTPgs, rtpPayloadFormat, 90000, "RAW"),
    fFmtpSDPLine(NULL), fSampling(NULL), fWidth(width), fHeight(height),
    fDepth(depth), fColorimetry(NULL), fLineindex(0) {

  // Construct our "a=fmtp:" SDP line, from the supplied video format parameters:
  unsigned fmtpSDPLineMaxSize = 200; // 200 => more than enough space
  fFmtpSDPLine = new char[fmtpSDPLineMaxSize];
  sprintf(fFmtpSDPLine, "a=fmtp:%d sampling=%s;width=%u;height=%u;depth=%u;colorimetry=%s\r\n",
          rtpPayloadType(), sampling, width, height, depth, colorimetry);

  // Copy the format strings, and compute the frame parameters that depend on them:
  fSampling = strDup(sampling);
  fColorimetry = strDup(colorimetry);
  setFrameParameters();
}

RawVideoRTPSink::~RawVideoRTPSink() {
  delete[] fFmtpSDPLine;
  delete[] fSampling;
  delete[] fColorimetry;
}

char const* RawVideoRTPSink::auxSDPLine() {
  return fFmtpSDPLine;
}

void RawVideoRTPSink
::doSpecialFrameHandling(unsigned fragmentationOffset,
                         unsigned char* frameStart,
                         unsigned numBytesInFrame,
                         struct timeval framePresentationTime,
                         unsigned numRemainingBytes) {

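  // Build the RFC 4175 payload header for this packet.  It consists of a 2-byte
  // Extended Sequence Number, followed by one 6-byte record per scan-line segment
  // carried in the packet:
  //   Length (16 bits) | F (1 bit) + Line No (15 bits) | C (1 bit) + Offset (15 bits)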
  unsigned* lengths = NULL;
  unsigned* offsets = NULL;
  unsigned nbLines = getNbLineInPacket(fragmentationOffset, lengths, offsets);
  unsigned specialHeaderSize = 2 + (6 * nbLines);
  u_int8_t* specialHeader = new u_int8_t[specialHeaderSize];

  // Extended Sequence Number (not used)
  specialHeader[0] = 0;
  specialHeader[1] = 0;

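  // Append one 6-byte (Length, Line No, Offset) record for each scan-line segment: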
  for (unsigned i = 0; i < nbLines; i++) {
    // Detect the start of a new scan line, and advance the line index:
    if ((offsets[i] == 0) && fragmentationOffset != 0) {
      fLineindex = fLineindex + fFrameParameters.scanLineIterationStep;
    }

    // Set length
    specialHeader[2 + (i * 6) + 0] = lengths[i] >> 8;
    specialHeader[2 + (i * 6) + 1] = (u_int8_t)lengths[i];

    // Field Identification (false for us)
    bool fieldIdent = false;

    // Set line index
    specialHeader[2 + (i * 6) + 2] = ((fLineindex >> 8) & 0x7F) | (fieldIdent << 7);
    specialHeader[2 + (i * 6) + 3] = (u_int8_t)fLineindex;

    // Set the Continuation bit if another segment follows in this packet:
    bool continuationBit = i < nbLines - 1;

    // Set offset
    specialHeader[2 + (i * 6) + 4] = ((offsets[i] >> 8) & 0x7F) | (continuationBit << 7);
    specialHeader[2 + (i * 6) + 5] = (u_int8_t)offsets[i];
  }

  setSpecialHeaderBytes(specialHeader, specialHeaderSize);

  if (numRemainingBytes == 0) {
    // This packet contains the last (or only) fragment of the frame.
    // Set the RTP 'M' ('marker') bit:
    setMarkerBit();
    // Reset line index
    fLineindex = 0;
  }

  // Also set the RTP timestamp:
  setTimestamp(framePresentationTime);

  delete[] specialHeader;
  delete[] lengths;
  delete[] offsets;
}

Boolean RawVideoRTPSink::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/,
                                                        unsigned /*numBytesInFrame*/) const {
  // Only one frame per packet:
  return False;
}

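// Returns the size of the RFC 4175 payload header needed for the next packet:
// 2 bytes of Extended Sequence Number, plus 6 bytes per scan-line segment.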
unsigned RawVideoRTPSink::specialHeaderSize() const {
  unsigned* lengths = NULL;
  unsigned* offsets = NULL;
  unsigned nbLines = getNbLineInPacket(curFragmentationOffset(), lengths, offsets);
  delete[] lengths;
  delete[] offsets;
  return 2 + (6 * nbLines);
}

unsigned RawVideoRTPSink::getNbLineInPacket(unsigned fragOffset, unsigned*& lengths, unsigned*& offsets) const {
  unsigned rtpHeaderSize = 12; // fixed RTP header
  unsigned specialHeaderSize = 2; // Extended Sequence Number
  unsigned packetMaxSize = ourMaxPacketSize();
  unsigned nbLines = 0;
  unsigned remainingSizeInPacket;

  if (fragOffset >= fFrameParameters.frameSize) {
    envir() << "RawVideoRTPSink::getNbLineInPacket(): bad fragOffset " << fragOffset << "\n";
    return 0;
  }
  // Working arrays for the per-segment lengths and offsets:
  unsigned lengthArray[100] = {0};
  unsigned offsetArray[100] = {0};
  unsigned curDataTotalLength = 0;
  unsigned lineOffset = (fragOffset % fFrameParameters.scanLineSize);

  unsigned remainingLineSize = fFrameParameters.scanLineSize - (fragOffset % fFrameParameters.scanLineSize);
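  // Greedily pack scan-line segments into the packet.  Each segment adds 6 bytes of
  // payload header, and segment lengths are kept a multiple of the pgroup size, so
  // that a pixel group is never split across packets: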
  while (1) {
    if (packetMaxSize - specialHeaderSize - rtpHeaderSize - 6 <= curDataTotalLength) {
      break; // no room left for another line header plus data
    }

    // add one line
    nbLines++;
    specialHeaderSize += 6;

    remainingSizeInPacket = packetMaxSize - specialHeaderSize - rtpHeaderSize - curDataTotalLength;
    remainingSizeInPacket -= remainingSizeInPacket % fFrameParameters.pGroupSize; // use only a multiple of the pgroup size
    lengthArray[nbLines-1] = remainingLineSize < remainingSizeInPacket ? remainingLineSize : remainingSizeInPacket;
    offsetArray[nbLines-1] = lineOffset * fFrameParameters.scanLineIterationStep / fFrameParameters.pGroupSize;
    if (remainingLineSize >= remainingSizeInPacket) {
      break; // packet full
    }

    remainingLineSize = fFrameParameters.scanLineSize;
    curDataTotalLength += lengthArray[nbLines-1];
    lineOffset = 0;

    if (fragOffset + curDataTotalLength >= fFrameParameters.frameSize) {
      break; // end of the frame
    }
  }

  lengths = new unsigned[nbLines];
  offsets = new unsigned[nbLines];
  for (unsigned i = 0; i < nbLines; i++) {
    lengths[i] = lengthArray[i];
    offsets[i] = offsetArray[i];
  }
  return nbLines;
}

unsigned RawVideoRTPSink::computeOverflowForNewFrame(unsigned newFrameSize) const {
  unsigned initialOverflow = MultiFramedRTPSink::computeOverflowForNewFrame(newFrameSize);

  // Adjust (increase) this overflow so that the number of frame bytes that we use
  // in this packet remains a multiple of the pgroup size:
  unsigned numFrameBytesUsed = newFrameSize - initialOverflow;
  initialOverflow += numFrameBytesUsed % fFrameParameters.pGroupSize;

  return initialOverflow;
}

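// Computes fFrameParameters (pgroup size in octets, pixels per pgroup, scan-line and
// frame sizes in bytes) from the sampling mode and bit depth.  The pgroup values
// follow the RFC 4175 packing rules for each supported sampling/depth combination.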
void RawVideoRTPSink::setFrameParameters() {
  fFrameParameters.scanLineIterationStep = 1;
  if ((strncmp("RGB", fSampling, strlen(fSampling)) == 0) || (strncmp("BGR", fSampling, strlen(fSampling)) == 0)) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 3;
        fFrameParameters.nbOfPixelInPGroup = 1;
        break;
      case 10:
        fFrameParameters.pGroupSize = 15;
        fFrameParameters.nbOfPixelInPGroup = 4;
        break;
      case 12:
        fFrameParameters.pGroupSize = 9;
        fFrameParameters.nbOfPixelInPGroup = 2;
        break;
      case 16:
        fFrameParameters.pGroupSize = 6;
        fFrameParameters.nbOfPixelInPGroup = 1;
        break;
      default:
        break;
    }
  } else if ((strncmp("RGBA", fSampling, strlen(fSampling)) == 0) || (strncmp("BGRA", fSampling, strlen(fSampling)) == 0)) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 4;
        break;
      case 10:
        fFrameParameters.pGroupSize = 5;
        break;
      case 12:
        fFrameParameters.pGroupSize = 6;
        break;
      case 16:
        fFrameParameters.pGroupSize = 8;
        break;
      default:
        break;
    }
    fFrameParameters.nbOfPixelInPGroup = 1;
  } else if (strncmp("YCbCr-4:4:4", fSampling, strlen(fSampling)) == 0) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 3;
        fFrameParameters.nbOfPixelInPGroup = 1;
        break;
      case 10:
        fFrameParameters.pGroupSize = 15;
        fFrameParameters.nbOfPixelInPGroup = 4;
        break;
      case 12:
        fFrameParameters.pGroupSize = 9;
        fFrameParameters.nbOfPixelInPGroup = 2;
        break;
      case 16:
        fFrameParameters.pGroupSize = 6;
        fFrameParameters.nbOfPixelInPGroup = 1;
        break;
      default:
        break;
    }
  } else if (strncmp("YCbCr-4:2:2", fSampling, strlen(fSampling)) == 0) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 4;
        break;
      case 10:
        fFrameParameters.pGroupSize = 5;
        break;
      case 12:
        fFrameParameters.pGroupSize = 6;
        break;
      case 16:
        fFrameParameters.pGroupSize = 8;
        break;
      default:
        break;
    }
    fFrameParameters.nbOfPixelInPGroup = 2;
  } else if (strncmp("YCbCr-4:1:1", fSampling, strlen(fSampling)) == 0) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 6;
        break;
      case 10:
        fFrameParameters.pGroupSize = 15;
        break;
      case 12:
        fFrameParameters.pGroupSize = 9;
        break;
      case 16:
        fFrameParameters.pGroupSize = 12;
        break;
      default:
        break;
    }
    fFrameParameters.nbOfPixelInPGroup = 4;
  } else if (strncmp("YCbCr-4:2:0", fSampling, strlen(fSampling)) == 0) {
    switch (fDepth) {
      case 8:
        fFrameParameters.pGroupSize = 6;
        break;
      case 10:
        fFrameParameters.pGroupSize = 15;
        break;
      case 12:
        fFrameParameters.pGroupSize = 9;
        break;
      case 16:
        fFrameParameters.pGroupSize = 12;
        break;
      default:
        break;
    }
    fFrameParameters.nbOfPixelInPGroup = 4;
    fFrameParameters.scanLineIterationStep = 2; // in 4:2:0, each pgroup spans two adjacent scan lines
  }
  fFrameParameters.frameSize
    = fHeight * fWidth * fFrameParameters.pGroupSize / fFrameParameters.nbOfPixelInPGroup;
  fFrameParameters.scanLineSize
    = fWidth * fFrameParameters.pGroupSize / fFrameParameters.nbOfPixelInPGroup * fFrameParameters.scanLineIterationStep;
}