Cogs.Core
IVideoDecoder.cpp
1#include "IVideoDecoder.h"
2
3#include "H264Data.h"
4#include "H265Data.h"
5
6#include "../../Source/Renderer/RenderTexture.h"
7
8#include "Rendering/IRenderTargets.h"
9#include "Rendering/ITextures.h"
10
11#include "Foundation/Logging/Logger.h"
12
13namespace{
14 constexpr double nanosecondsPerSecond = 1000000000.0;
15
16 constexpr const char *colorSpaceName[] = {
17 "BT.601 Studio",
18 "BT.601 Full",
19 "BT.709 Studio",
20 "BT.709 Full"
21 };
22
23 constexpr const char *transferFunctionName[] = {
24 "Linear",
25 "BT.601/BT.709",
26 "sRGB",
27 "Gamma"
28 };
29
30 Cogs::Logging::Log logger = Cogs::Logging::getLogger("VideoDecoder");
31
32 int64_t now() {
33 return std::chrono::nanoseconds(std::chrono::steady_clock().now().time_since_epoch()).count();
34 }
35}
36
37void Cogs::Core::IVideoDecoder::cleanup() {
38 IGraphicsDevice* graphicsDevice = context.device;
39
40 if (HandleIsValid(texture)) {
41 graphicsDevice->getTextures()->releaseTexture(texture);
43 }
44 if (HandleIsValid(renderTarget)) {
45 graphicsDevice->getRenderTargets()->releaseRenderTarget(renderTarget);
47 }
48 if (renderTexture) {
49 RenderResources* renderResources = reinterpret_cast<RenderResources*>(context.renderer->getResources());
50
51 renderResources->destroyResource(renderTexture);
52 renderTexture = nullptr;
53 }
54
55 textureDesc = TextureDescription();
56 codec = Codec::None;
57 presentMode = VideoDecoderPresentMode::Latest;
58
61 surfaceCount = 0;
62 speedupQueueCount = 3;
63 speedupFactor = 1.0f;
64
65 decodeCallback = nullptr;
66 decodeCallbackData = nullptr;
67 presentCallback = nullptr;
68 presentCallbackData = nullptr;
69
70 chromaFormatIdc = static_cast<uint32_t>(-1);
71 chromaBitDepthMinus8 = static_cast<uint32_t>(-1);
72 lumaBitDepthMinus8 = static_cast<uint32_t>(-1);
73 codedWidth = static_cast<uint32_t>(-1);
74 codedHeight = static_cast<uint32_t>(-1);
75 displayArea = Rect();
76 outputWidth = 0;
77 outputHeight = 0;
78 colourSpace = ColourSpace::BT709Full;
79 transferFunction = TransferFunction::sRGB;
80 gamma = 1.0f / 2.2f;
81
82 clockRate = 0;
83 previousClock = 0;
84 clockResidual = 0;
85 previousClockResidual = 0;
86 checkUnderflow = false;
87
90
91 playTime = 0;
93
94
95 bufferUnderflowCounter = 0;
96 bufferUnderflowMajorStutterCounter = 0;
97 bufferOverflowCounter = 0;
98 decoderRestartCounter = 0;
99 discontinuitiesSkippedCounter = 0;
100
101 parameterSets.clear();
102}
103
// Configure the decoder from a VideoDecoderDescription: copy playback settings and
// callbacks, parse the H.264 or H.265 parameter sets to derive coded/display geometry,
// surface count and colour information, and cache the parameter-set NAL units
// (Annex-B framed) in parameterSets for later resubmission to the decoder.
void Cogs::Core::IVideoDecoder::parseVideoDescription(const VideoDecoderDescription& desc) {
    codec = desc.codec;
    presentMode = desc.present_mode;
    skipDiscontinuities = desc.skip_discontinuities;
    speedupQueueCount = desc.speedup_queue_count;
    speedupFactor = desc.speedup_factor;
    clockRate = desc.clock_rate;

    generateMipMaps = desc.generateMipMaps;

    decodeCallback = desc.decodeCallback;
    decodeCallbackData = desc.decodeCallbackData;
    presentCallback = desc.presentCallback;
    presentCallbackData = desc.presentCallbackData;

    assert((desc.codec == Codec::H264) || (desc.codec == Codec::HEVC));
    assert(!desc.sps.empty());
    assert(!desc.pps.empty());

    // Defaults used when the VUI carries no colour description; the value 2 is
    // "unspecified" in the H.264/H.265 VUI colour description tables.
    uint32_t fullRange = 0;
    uint32_t transferCharacteristics = 2;
    uint32_t matrixCoefficients = 2;

    // Reset to "unknown" before parsing.
    chromaFormatIdc = static_cast<uint32_t>(-1);
    chromaBitDepthMinus8 = static_cast<uint32_t>(-1);
    lumaBitDepthMinus8 = static_cast<uint32_t>(-1);
    codedWidth = static_cast<uint32_t>(-1);
    codedHeight = static_cast<uint32_t>(-1);

    if (desc.codec == Codec::HEVC) {
        assert(!desc.vps.empty());

        H265ParseData parse = {};

        // Parse VPS, SPS and PPS NAL units into `parse`.
        video_parameter_set_nalu(parse, reinterpret_cast<const uint8_t*>(desc.vps.data()), desc.vps.size());
        seq_parameter_set_nalu(parse, reinterpret_cast<const uint8_t*>(desc.sps.data()), desc.sps.size());
        pic_parameter_set_nalu(parse, reinterpret_cast<const uint8_t*>(desc.pps.data()), desc.pps.size());

        parse.video_set.print();
        parse.seq_set.print();
        if (parse.seq_set.vui_parameters_present_flag) {
            parse.vui.print();
        }
        parse.pic_set.print();

        // Pick up colour description from the VUI when it is present.
        if (parse.seq_set.vui_parameters_present_flag &&
            parse.vui.video_signal_type_present_flag &&
            parse.vui.colour_description_present_flag) {
            fullRange = parse.vui.video_full_range_flag;
            transferCharacteristics = parse.vui.transfer_characteristics;
            matrixCoefficients = parse.vui.matrix_coefficients;
        }

        // Chroma subsampling factors: idc 1 => 4:2:0 (2,2), idc 2 => 4:2:2 (2,1),
        // otherwise no subsampling (1,1).
        uint32_t SubWidthC = 1;
        uint32_t SubHeightC = 1;

        if (parse.seq_set.chroma_format_idc == 1) {
            SubWidthC = 2;
            SubHeightC = 2;
        }
        else if (parse.seq_set.chroma_format_idc == 2) {
            SubWidthC = 2;
        }
        chromaFormatIdc = parse.seq_set.chroma_format_idc;
        chromaBitDepthMinus8 = parse.seq_set.bit_depth_chroma_minus8;
        lumaBitDepthMinus8 = parse.seq_set.bit_depth_luma_minus8;
        codedWidth = parse.seq_set.pic_width_in_luma_samples;
        codedHeight = parse.seq_set.pic_height_in_luma_samples;

        // The conformance window offsets are in chroma units; scale to luma samples.
        displayArea.left = SubWidthC * parse.seq_set.conf_win_left_offset;
        displayArea.top = SubHeightC * parse.seq_set.conf_win_top_offset;
        displayArea.right = codedWidth - (SubWidthC * parse.seq_set.conf_win_right_offset);
        displayArea.bottom = codedHeight - (SubHeightC * parse.seq_set.conf_win_bottom_offset);

        // Surface Count
        // surface_count = 6 + num_b_frames;
        surfaceCount = 6 + parse.video_set.vps_max_dec_pic_buffering_minus1[0]; // TODO this needs further investigation.
        // surface_count = max_surface_count;
    }
    else if (desc.codec == Codec::H264) {
        H264ParseData parse = {};

        // H.264 has no VPS; parse SPS and PPS only.
        seq_parameter_set_nalu(parse, reinterpret_cast<const uint8_t*>(desc.sps.data()), desc.sps.size());
        pic_parameter_set_nalu(parse, reinterpret_cast<const uint8_t*>(desc.pps.data()), desc.pps.size());

        parse.seq_set.print();
        if (parse.seq_set.vui_parameters_present_flag) {
            parse.vui.print();
        }
        parse.pic_set.print();

        // Pick up colour description from the VUI when it is present.
        if (parse.seq_set.vui_parameters_present_flag &&
            parse.vui.video_signal_type_present_flag &&
            parse.vui.colour_description_present_flag) {
            fullRange = parse.vui.video_full_range_flag;
            transferCharacteristics = parse.vui.transfer_characteristics;
            matrixCoefficients = parse.vui.matrix_coefficients;
        }

        // Chroma subsampling factors; left as -1 for configurations where they
        // are undefined (monochrome, or 4:4:4 with separate colour planes).
        uint32_t subWidthC = (uint32_t)-1;
        uint32_t subHeightC = (uint32_t)-1;

        if (parse.seq_set.chroma_format_idc == 1) {
            subWidthC = 2;
            subHeightC = 2;
        }
        else if (parse.seq_set.chroma_format_idc == 2) {
            subWidthC = 2;
            subHeightC = 1;
        }
        else if ((parse.seq_set.chroma_format_idc == 3) && (parse.seq_set.separate_colour_plane_flag == 0)) {
            subWidthC = 1;
            subHeightC = 1;
        }

        // Derived quantities per the H.264 spec's SPS semantics; coded sizes are in
        // 16x16 macroblock units.
        uint32_t chromaArrayType = (parse.seq_set.separate_colour_plane_flag == 0) ? parse.seq_set.chroma_format_idc : 0;
        uint32_t picWidthInMbs = parse.seq_set.pic_width_in_mbs_minus1 + 1;
        uint32_t picWidthInSamplesLuma = picWidthInMbs * 16;
        uint32_t picHeightInMapUnits = parse.seq_set.pic_height_in_map_units_minus1 + 1;
        uint32_t frameHeightInMbs = (2 - parse.seq_set.frame_mbs_only_flag) * picHeightInMapUnits;

        chromaFormatIdc = parse.seq_set.chroma_format_idc;
        chromaBitDepthMinus8 = parse.seq_set.bit_depth_chroma_minus8;
        lumaBitDepthMinus8 = parse.seq_set.bit_depth_luma_minus8;
        codedWidth = picWidthInSamplesLuma;
        codedHeight = frameHeightInMbs * 16;

        if (!parse.seq_set.frame_cropping_flag) {
            // No cropping: display area is the full coded picture.
            displayArea.left = 0;
            displayArea.top = 0;
            displayArea.right = codedWidth;
            displayArea.bottom = codedHeight;
        }
        else {
            // Crop offsets are given in crop units, which depend on chroma format
            // and frame/field coding.
            uint32_t cropUnitX;
            uint32_t cropUnitY;

            if (chromaArrayType == 0) {
                cropUnitX = 1;
                cropUnitY = 2 - parse.seq_set.frame_mbs_only_flag;
            }
            else {
                cropUnitX = subWidthC;
                cropUnitY = subHeightC * (2 - parse.seq_set.frame_mbs_only_flag);
            }
            displayArea.left = cropUnitX * parse.seq_set.frame_crop_left_offset; // TODO
            displayArea.top = cropUnitY * parse.seq_set.frame_crop_top_offset;
            displayArea.right = picWidthInSamplesLuma - (cropUnitX * parse.seq_set.frame_crop_right_offset);
            displayArea.bottom = (16 * frameHeightInMbs) - (cropUnitY * parse.seq_set.frame_crop_bottom_offset);
        }

        // Surface Count
        // H264 0 b-frames
        // log2_max_frame_num_minus4 4 (256)
        // pic_order_cnt_type 2
        // num_ref_frames 1

        // H264 1 b-frames
        // log2_max_frame_num_minus4 4 (256)
        // pic_order_cnt_type 0
        // log2_max_pic_order_cnt_lsb_minus4 4 (256)
        // num_ref_frames 4

        // H264 2 b-frames
        // log2_max_frame_num_minus4 4 (256)
        // pic_order_cnt_type 0
        // log2_max_pic_order_cnt_lsb_minus4 4 (256)
        // num_ref_frames 4

        // H264 4 b-frames
        // log2_max_frame_num_minus4 4 (256)
        // pic_order_cnt_type 0
        // log2_max_pic_order_cnt_lsb_minus4 4 (256)
        // num_ref_frames 4

        // surface_count = 6 + num_b_frames;
        surfaceCount = 6 + parse.seq_set.num_ref_frames; // TODO this needs further investigation.
        // surface_count = max_surface_count;
    }

    // Output size is the cropped display area.
    outputWidth = abs(displayArea.right - displayArea.left);
    outputHeight = abs(displayArea.bottom - displayArea.top);

    // Cache the parameter sets with Annex-B start codes: [start][VPS]? [start][SPS] [start][PPS].
    // The initial size covers the two mandatory start codes (SPS + PPS).
    constexpr uint8_t startCode[] = {0x00u, 0x00u, 0x00u, 0x01u};
    size_t parameterSetSize = sizeof(startCode) * 2;
    uint8_t* dest;

    if (!desc.vps.empty()) {
        parameterSetSize += sizeof(startCode) + desc.vps.size();
    }
    parameterSetSize += desc.sps.size();
    parameterSetSize += desc.pps.size();

    parameterSets.resize(parameterSetSize);
    dest = parameterSets.data();

    if (!desc.vps.empty()) {
        memcpy(dest, startCode, sizeof(startCode));
        dest += sizeof(startCode);
        memcpy(dest, desc.vps.data(), desc.vps.size());
        dest += desc.vps.size();
    }
    memcpy(dest, startCode, sizeof(startCode));
    dest += sizeof(startCode);
    memcpy(dest, desc.sps.data(), desc.sps.size());
    dest += desc.sps.size();

    memcpy(dest, startCode, sizeof(startCode));
    dest += sizeof(startCode);
    memcpy(dest, desc.pps.data(), desc.pps.size());
    dest += desc.pps.size();

    LOG_TRACE(logger, "Coded size: %u %u", codedWidth, codedHeight);
    LOG_TRACE(logger, "Display area: %d,%d - %d,%d", displayArea.left, displayArea.top, displayArea.right, displayArea.bottom);
    LOG_TRACE(logger, "Output size: %u %u", outputWidth, outputHeight);

    // Map VUI matrix coefficients to the engine's ColourSpace enum.
    // 1 = BT.709, 5/6 = BT.601 variants; 2 = unspecified (keep current value).
    if (matrixCoefficients == 1) {
        colourSpace = fullRange ? ColourSpace::BT709Full : ColourSpace::BT709;
    }
    else if (matrixCoefficients == 2) { // Assume BT.709
    }
    else if ((matrixCoefficients == 5) || (matrixCoefficients == 6)) {
        colourSpace = fullRange ? ColourSpace::BT601Full : ColourSpace::BT601;
    }
    else{
        LOG_ERROR(logger, "Unsupported matrix coefficients:");
        LOG_ERROR(logger, "video_full_range_flag %d", fullRange);
        LOG_ERROR(logger, "matrixCoefficients %d", matrixCoefficients);
    }
    LOG_TRACE(logger, "Colour space: %s", colorSpaceName[static_cast<uint32_t>(colourSpace)]);

    // Map VUI transfer characteristics to the engine's TransferFunction enum and
    // an inverse-gamma value; 2 = unspecified (keep current value).
    if ((transferCharacteristics == 1) || (transferCharacteristics == 6)) {
        transferFunction = TransferFunction::BT601;
        gamma = 1.0f / 2.0f;
    }
    else if (transferCharacteristics == 2) {
    }
    else if (transferCharacteristics == 4) {
        transferFunction = TransferFunction::Gamma;
        gamma = 1.0f / 2.2f;
    }
    else if (transferCharacteristics == 5) {
        transferFunction = TransferFunction::Gamma;
        gamma = 1.0f / 2.8f;
    }
    else if (transferCharacteristics == 8) {
        transferFunction = TransferFunction::Linear;
        gamma = 1.0f;
    }
    else if (transferCharacteristics == 13) {
        transferFunction = TransferFunction::sRGB;
        gamma = 1.0f / 2.2f;
    }
    else {
        LOG_ERROR(logger, "Unsupported transferCharacteristics");
        LOG_ERROR(logger, "transferCharacteristics %d", transferCharacteristics);
    }
    LOG_TRACE(logger, "Transfer function %s (Gamma %f)", transferFunctionName[static_cast<uint32_t>(transferFunction)], gamma);
}
363
364void Cogs::Core::IVideoDecoder::createTexture() {
365 IGraphicsDevice* graphicsDevice = context.device;
366
367 textureDesc = TextureDescription();
368 textureDesc.width = outputWidth;
369 textureDesc.height = outputHeight;
370 textureDesc.flags = TextureFlags::RenderTarget;
371
372 if (generateMipMaps) {
373 textureDesc.flags |= TextureFlags::GenerateMipMaps;
374 }
375 if (graphicsDevice->getType() == GraphicsDeviceType::OpenGL20) {
376 textureDesc.format = TextureFormat::R8G8B8A8_UNORM; // TODO
377 }
378 else {
379 textureDesc.format = TextureFormat::R8G8B8A8_UNORM_SRGB;
380 }
381 texture = graphicsDevice->getTextures()->loadTexture(textureDesc, nullptr);
382 renderTarget = graphicsDevice->getRenderTargets()->createRenderTarget(texture);
383
384 RenderResources* renderResources = static_cast<RenderResources*>(context.renderer->getResources());
385
386 if (renderTexture) {
387 renderResources->destroyResource(renderTexture);
388 }
389 renderTexture = renderResources->createRenderTexture();
390 renderTexture->setName("VideoTexture");
391 renderTexture->setPersistent();
392 renderTexture->setOwned();
393 renderTexture->description = textureDesc;
394 renderTexture->textureHandle = texture;
395 renderTexture->width = textureDesc.width;
396 renderTexture->height = textureDesc.height;
397 renderTexture->levels = textureDesc.levels;
398 renderTexture->layers = textureDesc.layers;
399 renderTexture->faces = textureDesc.faces;
400}
401
// Pick which of the queued decoded frames (indices 0..queueLength-1, in presentation
// order) should be displayed now, advancing the pacing clock state as a side effect.
// Returns the chosen queue index, or -1 if no frame should be presented this call.
int Cogs::Core::IVideoDecoder::selectDisplayFrame(size_t queueLength) {
    int frameToDisplay = -1;

    switch (presentMode) {
    // Smooth: Present frames with constant time difference specified in packet time stamps
    case VideoDecoderPresentMode::Smooth: {
        int64_t currentClock = now();
        int64_t diff = currentClock - previousClock;

        // Accumulate elapsed wall-clock time (converted to clock_rate units) into the
        // residual, but only once playback has started (non-zero times/clock).
        if ((presentationTime != 0) && (playTime != 0) && (previousClock > 0) && (diff > 0)) {
            clockResidual += static_cast<int64_t>(static_cast<double>(diff) * clockRate / nanosecondsPerSecond);
        }
        previousClock = currentClock;

        // Check for underflow
        if (queueLength && checkUnderflow) { // We only know if we underflow when we know the next timestamp
            int64_t nextPresentationTime = getPresentationTime(0);
            int64_t presentationTimeDiff = nextPresentationTime - presentationTime;

            if (previousClockResidual > presentationTimeDiff) {
                ++bufferUnderflowCounter;

                // A residual over 1.5x the frame interval counts as a major stutter.
                if (static_cast<double>(previousClockResidual) > (1.5 * presentationTimeDiff)) {
                    ++bufferUnderflowMajorStutterCounter;
                }
                // Clamp the residual so the underflow does not cause a burst of frames.
                clockResidual = presentationTimeDiff + (clockResidual - previousClockResidual);
            }
            checkUnderflow = false;
        }
        else if(queueLength == 0) {
            // Empty queue: remember the residual so the next frame can detect underflow.
            checkUnderflow = true;
            previousClockResidual = clockResidual;
        }
        // TODO: Use these if there are no packet timestamps
        // int num = video_format.frame_rate.numerator;
        // int denom = video_format.frame_rate.denominator;

        // Loop over queued frames
        for (size_t queueIdx = 0; queueIdx < queueLength; ++queueIdx) {
            int64_t nextPresentationTime = getPresentationTime(queueIdx);

            // TODO use num denom if nextPresentationTime is 0
            // Backwards timestamp => stream restarted; reset pacing state.
            if (nextPresentationTime < presentationTime) {
                LOG_TRACE(logger, "Timestamp less than last timestamp. Resetting decoder time.");
                ++decoderRestartCounter;
                clockResidual = 0;
                presentationTime = 0;
                previousPresentationTime = 0;
                playTime = 0;
                previousPlayTime = 0;
            }
            if ((presentationTime != 0) && (playTime != 0)) { // Always use first frame
                assert(nextPresentationTime != 0);

                int64_t presentationTimeDiff = nextPresentationTime - presentationTime;

                // Check for discontinuities
                if (skipDiscontinuities) {
                    // Treat gaps longer than discontinuityDuration as zero-length.
                    if ((static_cast<double>(presentationTimeDiff) / clockRate) > discontinuityDuration) {
                        ++discontinuitiesSkippedCounter;
                        presentationTimeDiff = 0;
                    }
                }
                if (clockResidual < presentationTimeDiff) {
                    break; // Break if next frame time diff is not reached
                }
                // Consume this frame's interval from the residual and move on.
                clockResidual -= presentationTimeDiff;
            }
            // Use frame
            previousPresentationTime = presentationTime;
            previousPlayTime = playTime;
            presentationTime = nextPresentationTime;
            playTime = currentClock;
            frameToDisplay = static_cast<int>(queueIdx);
        }
        break;
    }

    // Realtime: Same as smooth but speed up playback if queue gets large
    case VideoDecoderPresentMode::Realtime: {
        int64_t currentClock = now();
        int64_t diff = currentClock - previousClock;

        // Accumulate elapsed wall-clock time (converted to clock_rate units), as in Smooth.
        if ((presentationTime != 0) && (playTime != 0) && (previousClock != 0) && (diff > 0)) {
            clockResidual += static_cast<int64_t>(static_cast<double>(diff) * clockRate / nanosecondsPerSecond);
        }
        // When the queue has grown past speedupQueueCount, add extra residual so
        // playback catches up at speedupFactor beyond real time.
        if ((previousClock != 0) && (diff > 0) && (queueLength >= speedupQueueCount)) {
            int64_t nextPresentationTime = getPresentationTime(0);
            int64_t presentationTimeDiff = nextPresentationTime - presentationTime;

            clockResidual += static_cast<int64_t>(std::max(static_cast<double>(presentationTimeDiff) * diff * speedupFactor / nanosecondsPerSecond, 0.0));
        }
        previousClock = currentClock;

        // Check for underflow
        if (checkUnderflow && (queueLength != 0)) { // We only know if we underflow when we know the next timestamp
            int64_t nextPresentationTime = getPresentationTime(0);
            int64_t presentationTimeDiff = nextPresentationTime - presentationTime;

            if (previousClockResidual > presentationTimeDiff) {
                ++bufferUnderflowCounter;

                // A residual over 1.5x the frame interval counts as a major stutter.
                if (static_cast<double>(previousClockResidual) > (1.5 * presentationTimeDiff)) {
                    bufferUnderflowMajorStutterCounter++;
                }
                // Clamp the residual so the underflow does not cause a burst of frames.
                clockResidual = presentationTimeDiff + (clockResidual - previousClockResidual);
            }
            checkUnderflow = false;
        }
        else if (queueLength == 0) {
            // Empty queue: remember the residual so the next frame can detect underflow.
            checkUnderflow = true;
            previousClockResidual = clockResidual;
        }
        // TODO: Use these if there are no packet timestamps
        // int num = video_format.frame_rate.numerator;
        // int denom = video_format.frame_rate.denominator;

        // Loop over queued frames
        for (size_t queueIdx = 0; queueIdx < queueLength; ++queueIdx) {
            int64_t nextPresentationTime = getPresentationTime(queueIdx);

            // TODO use num denom if nextPresentationTime is 0
            // Backwards timestamp => stream restarted; reset pacing state.
            if (nextPresentationTime < presentationTime) {
                LOG_TRACE(logger, "Timestamp less than last timestamp. Resetting decoder time.");
                ++decoderRestartCounter;
                clockResidual = 0;
                previousPresentationTime = 0;
                previousPlayTime = 0;
                presentationTime = 0;
                playTime = 0;
            }
            if ((presentationTime != 0) && (playTime != 0)) { // Always use first frame
                assert(nextPresentationTime != 0);

                int64_t presentationTimeDiff = nextPresentationTime - presentationTime;

                // Check for discontinuities
                if (skipDiscontinuities) {
                    // Treat gaps longer than discontinuityDuration as zero-length.
                    if ((static_cast<double>(presentationTimeDiff) / clockRate) > discontinuityDuration) {
                        ++discontinuitiesSkippedCounter;
                        presentationTimeDiff = 0;
                    }
                }
                if (clockResidual < presentationTimeDiff) {
                    break; // Break if next frame time diff is not reached
                }
                // Consume this frame's interval from the residual and move on.
                clockResidual -= presentationTimeDiff;
            }
            // Use frame
            previousPresentationTime = presentationTime;
            previousPlayTime = playTime;
            presentationTime = nextPresentationTime;
            playTime = currentClock;
            frameToDisplay = static_cast<int>(queueIdx);
        }
        break;
    }

    // Latest: Always use the last frame in the present queue
    case VideoDecoderPresentMode::Latest: {
        int64_t currentClock = now();

        // No pacing in this mode: zero the residual and just walk the whole queue,
        // leaving frameToDisplay at the last (newest) entry.
        clockResidual = 0;
        previousClock = currentClock;

        // Loop over queued frames
        for (size_t queueIdx = 0; queueIdx < queueLength; ++queueIdx) {
            int64_t nextPresentationTime = getPresentationTime(queueIdx);

            // Backwards timestamp => stream restarted; reset pacing state.
            if (nextPresentationTime < presentationTime) {
                LOG_TRACE(logger, "Timestamp less than last timestamp. Resetting decoder time.");
                ++decoderRestartCounter;
                clockResidual = 0;
                previousPresentationTime = 0;
                previousPlayTime = 0;
                presentationTime = 0;
                playTime = 0;
            }
            // Use frame
            previousPresentationTime = presentationTime;
            previousPlayTime = playTime;
            presentationTime = nextPresentationTime;
            playTime = currentClock;
            frameToDisplay = static_cast<int>(queueIdx);
        }
        break;
    }
    }
    return frameToDisplay;
}
class IRenderer * renderer
Renderer.
Definition: Context.h:228
virtual IRenderResources * getResources()=0
Get the render resources interface.
int64_t clockResidual
Used for pacing frames (clock_rate units)
float gamma
Gamma value to be used when transferFunction equals TransferFunction::Gamma.
int64_t previousPlayTime
play time (realtime ns)
double discontinuityDuration
Definition of a discontinuity (seconds)
std::vector< uint8_t > parameterSets
Cached VPS, SPS, and PPS data.
int64_t previousClock
Previous monotonic clock now() (ns)
int64_t presentationTime
Frame timestamp (clock_rate units)
int64_t previousPresentationTime
Frame timestamp (clock_rate units)
VideoDecoderPresentMode presentMode
How will frames be presented.
VideoPresentCallback * presentCallback
Called when a frame is chosen for presentation.
bool skipDiscontinuities
Are discontinuities skipped?
int64_t playTime
play time (realtime ns)
VideoDecoderCallback * decodeCallback
Called when a frame is submitted to the decoder.
Log implementation class.
Definition: LogManager.h:139
bool HandleIsValid(const ResourceHandle_t< T > &handle)
Check if the given resource is valid, that is not equal to NoHandle or InvalidHandle.
constexpr Log getLogger(const char(&name)[LEN]) noexcept
Definition: LogManager.h:180
@ OpenGL20
Graphics device using OpenGL, supporting at least OpenGL 2.0.
static const Handle_t NoHandle
Represents a handle to nothing.
Definition: Common.h:77
@ RenderTarget
The texture can be used as a render target and drawn into.
Definition: Flags.h:120
@ GenerateMipMaps
The texture supports automatic mipmap generation performed by the graphics device.
Definition: Flags.h:124