/* (scrape artifact, not part of the source: page timestamp "5 years ago, Jul 16, 2020, 03:32 PM") */
1/*
2 * Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License, version 2.1, as published by the Free Software Foundation.
7 *
8 * This library is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
11 * Lesser General Public License for more details.
12 *
13 * You should have received a copy of the GNU Lesser General Public
14 * License along with this program. If not, see
15 * <http://www.gnu.org/licenses/>.
16 */
17
18//uint32_t slice_count
19//int32_t field_order_cnt [2]
20//VdpBool is_reference
21//VdpReferenceFrameH264 referenceFrames [16]
22//
23//uint16_t frame_num -> DONE HDR
24//uint8_t field_pic_flag -> DONE HDR
25//uint8_t bottom_field_flag -> DONE HDR
26//uint8_t num_ref_frames -> DONE SPS
27//uint8_t mb_adaptive_frame_field_flag -> DONE SPS
28//uint8_t constrained_intra_pred_flag -> DONE PPS
29//uint8_t weighted_pred_flag -> DONE PPS
30//uint8_t weighted_bipred_idc -> DONE PPS
31//uint8_t frame_mbs_only_flag -> DONE SPS
32//uint8_t transform_8x8_mode_flag -> DONE PPS
33//int8_t chroma_qp_index_offset -> DONE PPS
34//int8_t second_chroma_qp_index_offset -> DONE PPS
35//int8_t pic_init_qp_minus26 -> DONE PPS
36//uint8_t num_ref_idx_l0_active_minus1 -> DONE PPS
37//uint8_t num_ref_idx_l1_active_minus1 -> DONE PPS
38//uint8_t log2_max_frame_num_minus4 -> DONE SPS
39//uint8_t pic_order_cnt_type -> DONE SPS
40//uint8_t log2_max_pic_order_cnt_lsb_minus4 -> DONE SPS
41//uint8_t delta_pic_order_always_zero_flag -> DONE SPS
42//uint8_t direct_8x8_inference_flag -> DONE SPS
43//uint8_t entropy_coding_mode_flag -> DONE PPS
44//uint8_t pic_order_present_flag -> DONE PPS
45//uint8_t deblocking_filter_control_present_flag -> DONE PPS
46//uint8_t redundant_pic_cnt_present_flag -> DONE PPS
47//uint8_t scaling_lists_4x4 [6][16] -> DONE SPS / PPS
48//uint8_t scaling_lists_8x8 [2][64] -> DONE SPS / PPS
49
50#include <assert.h>
51#include <inttypes.h>
52#include <stdio.h>
53#include <stdlib.h>
54#include <stdint.h>
55#include <string.h>
56#include <sys/time.h>
57#include <time.h>
58#include <unistd.h>
59
60#include <gst/codecparsers/gsth264parser.h>
61
62#include <vdpau/vdpau_x11.h>
63
64#include "win_x11.h"
65
66#define ARSIZE(x) (sizeof(x) / sizeof((x)[0]))
67
68#define NALU_BUFFER_LENGTH 4194304
69#define H264_MAX_REFERENCES 16
70#define MAX_FRAMES 25
71#define MAX_WIN_WIDTH 1920
72#define MAX_WIN_HEIGHT 1200
73
74#define CHECK_STATE \
75 if (vdp_st != VDP_STATUS_OK) { \
76 const char* string_error = vdp_get_error_string(vdp_st);\
77 fprintf(stderr, "VDPAU error '%s' at %s:%d\n", string_error, __FILE__, __LINE__); \
78 exit(1); \
79 }
80
81// TODO: remove them form global
82static uint32_t vid_width = 0;
83static uint32_t vid_height = 0;
84
typedef struct {
    /* State carried from picture to picture by the picture order count
     * (POC) derivation of H.264 clause 8.2.1. Updated only for reference
     * pictures (nal_ref_idc != 0) when pic_order_cnt_type == 0. */
    struct
    {
        int lsb;    /* prevPicOrderCntLsb of the previous reference picture */
        int msb;    /* prevPicOrderCntMsb of the previous reference picture */
    } prevPicOrderCnt;
    unsigned prevFrameNum;          /* only used by the not-yet-implemented pic_order_cnt_type 1/2 paths */
    unsigned prevFrameNumOffset;    /* only used by the not-yet-implemented pic_order_cnt_type 1/2 paths */
    int prevRefPictureTFOC;         /* top field order count of the previous reference picture */
    int prevRefPictureIsBottomField; /* nonzero if the previous reference picture was a bottom field */
//    int prevRefPictureHasMMCO5; //No yet handled
} picture_order_count_context;
97
typedef struct {
    /* Result of the POC computation for one picture (8.2.1, eq. 8-1). */
    int picture_order_count;  /* PicOrderCnt: min(top, bottom) for frames, else the coded field's FOC */
    int top_foc;              /* TopFieldOrderCnt */
    int bottom_foc;           /* BottomFieldOrderCnt */
} picture_order_count;
103
typedef struct
{
    /* All mutable state of this sample player: the GStreamer parser
     * objects the bitstream is fed through, the POC bookkeeping, and the
     * VDPAU decode/present objects. Zero-initialized in main(). */

    // GStreamer entities (heap-allocated by allocate_gst_objects())
    GstH264NalParser* parser;
    GstH264NalUnit* nalu;     /* most recently identified NAL unit */
    GstH264SliceHdr* slice;   /* most recently parsed slice header */
    GstH264SPS* sps;
    GstH264PPS* pps;
    GstH264SEIMessage* sei;
    picture_order_count_context poc;

    // VDPAU entities (created lazily by create_vdpau_objects())
    VdpDecoder decoder;
    VdpVideoSurface scratch_frames[H264_MAX_REFERENCES]; /* decode targets / DPB surfaces */
    VdpOutputSurface outputSurfaces[1];                  /* single presentation surface */
    VdpVideoMixer videoMixer;
    VdpRect outRect;      /* full window rect (clip rect for presentation) */
    VdpRect outRectVid;   /* aspect-corrected video rect inside outRect */

    int8_t vdpau_initialized; /* set once create_vdpau_objects() has run */
    int8_t error_detected;    /* set by the VDPAU preemption callback */
} h264_decoder_context;
126
127static inline void h264_poc_context_init(picture_order_count_context *pPOC )
128{
129 pPOC->prevPicOrderCnt.lsb = 0;
130 pPOC->prevPicOrderCnt.msb = 0;
131 pPOC->prevFrameNum = 0;
132 pPOC->prevFrameNumOffset = 0;
133 pPOC->prevRefPictureIsBottomField = 0;
134// pPOC->prevRefPictureHasMMCO5 = 0; // Not yet handled
135}
136
137static void error_notifier(VdpDevice device, void *data)
138{
139 h264_decoder_context* context = (h264_decoder_context*)data;
140 printf("Error Notifier called!\n");
141 context->error_detected = 1;
142}
143
static inline void print_usage(const char* program_name)
{
    /* Explain the expected command line and terminate; the player cannot
     * run without an input elementary stream. */
    fprintf(stderr,
            "Usage:\n"
            "%s elementary_stream.h264\n",
            program_name);

    exit(1);
}
151
152static int free_gst_objects(h264_decoder_context* context)
153{
154 free(context->nalu);
155 free(context->slice);
156 free(context->sps);
157 free(context->pps);
158 free(context->sei);
159
160 return 0;
161}
162
163static int allocate_gst_objects(h264_decoder_context* context)
164{
165 context->slice = calloc(1, sizeof(GstH264SliceHdr));
166 if(context->slice == NULL) goto failure;
167
168 context->sps = calloc(1, sizeof(GstH264SPS));
169 if(context->sps == NULL) goto failure;
170
171 context->pps = calloc(1, sizeof(GstH264PPS));
172 if(context->pps == NULL) goto failure;
173
174 context->sei = calloc(1, sizeof(GstH264SEIMessage));
175 if(context->sei == NULL) goto failure;
176
177 context->nalu = calloc(1, sizeof(GstH264NalUnit));
178 if(context->nalu == NULL) goto failure;
179
180 return 0;
181
182 failure:
183 free_gst_objects(context);
184 return -1;
185}
186
static int check_eof(FILE *file)
{
    /* Return -1 once the stream's end-of-file indicator is set, 0 otherwise.
     * Note: feof() only reports EOF after a read has already hit it. */
    return feof(file) ? -1 : 0;
}
196
/*
 * Scan `file` for the next 3-byte start code (00 00 01), then read one
 * complete NAL unit — including its leading start code and the start code
 * of the following unit (the "one past the end" read the decode API
 * wants) — into `buf`. `*nal_length` receives the number of bytes placed
 * in buf. Returns 0 on success, -1 on EOF or on an oversized NALU.
 *
 * NOTE(review): buf is declared const void* but is written through a
 * cast below — the parameter should arguably be plain void*.
 */
static int get_next_nal_unit(FILE* file, const void* buf, int* nal_length)
{
    int found = 0;
    long start_pos = 0, end_pos = 0;
    int nals = 0;

    // Start by finding the offsets of the first NAL unit in this file
    while (!(found))
    {
        uint8_t a, b, c;
        a = (uint8_t) fgetc(file);
        b = (uint8_t) fgetc(file);
        c = (uint8_t) fgetc(file);

        if (check_eof(file)) {
            return -1;
        }

        if (a == 0x00 &&
            b == 0x00 &&
            c == 0x01)
        {
            // Found a start code prefix.
            start_pos = ftell(file);
            end_pos = 0;
            nals++;

            /* NOTE(review): found is set before the outer loop is
             * re-tested, so nals can only ever reach 1 per call; this
             * MAX_FRAMES guard appears to be dead code. */
            if(nals == MAX_FRAMES) {
                break;
            }

            // Now find the position of the next start code prefix, or the end of the file
            for(;;)
            {
                a = (uint8_t) fgetc(file);
                b = (uint8_t) fgetc(file);
                c = (uint8_t) fgetc(file);

                if (a == 0x00 &&
                    b == 0x00 &&
                    c == 0x01)
                {
                    end_pos = ftell(file);
                    found = 1;
                    break;
                }
                else if (check_eof(file))
                {
                    /* Good enough. Found end of file. */
                    end_pos = ftell(file);
                    found = 1;
                    break;
                }
                else
                {
                    // Not a start code prefix. Slide the 3-byte window forward by one.
                    fseek(file, -2, SEEK_CUR);
                }
            }
        }
        else
        {
            // Not a start code prefix. Slide the 3-byte window forward by one.
            fseek(file, - 2, SEEK_CUR);
        }
    }

    /* +4 = 3 bytes of leading start code plus one byte past the end. */
    *nal_length = end_pos-start_pos+4;
    if (*nal_length > NALU_BUFFER_LENGTH)
    {
        fprintf(stderr,"Skipping jumbo sized NALU of size %x\n", *nal_length);
        fseek(file, -2, SEEK_CUR);
        return -1;
    }

    // Have a start and end position. Grab a NAL unit. Rewind an additional 3 to grab the start code
    fseek(file, start_pos - end_pos - 3, SEEK_CUR);

    // Make sure to include the trailing end code?
    // That's an additional 3 to read for the start.
    // API requires a read of one past the end.
    /* NOTE(review): fread return value is unchecked; a short read would
     * leave stale bytes at the end of buf. */
    fread((void *)buf, sizeof(uint8_t), *nal_length, file);

    // TODO Check for EOF padding
    if (check_eof(file))
    {
        // Need to add a start code after the last NAL unit in the file
        uint8_t eos[3] = { 0x0, 0x0, 0x1 };
        memcpy((void *)(buf + *nal_length - 1), (const void *)&eos, 3);
        *nal_length = *nal_length + 3;
    }

    if(end_pos > 0)
    {
        /* Put file pointer just before start code for next frame. */
        fseek(file, -4, SEEK_CUR);
    }

    return 0;
}
297
/*
 * Peek at the NAL unit starting at the current file position without
 * consuming it: skip the 00 00 01 start code, read the header bytes,
 * log them, and restore the file position. Returns -1 on EOF; otherwise
 * the top bit of the third header byte for in-range types, else 1.
 *
 * NOTE(review): the bit layout decoded here (6-bit type, 6-bit layer id,
 * 3-bit temporal_id_plus1) is the two-byte HEVC nal_unit_header() of
 * H.265 7.3.1.2, not the single-byte H.264 header — this helper looks
 * inherited from an HEVC sample player. It is only referenced from
 * commented-out code in main().
 */
static int peek_next_nal_unit(FILE* file)
{
    long start_pos;
    uint8_t a, b, c, type, layer_id, temporal_id;
    uint16_t nal_unit_header;

    /* Skip the first three bytes, should be 0x0 0x0 0x1. */
    fseek(file, 3, SEEK_CUR);
    if(check_eof(file))
    {
        fseek(file, -3, SEEK_CUR);
        return -1;
    }
    /* nal_unit_header begins after start code prefix. */
    start_pos = ftell(file);
    a = (uint8_t) fgetc(file);
    b = (uint8_t) fgetc(file);
    c = (uint8_t) fgetc(file);
    if(check_eof(file))
    {
        fseek(file, -3, SEEK_CUR);
        return -1;
    }
    nal_unit_header = a<<8 | b;
    type = (nal_unit_header & 0x7e00) >> 9;
    layer_id = (nal_unit_header & 0x1f8 ) >> 3;
    temporal_id = (nal_unit_header & 0x7) - 1;
    printf("NALU at 0x%08lx, type %d, layer id %d, temporal id %d\n",
           start_pos, type, layer_id, temporal_id);
    /* Go back to where we started (start code + 3 header bytes). */
    fseek(file, -6, SEEK_CUR);
    /* type is unsigned, so the original "type >= 0 && type < 32" check
     * contained a tautology; it reduces to type < 32. */
    return (type < 32) ? c>>7 : 1;
}
333
334static int check_nalu_result(GstH264ParserResult result)
335{
336 switch(result)
337 {
338 case GST_H264_PARSER_OK:
339 return 0;
340 break;
341 case GST_H264_PARSER_BROKEN_DATA:
342 printf("GST_H264_PARSER_BROKEN_DATA\n");
343 break;
344 case GST_H264_PARSER_BROKEN_LINK:
345 printf("GST_H264_PARSER_BROKEN_LINK\n");
346 break;
347 case GST_H264_PARSER_ERROR:
348 printf("GST_H264_PARSER_ERROR\n");
349 break;
350 case GST_H264_PARSER_NO_NAL:
351 printf("GST_H264_PARSER_NO_NAL\n");
352 break;
353 case GST_H264_PARSER_NO_NAL_END:
354 printf("GST_H264_PARSER_NO_NAL_END\n");
355 break;
356 default:
357 printf("GST_H264_PARSER_UNKNOWN_ERROR\n");
358 break;
359 }
360 return -1;
361}
362
363static int update_picture_info_sps(VdpPictureInfoH264 *pi, GstH264SPS *sps) {
364 pi->num_ref_frames = sps->num_ref_frames;
365 pi->mb_adaptive_frame_field_flag = sps->mb_adaptive_frame_field_flag;
366 pi->frame_mbs_only_flag = sps->frame_mbs_only_flag;
367 pi->log2_max_frame_num_minus4 = sps->log2_max_frame_num_minus4;
368 pi->pic_order_cnt_type = sps->pic_order_cnt_type;
369 pi->log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_pic_order_cnt_lsb_minus4;
370 pi->delta_pic_order_always_zero_flag = sps->delta_pic_order_always_zero_flag;
371 pi->direct_8x8_inference_flag = sps->direct_8x8_inference_flag;
372
373 // SPS Scaling Lists
374 // gstreamer takes care of initializing a default scaling list, or
375 // patching it if sps->scaling_list_data_present_flag is set.
376 for (int i = 0; i < 6; i++) {
377 for (int j = 0; j < 16; j++) {
378 pi->scaling_lists_4x4[i][j] = sps->scaling_lists_4x4[i][j];
379 }
380 }
381
382 for (int i = 0; i < 2; i++) {
383 for (int j = 0; j < 64; j++) {
384 pi->scaling_lists_8x8[i][j] = sps->scaling_lists_8x8[i][j];
385 }
386 }
387
388 return 0;
389}
390
391static int update_picture_info_pps(VdpPictureInfoH264 *pi, GstH264PPS *pps)
392{
393 pi->constrained_intra_pred_flag = pps->constrained_intra_pred_flag;
394 pi->weighted_pred_flag = pps->weighted_pred_flag;
395 pi->weighted_bipred_idc = pps->weighted_bipred_idc;
396 pi->transform_8x8_mode_flag = pps->transform_8x8_mode_flag;
397 pi->chroma_qp_index_offset = pps->chroma_qp_index_offset;
398 pi->second_chroma_qp_index_offset = pps->second_chroma_qp_index_offset;
399 pi->pic_init_qp_minus26 = pps->pic_init_qp_minus26;
400 pi->num_ref_idx_l0_active_minus1 = pps->num_ref_idx_l0_active_minus1;
401 pi->num_ref_idx_l1_active_minus1 = pps->num_ref_idx_l1_active_minus1;
402 pi->entropy_coding_mode_flag = pps->entropy_coding_mode_flag;
403 pi->pic_order_present_flag = pps->pic_order_present_flag;
404 pi->deblocking_filter_control_present_flag = pps->deblocking_filter_control_present_flag;
405 pi->redundant_pic_cnt_present_flag = pps->redundant_pic_cnt_present_flag;
406
407 // PPS Scaling Lists
408 // gstreamer takes care of initializing a default scaling list, or
409 // patching it if pps->scaling_list_data_present_flag is set.
410 for(int i=0; i<6; i++)
411 {
412 for(int j=0; j<16; j++)
413 {
414 pi->scaling_lists_4x4[i][j] = pps->scaling_lists_4x4[i][j];
415 }
416 }
417
418 for(int i=0; i<2; i++)
419 {
420 for(int j=0; j<64; j++)
421 {
422 pi->scaling_lists_8x8[i][j] = pps->scaling_lists_8x8[i][j];
423 }
424 }
425
426 return 0;
427}
428
429static void create_vdpau_objects(VdpPictureInfoH264 *pi, h264_decoder_context *context)
430{
431 VdpStatus vdp_st;
432
433 vdp_st = win_x11_init_vdpau_procs();
434 CHECK_STATE
435
436 vdp_st = win_x11_init_vdpau_flip_queue(0, 0);
437 CHECK_STATE
438
439 // Object creation
440
441 vdp_st = vdp_preemption_callback_register(
442 vdp_device,
443 error_notifier,
444 context
445 );
446
447 // TODO: Get the right size
448// vid_width = pi->pic_width_in_luma_samples;
449// vid_height = pi->pic_height_in_luma_samples;
450 vid_width = 1920;
451 vid_height = 1080;
452
453 // TODO: Read the right profile
454 vdp_st = vdp_decoder_create(
455 /* inputs */
456 vdp_device, /* device */
457 VDP_DECODER_PROFILE_H264_HIGH, /* profile */
458 vid_width, /* width */
459 vid_height, /* height */
460 H264_MAX_REFERENCES, /* max_references */
461 /* output */
462 &context->decoder
463 );
464 CHECK_STATE
465
466 for(int i = 0; i < H264_MAX_REFERENCES; i++)
467 {
468 vdp_st = vdp_video_surface_create(
469 /* inputs */
470 vdp_device, /* device */
471 VDP_CHROMA_TYPE_420, /* chroma_type */
472 vid_width, /* width */
473 vid_height, /* height */
474 /* output */
475 &(context->scratch_frames[i]) /* surface */
476 );
477 CHECK_STATE
478 }
479
480 // TODO: handle output
481
482 vdp_st = vdp_output_surface_create(
483 /* inputs */
484 vdp_device, /* device */
485 VDP_RGBA_FORMAT_B8G8R8A8, /* rgba_format */
486 MAX_WIN_WIDTH, /* width */
487 MAX_WIN_HEIGHT, /* height */
488 /* output */
489 &(context->outputSurfaces[0]) /* surface */
490 );
491 CHECK_STATE
492 vdp_st = vdp_output_surface_render_output_surface(
493 context->outputSurfaces[0], /* destination_surface */
494 NULL, /* destination_rect */
495 VDP_INVALID_HANDLE, /* source_surface */
496 NULL, /* source_rect */
497 NULL, /* colors */
498 NULL, /* blend_state */
499 0 /* flags */
500 );
501 CHECK_STATE
502
503 // Order is important in code below, where enables are set.
504 VdpVideoMixerFeature features[] =
505 {
506 VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION,
507 VDP_VIDEO_MIXER_FEATURE_SHARPNESS,
508 VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE,
509 VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL,
510 VDP_VIDEO_MIXER_FEATURE_DEINTERLACE_TEMPORAL_SPATIAL
511 };
512 VdpBool feature_enables[] =
513 {
514 VDP_FALSE,
515 VDP_FALSE,
516 VDP_FALSE,
517 VDP_FALSE,
518 VDP_FALSE
519 };
520
521 uint32_t vdp_width = vid_width;
522 uint32_t vdp_height = vid_height;
523 VdpChromaType vdp_chroma_type = VDP_CHROMA_TYPE_420;
524
525 VdpVideoMixerParameter parameters[] =
526 {
527 VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
528 VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
529 VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
530 };
531 void const * parameter_values[ARSIZE(parameters)] =
532 {
533 &vdp_width,
534 &vdp_height,
535 &vdp_chroma_type
536 };
537
538 vdp_st = vdp_video_mixer_create(
539 vdp_device,
540 ARSIZE(features),
541 features,
542 ARSIZE(parameters),
543 parameters,
544 parameter_values,
545 &(context->videoMixer)
546 );
547 CHECK_STATE
548
549 vdp_st = vdp_video_mixer_set_feature_enables(
550 context->videoMixer, /* mixer */
551 ARSIZE(features), /* feature_count */
552 features, /* features */
553 feature_enables /* feature_enables */
554 );
555 CHECK_STATE
556}
557
/*
 * Tear down the VDPAU objects built by create_vdpau_objects(), in
 * reverse dependency order, then the flip queue and device procs.
 * Exits the process (via CHECK_STATE) on any VDPAU failure.
 * The pi parameter is unused.
 *
 * NOTE(review): the video mixer (context->videoMixer) and the output
 * surface (context->outputSurfaces[0]) created in create_vdpau_objects()
 * are never destroyed here — confirm whether win_x11_fini_* releases
 * them, otherwise they leak.
 */
static void destroy_vdpau_objects(VdpPictureInfoH264 *pi, h264_decoder_context *context)
{
    VdpStatus vdp_st;

    /* Unregister the preemption callback so it cannot fire mid-teardown. */
    vdp_st = vdp_preemption_callback_register(vdp_device, NULL, NULL);
    CHECK_STATE


    for (int i = 0; i < H264_MAX_REFERENCES; i++)
    {
        vdp_st = vdp_video_surface_destroy(context->scratch_frames[i]);
        CHECK_STATE
    }

    vdp_st = vdp_decoder_destroy(
        context->decoder
    );
    CHECK_STATE

    vdp_st = win_x11_fini_vdpau_flip_queue(0);
    CHECK_STATE

    vdp_st = win_x11_fini_vdpau_procs();
    CHECK_STATE
}
583
584static void update_picture_info_slice_header(VdpPictureInfoH264 *pi, h264_decoder_context *context)
585{
586 if(context->nalu->type == GST_H264_NAL_SLICE_IDR) {
587 pi->is_reference = VDP_TRUE;
588 }
589 else {
590 pi->is_reference = VDP_FALSE;
591 }
592
593 pi->frame_num = context->slice->frame_num;
594 pi->field_pic_flag = context->slice->field_pic_flag;
595 pi->bottom_field_flag = context->slice->bottom_field_flag;
596}
597
598static picture_order_count h264_compute_poc(h264_decoder_context* context) {
599 int topFOC = 0;
600 int bottomFOC = 0;
601
602 if (context->sps->pic_order_cnt_type == 0)
603 {
604 unsigned maxPocLSB = 1U << (context->sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
605
606 /* POC reference */
607 if( context->slice->type == GST_H264_NAL_SLICE_IDR )
608 {
609 context->poc.prevPicOrderCnt.lsb = 0;
610 context->poc.prevPicOrderCnt.msb = 0;
611 }
612 // Not yet handled
613// else if( context->poc.prevRefPictureHasMMCO5 )
614// {
615// context->poc.prevPicOrderCnt.msb = 0;
616// if( !context->poc.prevRefPictureIsBottomField )
617// context->poc.prevPicOrderCnt.lsb = context->poc.prevRefPictureTFOC;
618// else
619// context->poc.prevPicOrderCnt.lsb = 0;
620// }
621
622 /* 8.2.1.1 */
623 int pocMSB = context->poc.prevPicOrderCnt.msb;
624 int64_t orderDiff = context->slice->pic_order_cnt_lsb - context->poc.prevPicOrderCnt.lsb;
625 if( orderDiff < 0 && -orderDiff >= maxPocLSB / 2 )
626 pocMSB += maxPocLSB;
627 else if( orderDiff > maxPocLSB / 2 )
628 pocMSB -= maxPocLSB;
629
630 topFOC = bottomFOC = pocMSB + context->slice->pic_order_cnt_lsb;
631 if( context->slice->field_pic_flag )
632 bottomFOC += context->slice->delta_pic_order_cnt_bottom;
633
634 /* Save from ref picture */
635 if( context->nalu->ref_idc /* Is reference */ )
636 {
637 context->poc.prevRefPictureIsBottomField = (context->slice->field_pic_flag &&
638 context->slice->bottom_field_flag);
639// context->poc.prevRefPictureHasMMCO5 = context->slice->has_mmco5; // not yet handled
640 context->poc.prevRefPictureTFOC = topFOC;
641 context->poc.prevPicOrderCnt.lsb = context->slice->pic_order_cnt_lsb;
642 context->poc.prevPicOrderCnt.msb = pocMSB;
643 }
644 }
645 else
646 {
647 assert(0);
648// unsigned maxFrameNum = 1 << (p_sps->i_log2_max_frame_num + 4);
649// unsigned frameNumOffset;
650// unsigned expectedPicOrderCnt = 0;
651//
652// if( p_slice->i_nal_type == H264_NAL_SLICE_IDR )
653// frameNumOffset = 0;
654// else if( p_ctx->prevFrameNum > p_slice->i_frame_num )
655// frameNumOffset = p_ctx->prevFrameNumOffset + maxFrameNum;
656// else
657// frameNumOffset = p_ctx->prevFrameNumOffset;
658//
659// if( p_sps->i_pic_order_cnt_type == 1 )
660// {
661// unsigned absFrameNum;
662//
663// if( p_sps->i_num_ref_frames_in_pic_order_cnt_cycle > 0 )
664// absFrameNum = frameNumOffset + p_slice->i_frame_num;
665// else
666// absFrameNum = 0;
667//
668// if( p_slice->i_nal_ref_idc == 0 && absFrameNum > 0 )
669// absFrameNum--;
670//
671// if( absFrameNum > 0 )
672// {
673// int32_t expectedDeltaPerPicOrderCntCycle = 0;
674// for( int i=0; i<p_sps->i_num_ref_frames_in_pic_order_cnt_cycle; i++ )
675// expectedDeltaPerPicOrderCntCycle += p_sps->offset_for_ref_frame[i];
676//
677// unsigned picOrderCntCycleCnt = 0;
678// unsigned frameNumInPicOrderCntCycle = 0;
679// if( p_sps->i_num_ref_frames_in_pic_order_cnt_cycle )
680// {
681// picOrderCntCycleCnt = ( absFrameNum - 1 ) / p_sps->i_num_ref_frames_in_pic_order_cnt_cycle;
682// frameNumInPicOrderCntCycle = ( absFrameNum - 1 ) % p_sps->i_num_ref_frames_in_pic_order_cnt_cycle;
683// }
684//
685// expectedPicOrderCnt = picOrderCntCycleCnt * expectedDeltaPerPicOrderCntCycle;
686// for( unsigned i=0; i <= frameNumInPicOrderCntCycle; i++ )
687// expectedPicOrderCnt = expectedPicOrderCnt + p_sps->offset_for_ref_frame[i];
688// }
689//
690// if( p_slice->i_nal_ref_idc == 0 )
691// expectedPicOrderCnt = expectedPicOrderCnt + p_sps->offset_for_non_ref_pic;
692//
693// *p_tFOC = expectedPicOrderCnt + p_slice->i_delta_pic_order_cnt0;
694// if( !p_slice->i_field_pic_flag )
695// *p_bFOC = *p_tFOC + p_sps->offset_for_top_to_bottom_field + p_slice->i_delta_pic_order_cnt1;
696// else if( p_slice->i_bottom_field_flag )
697// *p_bFOC = expectedPicOrderCnt + p_sps->offset_for_top_to_bottom_field + p_slice->i_delta_pic_order_cnt0;
698// }
699// else if( p_sps->i_pic_order_cnt_type == 2 )
700// {
701// unsigned tempPicOrderCnt;
702//
703// if( p_slice->i_nal_type == H264_NAL_SLICE_IDR )
704// tempPicOrderCnt = 0;
705// else if( p_slice->i_nal_ref_idc == 0 )
706// tempPicOrderCnt = 2 * ( frameNumOffset + p_slice->i_frame_num ) - 1;
707// else
708// tempPicOrderCnt = 2 * ( frameNumOffset + p_slice->i_frame_num );
709//
710// *p_bFOC = *p_tFOC = tempPicOrderCnt;
711// }
712//
713// p_ctx->prevFrameNum = p_slice->i_frame_num;
714// if( p_slice->has_mmco5 )
715// p_ctx->prevFrameNumOffset = 0;
716// else
717// p_ctx->prevFrameNumOffset = frameNumOffset;
718 }
719
720 /* 8.2.1 (8-1) */
721 picture_order_count poc;
722 poc.top_foc = topFOC;
723 poc.bottom_foc = bottomFOC;
724 if (!context->slice->field_pic_flag) /* progressive or contains both fields */
725 poc.picture_order_count = (bottomFOC < topFOC) ? bottomFOC : topFOC;
726 else /* split top or bottom field */ {
727 if (context->slice->bottom_field_flag)
728 poc.picture_order_count = bottomFOC;
729 else
730 poc.picture_order_count = topFOC;
731 }
732
733 return poc;
734}
735
736static VdpOutputSurface WaitForSurface(h264_decoder_context* context)
737{
738 VdpOutputSurface outputSurface;
739 VdpStatus vdp_st;
740 VdpTime displayed_at;
741 VdpPresentationQueueStatus status;
742
743 outputSurface = context->outputSurfaces[0];
744
745 vdp_st = vdp_presentation_queue_block_until_surface_idle(
746 /* inputs */
747 vdp_flip_queue[0], /* presentation_queue */
748 outputSurface, /* surface */
749 /* output */
750 &displayed_at /* first_presentation_time */
751 );
752 CHECK_STATE
753
754 vdp_st = vdp_presentation_queue_query_surface_status(
755 /* inputs */
756 vdp_flip_queue[0], /* presentation_queue */
757 outputSurface, /* surface */
758 /* outputs */
759 &status, /* status */
760 &displayed_at /* first_presentation_time */
761 );
762 CHECK_STATE
763
764 return outputSurface;
765}
766
767static void RecalcOutputRect(h264_decoder_context *context)
768{
769 uint32_t screenWidth, screenHeight;
770 float vidAspect, monAspect, factor;
771
772 win_x11_poll_events();
773 screenWidth = win_x11_get_width(0);
774 if (screenWidth > MAX_WIN_WIDTH)
775 {
776 screenWidth = MAX_WIN_WIDTH;
777 }
778 screenHeight = win_x11_get_height(0);
779 if (screenHeight > MAX_WIN_HEIGHT)
780 {
781 screenHeight = MAX_WIN_HEIGHT;
782 }
783
784 context->outRect.x0 = 0;
785 context->outRect.x1 = screenWidth;
786 context->outRect.y0 = 0;
787 context->outRect.y1 = screenHeight;
788
789 /* This is not the right way to get the aspect ratios */
790 vidAspect = (float)vid_width / (float)vid_height;
791 monAspect = (float)screenWidth / (float)screenHeight;
792
793 if(vidAspect > monAspect) /* letter box */
794 {
795 factor = (1.0 - (monAspect / vidAspect)) * 0.5;
796 factor *= (float)screenHeight;
797
798 context->outRectVid.x0 = 0;
799 context->outRectVid.x1 = screenWidth;
800 context->outRectVid.y0 = factor;
801 context->outRectVid.y1 = screenHeight - factor;
802 }
803 else
804 {
805 factor = (1.0 - (vidAspect / monAspect)) * 0.5;
806 factor *= (float)screenWidth;
807
808 context->outRectVid.x0 = factor;
809 context->outRectVid.x1 = screenWidth - factor;
810 context->outRectVid.y0 = 0;
811 context->outRectVid.y1 = screenHeight;
812 }
813}
814
815static void Flip(
816 h264_decoder_context* context,
817 VdpOutputSurface outputSurface
818)
819{
820 VdpStatus vdp_st;
821
822 vdp_st = vdp_presentation_queue_display(
823 vdp_flip_queue[0], /* presentation_queue */
824 outputSurface, /* surface */
825 context->outRect.x1, /* clip_width */
826 context->outRect.y1, /* clip_height */
827 0 /* earliest_presentation_time */
828 );
829 CHECK_STATE
830
831}
832
/*
 * Composite the most recently decoded video surface
 * (pi->referenceFrames[0].surface) onto the output surface via the video
 * mixer, then queue it for display. Exits on VDPAU failure.
 */
static void DisplayFrame(
    VdpPictureInfoH264 *pi,
    h264_decoder_context* context)
{
    VdpOutputSurface outputSurface;
    VdpStatus vdp_st;

    /* Reuse the single output surface once it is idle. */
    outputSurface = WaitForSurface(context);

    /* Track window resizes before choosing destination rectangles. */
    RecalcOutputRect(context);

    /*

    VDPAU implementations must allow VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME
    to work correctly here. Players should not need to use a hack here by
    declaring this frame to be a top or bottom field.

    For VDPAU HEVC decoding, video_surface_past and video_surface_future
    should be NULL for progressive frames. Presentation of interlaced
    frames will work as for formats with native interlaced decoding but
    note that each field will be an HEVC frame in its own right.

    NOTE(review): the paragraph above talks about HEVC although this
    player decodes H.264 — the comment appears to be inherited from an
    HEVC sample player.

    */

    vdp_st = vdp_video_mixer_render(
        context->videoMixer,               /* mixer */
        VDP_INVALID_HANDLE,                /* background_surface */
        0,                                 /* background_source_rect */
        /* current_picture_structure*/
        VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME,
        0,                                 /* video_surface_past_count */
        NULL,                              /* video_surface_past */
        pi->referenceFrames[0].surface,    /* video_surface_current */
        0,                                 /* video_surface_future_count */
        NULL,                              /* video_surface_future */
        NULL,                              /* video_source_rect */
        outputSurface,                     /* destination_surface */
        &(context->outRect),               /* destination_rect */
        &(context->outRectVid),            /* destination_video_rect */
        0,                                 /* layer_count */
        NULL                               /* layers */
    );
    CHECK_STATE

    Flip(context, outputSurface);
}
879
880int main(int argc, char *argv[])
881{
882 FILE *file = NULL;
883 GstH264ParserResult result;
884 int nals = 0;
885 int nal_length = 0;
886 VdpBitstreamBuffer bitstreamBuffer;
887 VdpPictureInfoH264 infoH264;
888 h264_decoder_context context;
889
890 // @ahugeat TODO: Alternately parse these from the SPS
891 vid_width=1920;
892 vid_height=1080;
893
894 memset(&infoH264, 0, sizeof(infoH264));
895 memset(&context, 0, sizeof(context));
896 memset(&bitstreamBuffer, 0, sizeof(VdpBitstreamBuffer));
897
898 // Mark all reference as invalid
899 for (int i = 0; i < H264_MAX_REFERENCES; ++i) {
900 infoH264.referenceFrames[i].surface = VDP_INVALID_HANDLE;
901 }
902
903 if (argc != 2) {
904 print_usage(argv[0]);
905 return -1;
906 }
907
908 // Open bitstream annexe B
909 // FFMPEG command: ffmpeg -i input.mp4 -vcodec copy -an -bsf:v h264_mp4toannexb out.h264
910 if (!(file = fopen(argv[1],"rb")))
911 {
912 fprintf(stderr, "Input file %s not found\n", argv[1]);
913 return -1;
914 }
915
916 // Initialize GStreamer library for H264 NAL Unit parsing. */
917 context.parser = gst_h264_nal_parser_new();
918 if (!context.parser)
919 {
920 fprintf(stderr, "Error: unable to call gst_h264_nal_parser_new.\n");
921 return -1;
922 }
923
924 // Initialize POC context
925 h264_poc_context_init(&context.poc);
926
927 if (allocate_gst_objects(&context) < 0)
928 {
929 printf("Failed to allocate Gst objects.\n");
930 gst_h264_nal_parser_free(context.parser);
931 return -1;
932 }
933
934 // Initialize X11
935 win_x11_init_x11();
936 win_x11_init_window(0);
937
938 // Initialize rendering
939 bitstreamBuffer.bitstream = calloc(NALU_BUFFER_LENGTH, sizeof(uint8_t));
940 if (bitstreamBuffer.bitstream == NULL)
941 {
942 fprintf(stderr, "Error: MALLOC: bitstreamBuffer.bitstream.\n");
943 win_x11_fini_window(0);
944 win_x11_fini_x11();
945 gst_h264_nal_parser_free(context.parser);
946 free_gst_objects(&context);
947 return -1;
948 }
949
950 /*
951
952 The most interesting API usage is in this loop. The flow is:
953
954 Parse the incoming bitstream.
955 Pull out the next NAL unit.
956 Parse every individual NAL unit.
957 Update decoder state after each NAL unit, saving it to
958 VdpPictureInfoHEVC.
959
960 For VCL NAL units ("frames"), the player must handle some parts of
961 Clause 8 as well as Annex C for correct decoding.
962
963 The order of operations for decoding a VCL NAL unit is:
964
965 8.2 NAL unit decoding process
966 8.3.1 Decoding process for picture order count
967 8.3.2 Decoding process for reference picture set
968 C.3.2 Removal of pictures from the DPB
969 8.3.3 Decoding process for generating unavailable reference pictures
970 C.3.4 Current decoded picture marking and storage
971 8.1 PicOutputFlag
972 (8.3.4 through 8.7 - handled by VdpDecoderRender - see note below)
973 C.3.3 Picture output
974
975 This player does _not_ implement a coded picture buffer (CPB) as
976 specified in C.2. We assume that a bitstream is encapsulated in a
977 file that we can access as needed, and do not handle underflows or
978 calculate timing.
979
980 VdpDecoderRender models an instantaneous decoding process. A decoding
981 process is defined in 8.1 as: NAL unit decoding (8.2), slice segment
982 layer decoding (8.3), and decoding using all syntax elements (8.4, 8.5,
983 8.6, 8.7). Since VDPAU is a NAL unit level API, any actions that are
984 done per slice are handled by the implementation. This includes 8.3.4,
985 8.4, 8.5, 8.6 and 8.7.
986
987 This implementation uses VdpPictureInfoHEVC.RefPics[] as the decoded
988 picture buffer (DPB). Other players are free to use RefPics[] directly,
989 or to keep a local, separate DPB. Other implementations may also choose
990 to maintain decoder state using a separate means, and copy data to
991 VdpPictureInfoHEVC on the fly prior to calling VdpDecoderRender.
992
993 For now, this player outputs frames in decode order, not display order.
994
995 */
996
997 while (get_next_nal_unit(file, bitstreamBuffer.bitstream, &nal_length)==0)
998 {
999 result = gst_h264_parser_identify_nalu(context.parser, (const guint8 *) bitstreamBuffer.bitstream,
1000 0, (gsize)nal_length, context.nalu);
1001
1002 if (check_nalu_result(result))
1003 {
1004 return -1;
1005 }
1006
1007 switch(context.nalu->type)
1008 {
1009 case GST_H264_NAL_SLICE:
1010 case GST_H264_NAL_SLICE_IDR:
1011 printf("Video Coding Layer\n");
1012
1013 // Create VDPAU API objects: decoder, renderer.
1014 if(!context.vdpau_initialized)
1015 {
1016 // Only create objects once we have parsed enough NAL units to know that we must perform decoding
1017 create_vdpau_objects(&infoH264, &context);
1018
1019 // TODO: handle time?
1020 context.vdpau_initialized = 1;
1021 }
1022
1023 // 8.2 NAL unit decoding process
1024 // Get header slice information
1025 gst_h264_parser_parse_slice_hdr(context.parser, context.nalu, context.slice, (gboolean)TRUE, (gboolean)TRUE);
1026 update_picture_info_slice_header(&infoH264, &context);
1027 nals++;
1028
1029 assert(infoH264.pic_order_cnt_type == 0);
1030 printf("\tframe_num: %d\n", infoH264.frame_num);
1031
1032 if (GST_H264_IS_P_SLICE(context.slice)) {
1033 printf("\tslice_type: GST_H264_IS_P_SLICE\n");
1034 }
1035 else if (GST_H264_IS_B_SLICE(context.slice)) {
1036 printf("\tslice_type: GST_H264_IS_B_SLICE\n");
1037 }
1038 else if (GST_H264_IS_I_SLICE(context.slice)) {
1039 printf("\tslice_type: GST_H264_IS_I_SLICE\n");
1040 }
1041 else if (GST_H264_IS_SP_SLICE(context.slice)) {
1042 printf("\tslice_type: GST_H264_IS_SP_SLICE\n");
1043 }
1044 else if (GST_H264_IS_SI_SLICE(context.slice)) {
1045 printf("\tslice_type: GST_H264_IS_SI_SLICE\n");
1046 }
1047
1048 // 8.2.1 Decoding process for picture order count
1049 picture_order_count poc = h264_compute_poc(&context);
1050
1051 // 8.2.2 Decoding process for macroblock to slice group map
1052 printf("num_slice_groups_minus1: %d\n", context.pps->num_slice_groups_minus1);
1053 assert(context.pps->num_slice_groups_minus1 == 0);
1054 assert(context.slice->field_pic_flag == 1 || context.sps->frame_mbs_only_flag == 1);
1055
1056 // 8.2.3 Decoding process for slice data partitions - SKIP IT no partition
1057
1058 // 8.2.4 Decoding process for reference picture lists construction - Useful ?
1059// if (GST_H264_IS_P_SLICE(context.slice) || GST_H264_IS_B_SLICE(context.slice) || GST_H264_IS_SP_SLICE(context.slice)) {
1060// // 8.2.4.1 Decoding process for picture numbers ?
1061//
1062// if (GST_H264_IS_P_SLICE(context.slice) || GST_H264_IS_SP_SLICE(context.slice)) {
1063//
1064// }
1065// }
1066
1067 infoH264.field_order_cnt[0] = poc.top_foc;
1068 infoH264.field_order_cnt[1] = poc.bottom_foc;
1069 infoH264.is_reference = VDP_TRUE;
1070 infoH264.slice_count = 1;
1071
1072 bitstreamBuffer.struct_version = VDP_BITSTREAM_BUFFER_VERSION;
1073
1074// while(peek_next_nal_unit(file) == 0)
1075// {
1076// int nal_extra_length;
1077// printf("Another NAL unit for this picture found!\n");
1078// /* Truncate by 4 - don't repeat start codes */
1079// get_next_nal_unit(file,
1080// (bitstreamBuffer.bitstream+nal_length-4),
1081// &nal_extra_length);
1082// nal_length += nal_extra_length - 4;
1083// }
1084
1085 VdpStatus vdp_st = vdp_decoder_render(
1086 context.decoder,
1087 context.scratch_frames[0],
1088 (void*)&infoH264,
1089 1,
1090 &bitstreamBuffer
1091 );
1092 CHECK_STATE
1093
1094// infoH264.referenceFrames[0].top_is_reference = VDP_TRUE;
1095// infoH264.referenceFrames[0].bottom_is_reference = VDP_TRUE;
1096 infoH264.referenceFrames[0].surface = context.scratch_frames[0];
1097 infoH264.referenceFrames[0].frame_idx = context.slice->frame_num;
1098 infoH264.referenceFrames[0].field_order_cnt[0] = poc.top_foc;
1099 infoH264.referenceFrames[0].field_order_cnt[1] = poc.bottom_foc;
1100
1101 DisplayFrame(&infoH264, &context);
1102
1103 // In the case of num_slice_groups_minus1 == 0 -> mapUnitToSliceGroupMap[i] = 0
1104 // then:
1105 // mapUnitToSliceGroupMap[ i ] = 0 // (8-15)
1106 // If context.slice->field_pic_flag == 1 || context.sps->frame_mbs_only_flag == 1
1107 // then:
1108 // MbToSliceGroupMap[ i ] = mapUnitToSliceGroupMap[ i ] // eg. = 0 // 8-24
1109 // So:
1110 // i = n + 1
1111 // while( i < PicSizeInMbs && MbToSliceGroupMap[ i ] != MbToSliceGroupMap[ n ] ) (8-16)
1112 // i++;
1113 // nextMbAddress = i // eg. = 0 since MbToSliceGroupMap[ i ] == 0 && MbToSliceGroupMap[ n ] == 0
1114// int PicWidthInMbs = context.sps->pic_width_in_mbs_minus1 + 1;
1115// int PicHeightInMapUnits = context.sps->pic_height_in_map_units_minus1 + 1;
1116// int PicSizeInMapUnits = PicWidthInMbs * PicHeightInMapUnits;
1117// int *mapUnitToSliceGroupMap = calloc(PicSizeInMapUnits, sizeof(int));
1118//
1119// printf("PicSizeInMapUnits: %d\n", PicSizeInMapUnits);
1120//
1121// /* 8.3.1 Decoding process for picture order count */
1122// decode_picture_order_count(&infoHEVC, &context, slice, nalu);
1123// /* 8.3.2 Decoding process for reference picture set */
1124// decode_reference_picture_set(&infoHEVC, &context, slice, sps);
1125// /* C.3.2 Removal of pictures from the DPB */
1126// remove_pictures_from_dpb(&infoHEVC, &context, slice, nalu);
1127// /* 8.3.3 Decoding process for generating unavailable reference
1128// pictures */
1129// generate_unavailable_reference_pictures(
1130// &infoHEVC, &context, nalu);
1131// /* C.3.4 Current decoded picture marking and storage. */
1132// target_index = get_decoded_picture_index(&context);
1133// if(target_index < 0)
1134// printf("ERROR: Invalid target_index value\n");
1135// context.dpb_slice_pic_order_cnt_lsb[target_index] =
1136// slice->pic_order_cnt_lsb;
1137// /* 8.1 PicOutputFlag */
1138// calculate_PicOutputFlag(&context, slice, nalu, target_index);
1139// /* Remainder of decoding process - 8.3.4 8.4 8.5 8.6 8.7 */
1140// bitstreamBuffer.struct_version = VDP_BITSTREAM_BUFFER_VERSION;
1141//
1142// /*
1143// VDPAU HEVC NAL Length trickery.
1144// NAL units of same type, layer id, and temporal id form same
1145// picture. Need to find where the next differing NAL unit
1146// begins in the bitstream to give the correct bitstream_bytes
1147// value to VDPAU.
1148// */
1149//
1150// while(peek_next_nal_unit(file) == 0)
1151// {
1152// int nal_extra_length;
1153// printf("Another NAL unit for this picture found!\n");
1154// /* Truncate by 4 - don't repeat start codes */
1155// get_next_nal_unit(file,
1156// (bitstreamBuffer.bitstream+nal_length-4),
1157// &nal_extra_length);
1158// nal_length += nal_extra_length - 4;
1159// }
1160//
1161// printf("Decoding a buffer of length %d\n", nal_length);
1162// bitstreamBuffer.bitstream_bytes = nal_length;
1163// if(use_vdpau)
1164// {
1165// vdp_st = vdp_decoder_render(
1166// decoder,
1167// context.scratch_frames[target_index],
1168// (void*)&infoHEVC,
1169// 1,
1170// &bitstreamBuffer
1171// );
1172// CHECK_STATE
1173// }
1174// /* TODO - I think these need to be done AFTER decoding? */
1175// infoHEVC.PicOrderCntVal[target_index] =
1176// infoHEVC.CurrPicOrderCntVal;
1177// infoHEVC.RefPics[target_index] =
1178// context.scratch_frames[target_index];
1179// /* C.3.3 Frame Output */
1180// if(use_vdpau && do_display)
1181// {
1182// DisplayFrame(&infoHEVC, period, target_index);
1183// }
1184// context.IsFirstPicture = 0;
1185// if(delay)
1186// usleep(delay);
1187// else if (step)
1188// {
1189// printf("Press 'q' to quit, <any key> for next frame.\n");
1190// if(getchar() == 'q') return -1;
1191// }
1192// frame++;
1193// if(frames > 0 && frame > frames)
1194// return 0;
1195 break;
1196
1197 case GST_H264_NAL_SPS:
1198 printf("Sequence Parameter Set\n");
1199
1200 // Parse h264 SPS
1201 gst_h264_parser_parse_sps(context.parser, context.nalu, context.sps, (gboolean)TRUE);
1202 update_picture_info_sps(&infoH264, context.sps);
1203 nals++;
1204
1205 // TODO: Only for h265? If troubles, try: context.MaxDpbSize = 16
1206// // A.4.1 General tier and level limits. Calculate MaxDpbSize.
1207// // TODO - Make this more general. This is written against the
1208// // NVIDIA VDPAU implementation which supports Tier 5.1.
1209// // TODO - Move this into update_picture_info_sps ?
1210// PicSizeInSamplesY = sps->pic_width_in_luma_samples * sps->pic_height_in_luma_samples;
1211// if (sps->pic_width_in_luma_samples > SQRT_MAX_LUMA_PS_X8 ||
1212// sps->pic_height_in_luma_samples > SQRT_MAX_LUMA_PS_X8)
1213// printf("ERROR: picture width/height is out of bounds.\n");
1214//
1215// if(PicSizeInSamplesY <= (MAX_LUMA_PS >> 2))
1216// context.MaxDpbSize = min(4*MAX_DPB_PIC_BUF, 16);
1217// else if(PicSizeInSamplesY <= (MAX_LUMA_PS >> 1))
1218// context.MaxDpbSize = min(2*MAX_DPB_PIC_BUF, 16);
1219// else if(PicSizeInSamplesY <= ((3*MAX_LUMA_PS)>>2))
1220// context.MaxDpbSize = min((4*MAX_DPB_PIC_BUF)/3, 16);
1221// else
1222// context.MaxDpbSize = MAX_DPB_PIC_BUF;
1223 break;
1224
1225 case GST_H264_NAL_PPS:
1226 printf("Picture Parameter Set\n");
1227 /* Populate GstH264PPS */
1228 gst_h264_parser_parse_pps(context.parser, context.nalu, context.pps);
1229 update_picture_info_pps(&infoH264, context.pps);
1230 nals++;
1231 break;
1232
1233 case GST_H264_NAL_DEPTH_SPS:
1234 case GST_H264_NAL_UNKNOWN:
1235 case GST_H264_NAL_SLICE_DPA:
1236 case GST_H264_NAL_SLICE_DPB:
1237 case GST_H264_NAL_SLICE_DPC:
1238 case GST_H264_NAL_SEQ_END:
1239 case GST_H264_NAL_STREAM_END:
1240 case GST_H264_NAL_FILLER_DATA:
1241 case GST_H264_NAL_SPS_EXT:
1242 case GST_H264_NAL_PREFIX_UNIT:
1243 case GST_H264_NAL_SUBSET_SPS:
1244 case GST_H264_NAL_SLICE_AUX:
1245 case GST_H264_NAL_SLICE_EXT:
1246 case GST_H264_NAL_SLICE_DEPTH:
1247 case GST_H264_NAL_AU_DELIMITER:
1248 fprintf(stderr, "Unsupported NAL type: %d", context.nalu->type);
1249 assert(0);
1250 break;
1251 }
1252 }
1253 printf("Found %d NAL units!\n", nals);
1254 printf("%s\n", "Parsing complete.");
1255
1256 destroy_vdpau_objects(&infoH264, &context);
1257
1258 free_gst_objects(&context);
1259 gst_h264_nal_parser_free(context.parser);
1260
1261 win_x11_fini_window(0);
1262 win_x11_fini_x11();
1263 free((void *)bitstreamBuffer.bitstream);
1264
1265 fclose(file);
1266
1267 return 0;
1268}