jpeg: fix end-of-image (EOI) handler.
/*
 *  gstvaapidecoder_jpeg.c - JPEG decoder
 *
 *  Copyright (C) 2011-2012 Intel Corporation
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public License
 *  as published by the Free Software Foundation; either version 2.1
 *  of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free
 *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 *  Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_jpeg
 * @short_description: JPEG decoder
 */

#include "sysdeps.h"
#include <string.h>
#include <gst/codecparsers/gstjpegparser.h>
#include "gstvaapicompat.h"
#include "gstvaapidecoder_jpeg.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"

#define DEBUG 1
#include "gstvaapidebug.h"

G_DEFINE_TYPE(GstVaapiDecoderJpeg,
              gst_vaapi_decoder_jpeg,
              GST_VAAPI_TYPE_DECODER);

#define GST_VAAPI_DECODER_JPEG_GET_PRIVATE(obj)                 \
    (G_TYPE_INSTANCE_GET_PRIVATE((obj),                         \
                                 GST_VAAPI_TYPE_DECODER_JPEG,   \
                                 GstVaapiDecoderJpegPrivate))

struct _GstVaapiDecoderJpegPrivate {
    GstVaapiProfile             profile;
    guint                       width;
    guint                       height;
    GstVaapiPicture            *current_picture;
    GstJpegFrameHdr             frame_hdr;
    GstJpegHuffmanTables        huf_tables;
    GstJpegQuantTables          quant_tables;
    gboolean                    has_huf_table;
    gboolean                    has_quant_table;
    guint                       mcu_restart;
    guint                       is_opened       : 1;
    guint                       profile_changed : 1;
    guint                       is_constructed  : 1;
};

typedef struct _GstJpegScanSegment GstJpegScanSegment;
struct _GstJpegScanSegment {
    guint                       header_offset;
    guint                       header_size;
    guint                       data_offset;
    guint                       data_size;
    guint                       is_valid        : 1;
};

static void
gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);

    /* Reset all */
    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->width                 = 0;
    priv->height                = 0;
    priv->is_opened             = FALSE;
    priv->profile_changed       = TRUE;
}

static gboolean
gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
{
    gst_vaapi_decoder_jpeg_close(decoder);

    return TRUE;
}

static void
gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoderJpeg *decoder)
{
    gst_vaapi_decoder_jpeg_close(decoder);
}

static gboolean
gst_vaapi_decoder_jpeg_create(GstVaapiDecoderJpeg *decoder)
{
    if (!GST_VAAPI_DECODER_CODEC(decoder))
        return FALSE;
    return TRUE;
}

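/* Ensures the VA decoding context matches the current profile and picture
 * size: when the profile changed, checks that the display provides a
 * hardware decoder for it (VLD entrypoint) and (re)creates the context. */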
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

        profiles[n_profiles++] = priv->profile;
        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (reset_context) {
        reset_context = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            priv->profile,
            entrypoint,
            priv->width, priv->height
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

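/* Sends the current picture to the hardware for decoding, queues it for
 * output, then releases the decoder's reference to it. */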
static gboolean
decode_current_picture(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;
    gboolean success = TRUE;

    if (picture) {
        if (!gst_vaapi_picture_decode(picture))
            success = FALSE;
        else if (!gst_vaapi_picture_output(picture))
            success = FALSE;
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
    }
    return success;
}

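/* Fills the VA picture parameter buffer from the parsed frame header:
 * picture dimensions plus per-component identifiers, sampling factors
 * and quantization table selectors. */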
static gboolean
fill_picture(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture,
    GstJpegFrameHdr     *jpeg_frame_hdr
)
{
    VAPictureParameterBufferJPEGBaseline *pic_param = picture->param;
    guint i;

    g_assert(pic_param);

    memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
    pic_param->picture_width    = jpeg_frame_hdr->width;
    pic_param->picture_height   = jpeg_frame_hdr->height;

    pic_param->num_components   = jpeg_frame_hdr->num_components;
    if (jpeg_frame_hdr->num_components > 4)
        return FALSE;
    for (i = 0; i < pic_param->num_components; i++) {
        pic_param->components[i].component_id =
            jpeg_frame_hdr->components[i].identifier;
        pic_param->components[i].h_sampling_factor =
            jpeg_frame_hdr->components[i].horizontal_factor;
        pic_param->components[i].v_sampling_factor =
            jpeg_frame_hdr->components[i].vertical_factor;
        pic_param->components[i].quantiser_table_selector =
            jpeg_frame_hdr->components[i].quant_table_selector;
    }
    return TRUE;
}

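/* Uploads the quantization tables to the VA IQ matrix buffer, falling back
 * to the default JPEG tables when no DQT segment was seen. Only 8-bit
 * (baseline) table precision is expected. */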
static gboolean
fill_quantization_table(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    VAIQMatrixBufferJPEGBaseline *iq_matrix;
    guint i, j, num_tables;

    if (!priv->has_quant_table)
        gst_jpeg_get_default_quantization_tables(&priv->quant_tables);

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
    g_assert(picture->iq_matrix);
    iq_matrix = picture->iq_matrix->param;

    num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
                     GST_JPEG_MAX_SCAN_COMPONENTS);

    for (i = 0; i < num_tables; i++) {
        GstJpegQuantTable * const quant_table =
            &priv->quant_tables.quant_tables[i];

        iq_matrix->load_quantiser_table[i] = quant_table->valid;
        if (!iq_matrix->load_quantiser_table[i])
            continue;

        g_assert(quant_table->quant_precision == 0);
        for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
            iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
        iq_matrix->load_quantiser_table[i] = 1;
        quant_table->valid = FALSE;
    }
    return TRUE;
}

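/* Uploads the DC/AC Huffman tables to the VA Huffman table buffer, falling
 * back to the default JPEG tables when no DHT segment was seen. */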
static gboolean
fill_huffman_table(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstJpegHuffmanTables * const huf_tables = &priv->huf_tables;
    VAHuffmanTableBufferJPEGBaseline *huffman_table;
    guint i, num_tables;

    if (!priv->has_huf_table)
        gst_jpeg_get_default_huffman_tables(&priv->huf_tables);

    picture->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
    g_assert(picture->huf_table);
    huffman_table = picture->huf_table->param;

    num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
                     GST_JPEG_MAX_SCAN_COMPONENTS);

    for (i = 0; i < num_tables; i++) {
        huffman_table->load_huffman_table[i] =
            huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
        if (!huffman_table->load_huffman_table[i])
            continue;

        memcpy(huffman_table->huffman_table[i].num_dc_codes,
               huf_tables->dc_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_dc_codes));
        memcpy(huffman_table->huffman_table[i].dc_values,
               huf_tables->dc_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].dc_values));
        memcpy(huffman_table->huffman_table[i].num_ac_codes,
               huf_tables->ac_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_ac_codes));
        memcpy(huffman_table->huffman_table[i].ac_values,
               huf_tables->ac_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].ac_values));
        memset(huffman_table->huffman_table[i].pad,
               0,
               sizeof(huffman_table->huffman_table[i].pad));
    }
    return TRUE;
}

static guint
get_max_horizontal_samples(GstJpegFrameHdr *frame_hdr)
{
    guint i, max_factor = 0;

    for (i = 0; i < frame_hdr->num_components; i++) {
        if (frame_hdr->components[i].horizontal_factor > max_factor)
            max_factor = frame_hdr->components[i].horizontal_factor;
    }
    return max_factor;
}

static guint
get_max_vertical_samples(GstJpegFrameHdr *frame_hdr)
{
    guint i, max_factor = 0;

    for (i = 0; i < frame_hdr->num_components; i++) {
        if (frame_hdr->components[i].vertical_factor > max_factor)
            max_factor = frame_hdr->components[i].vertical_factor;
    }
    return max_factor;
}

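/* Handles a start-of-frame (SOF) segment: parses the frame header, makes
 * sure a suitable VA context exists, flushes any pending picture and starts
 * a new picture with the supplied presentation timestamp. */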
static GstVaapiDecoderStatus
decode_picture(
    GstVaapiDecoderJpeg *decoder,
    guint8               profile,
    guchar              *buf,
    guint                buf_size,
    GstClockTime         pts
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;

    switch (profile) {
    case GST_JPEG_MARKER_SOF_MIN:
        priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
        break;
    default:
        GST_ERROR("unsupported profile %d", profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf, buf_size, 0)) {
        GST_ERROR("failed to parse frame header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->height = frame_hdr->height;
    priv->width  = frame_hdr->width;

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset context");
        return status;
    }

    if (priv->current_picture && !decode_current_picture(decoder))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
    if (!picture) {
        GST_ERROR("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    if (!fill_picture(decoder, picture, frame_hdr))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    /* Update presentation time */
    picture->pts = pts;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_huffman_table(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_huffman_table(&priv->huf_tables, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->has_huf_table = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_quant_table(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_quant_table(&priv->quant_tables, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->has_quant_table = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_restart_interval(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_restart_interval(&priv->mcu_restart, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse restart interval");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

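/* Handles a start-of-scan (SOS) segment together with its entropy-coded
 * data: loads the quantization and Huffman tables, parses the scan header
 * and appends a single VA slice covering the whole scan to the current
 * picture. */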
static GstVaapiDecoderStatus
decode_scan(
    GstVaapiDecoderJpeg *decoder,
    guchar              *scan_header,
    guint                scan_header_size,
    guchar              *scan_data,
    guint                scan_data_size)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture *picture = priv->current_picture;
    VASliceParameterBufferJPEGBaseline *slice_param;
    GstVaapiSlice *gst_slice;
    guint total_h_samples, total_v_samples;
    GstJpegScanHdr  scan_hdr;
    guint i;

    if (!picture) {
        GST_ERROR("no current picture to decode the scan into");
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;
    }

    if (!fill_quantization_table(decoder, picture)) {
        GST_ERROR("failed to fill in quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!fill_huffman_table(decoder, picture)) {
        GST_ERROR("failed to fill in Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    memset(&scan_hdr, 0, sizeof(scan_hdr));
    if (!gst_jpeg_parse_scan_hdr(&scan_hdr, scan_header, scan_header_size, 0)) {
        GST_DEBUG("failed to parse scan header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    gst_slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder, scan_data, scan_data_size);
    if (!gst_slice) {
        GST_ERROR("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, gst_slice);

    slice_param = gst_slice->param;
    slice_param->num_components = scan_hdr.num_components;
    for (i = 0; i < scan_hdr.num_components; i++) {
        slice_param->components[i].component_selector =
            scan_hdr.components[i].component_selector;
        slice_param->components[i].dc_table_selector =
            scan_hdr.components[i].dc_selector;
        slice_param->components[i].ac_table_selector =
            scan_hdr.components[i].ac_selector;
    }
    slice_param->restart_interval = priv->mcu_restart;
    if (scan_hdr.num_components == 1) { /* non-interleaved */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        /* Y MCU count */
        if (slice_param->components[0].component_selector == priv->frame_hdr.components[0].identifier) {
            slice_param->num_mcus = (priv->frame_hdr.width/8)*(priv->frame_hdr.height/8);
        } else { /* Cb, Cr MCU count */
            slice_param->num_mcus = (priv->frame_hdr.width/16)*(priv->frame_hdr.height/16);
        }
    } else { /* interleaved */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        total_v_samples = get_max_vertical_samples(&priv->frame_hdr);
        total_h_samples = get_max_horizontal_samples(&priv->frame_hdr);
        slice_param->num_mcus = ((priv->frame_hdr.width + total_h_samples*8 - 1)/(total_h_samples*8)) *
                                ((priv->frame_hdr.height + total_v_samples*8 - 1)/(total_v_samples*8));
    }

    if (picture->slices && picture->slices->len)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
}

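/* Walks the markers of the JPEG stream (SOI, tables, SOF, SOS, ..., EOI).
 * Entropy-coded data following an SOS marker is accumulated into scan_seg
 * and decoded as one scan. When the end-of-image (EOI) marker is reached,
 * the current picture is decoded and output, and any trailing data in the
 * buffer is ignored. */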
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    GstJpegMarkerSegment seg;
    GstJpegScanSegment scan_seg;
    GstClockTime pts;
    guchar *buf;
    guint buf_size, ofs;
    gboolean append_ecs;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    if (!buf || buf_size == 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    memset(&scan_seg, 0, sizeof(scan_seg));

    pts = GST_BUFFER_TIMESTAMP(buffer);
    ofs = 0;
    while (gst_jpeg_parse(&seg, buf, buf_size, ofs)) {
        if (seg.size < 0) {
            GST_DEBUG("buffer too short for parsing");
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs += seg.size;

        /* Decode scan, if complete */
        if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
            scan_seg.data_size = seg.offset - scan_seg.data_offset;
            scan_seg.is_valid  = TRUE;
        }
        if (scan_seg.is_valid) {
            status = decode_scan(
                decoder,
                buf + scan_seg.header_offset,
                scan_seg.header_size,
                buf + scan_seg.data_offset,
                scan_seg.data_size
            );
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                break;
            memset(&scan_seg, 0, sizeof(scan_seg));
        }

        append_ecs = TRUE;
        switch (seg.marker) {
        case GST_JPEG_MARKER_SOI:
            priv->has_quant_table = FALSE;
            priv->has_huf_table   = FALSE;
            priv->mcu_restart     = 0;
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        case GST_JPEG_MARKER_EOI:
            if (decode_current_picture(decoder)) {
                /* Get out of the loop, trailing data is not needed */
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                goto end;
            }
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
            break;
        case GST_JPEG_MARKER_DHT:
            status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DQT:
            status = decode_quant_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DRI:
            status = decode_restart_interval(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DAC:
            GST_ERROR("unsupported arithmetic coding mode");
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
            break;
        case GST_JPEG_MARKER_SOS:
            scan_seg.header_offset = seg.offset;
            scan_seg.header_size   = seg.size;
            scan_seg.data_offset   = seg.offset + seg.size;
            scan_seg.data_size     = 0;
            append_ecs             = FALSE;
            break;
        default:
            /* Restart marker */
            if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
                seg.marker <= GST_JPEG_MARKER_RST_MAX) {
                append_ecs = FALSE;
                break;
            }

            /* Frame header */
            if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
                seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
                status = decode_picture(
                    decoder,
                    seg.marker,
                    buf + seg.offset, seg.size,
                    pts
                );
                break;
            }

            /* Application segments */
            if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
                seg.marker <= GST_JPEG_MARKER_APP_MAX) {
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                break;
            }

            GST_WARNING("unsupported marker (0x%02x)", seg.marker);
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }

        /* Append entropy coded segments */
        if (append_ecs)
            scan_seg.data_size = seg.offset - scan_seg.data_offset;

        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            break;
    }
end:
    return status;
}

GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base, GstBuffer *buffer)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(base);
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!priv->is_opened) {
        priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder, buffer);
        if (!priv->is_opened)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
    }
    return decode_buffer(decoder, buffer);
}

static void
gst_vaapi_decoder_jpeg_finalize(GObject *object)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);

    gst_vaapi_decoder_jpeg_destroy(decoder);

    G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class)->finalize(object);
}

static void
gst_vaapi_decoder_jpeg_constructed(GObject *object)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GObjectClass *parent_class;

    parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class);
    if (parent_class->constructed)
        parent_class->constructed(object);

    priv->is_constructed = gst_vaapi_decoder_jpeg_create(decoder);
}

static void
gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
{
    GObjectClass * const object_class = G_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);

    g_type_class_add_private(klass, sizeof(GstVaapiDecoderJpegPrivate));

    object_class->finalize      = gst_vaapi_decoder_jpeg_finalize;
    object_class->constructed   = gst_vaapi_decoder_jpeg_constructed;

    decoder_class->decode       = gst_vaapi_decoder_jpeg_decode;
}

static void
gst_vaapi_decoder_jpeg_init(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate *priv;

    priv                        = GST_VAAPI_DECODER_JPEG_GET_PRIVATE(decoder);
    decoder->priv               = priv;
    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->width                 = 0;
    priv->height                = 0;
    priv->current_picture       = NULL;
    priv->has_huf_table         = FALSE;
    priv->has_quant_table       = FALSE;
    priv->mcu_restart           = 0;
    priv->is_opened             = FALSE;
    priv->profile_changed       = TRUE;
    priv->is_constructed        = FALSE;
    memset(&priv->frame_hdr, 0, sizeof(priv->frame_hdr));
    memset(&priv->huf_tables, 0, sizeof(priv->huf_tables));
    memset(&priv->quant_tables, 0, sizeof(priv->quant_tables));
}

/**
 * gst_vaapi_decoder_jpeg_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for JPEG decoding.  The @caps can
 * hold extra information like codec-data and picture coded size.
 *
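 * A minimal usage sketch; the gst_vaapi_decoder_put_buffer() and
 * gst_vaapi_decoder_get_surface() calls come from the generic
 * #GstVaapiDecoder API and are shown here for illustration only:
 * |[
 *   GstVaapiDecoder *decoder = gst_vaapi_decoder_jpeg_new(display, caps);
 *   GstVaapiDecoderStatus status;
 *   GstVaapiSurfaceProxy *proxy;
 *
 *   if (gst_vaapi_decoder_put_buffer(decoder, jpeg_buffer)) {
 *       proxy = gst_vaapi_decoder_get_surface(decoder, &status);
 *       if (proxy) {
 *           ... render or download the decoded surface ...
 *           g_object_unref(proxy);
 *       }
 *   }
 *   g_object_unref(decoder);
 * ]|
 *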
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderJpeg *decoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    decoder = g_object_new(
        GST_VAAPI_TYPE_DECODER_JPEG,
        "display",      display,
        "caps",         caps,
        NULL
    );
    if (!decoder->priv->is_constructed) {
        g_object_unref(decoder);
        return NULL;
    }
    return GST_VAAPI_DECODER_CAST(decoder);
}