Fix the typecast warnings:
gst-libs/gst/vaapi/gstvaapidecoder_jpeg.c (vaapi:sree-gstreamer-vaapi.git)
/*
 *  gstvaapidecoder_jpeg.c - JPEG decoder
 *
 *  Copyright (C) 2011-2012 Intel Corporation
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public License
 *  as published by the Free Software Foundation; either version 2.1
 *  of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free
 *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 *  Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_jpeg
 * @short_description: JPEG decoder
 */

#include "sysdeps.h"
#include <string.h>
#include <gst/codecparsers/gstjpegparser.h>
#include "gstvaapicompat.h"
#include "gstvaapidecoder_jpeg.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"

#define DEBUG 1
#include "gstvaapidebug.h"

G_DEFINE_TYPE(GstVaapiDecoderJpeg,
              gst_vaapi_decoder_jpeg,
              GST_VAAPI_TYPE_DECODER)

#define GST_VAAPI_DECODER_JPEG_GET_PRIVATE(obj)                 \
    (G_TYPE_INSTANCE_GET_PRIVATE((obj),                         \
                                 GST_VAAPI_TYPE_DECODER_JPEG,   \
                                 GstVaapiDecoderJpegPrivate))

typedef struct _GstJpegScanSegment GstJpegScanSegment;
struct _GstJpegScanSegment {
    guint                       header_offset;
    guint                       header_size;
    guint                       data_offset;
    guint                       data_size;
    guint                       is_valid        : 1;
};

struct _GstVaapiDecoderJpegPrivate {
    GstVaapiProfile             profile;
    guint                       width;
    guint                       height;
    GstVaapiPicture            *current_picture;
    GstJpegFrameHdr             frame_hdr;
    GstJpegHuffmanTables        huf_tables;
    GstJpegQuantTables          quant_tables;
    gboolean                    has_huf_table;
    gboolean                    has_quant_table;
    guint                       mcu_restart;
    guint                       is_opened       : 1;
    guint                       profile_changed : 1;
    guint                       is_constructed  : 1;
    guint                       reset_context   : 1;
};

static void
gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);

    /* Reset all */
    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->width                 = 0;
    priv->height                = 0;
    priv->is_opened             = FALSE;
    priv->profile_changed       = TRUE;
}

static gboolean
gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder)
{
    gst_vaapi_decoder_jpeg_close(decoder);

    return TRUE;
}

static void
gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoderJpeg *decoder)
{
    gst_vaapi_decoder_jpeg_close(decoder);
}

static gboolean
gst_vaapi_decoder_jpeg_create(GstVaapiDecoderJpeg *decoder)
{
    if (!GST_VAAPI_DECODER_CODEC(decoder))
        return FALSE;
    return TRUE;
}

static GstVaapiDecoderStatus
check_context_reset(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        priv->reset_context   = TRUE;

        profiles[n_profiles++] = priv->profile;
        //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
        //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }
    if (priv->reset_context)
        gst_vaapi_decoder_emit_caps_change(GST_VAAPI_DECODER_CAST(decoder),
                                           priv->width, priv->height);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderJpeg *decoder, GstBufferPool *pool)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    gboolean reset_context = FALSE;

    if (priv->reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        info.pool       = GST_VAAPI_SURFACE_POOL(pool);
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static gboolean
decode_current_picture(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;
    gboolean success = TRUE;

    if (picture) {
        if (!gst_vaapi_picture_decode(picture))
            success = FALSE;
        else if (!gst_vaapi_picture_output(picture))
            success = FALSE;
        gst_vaapi_picture_replace(&priv->current_picture, NULL);
    }
    return success;
}

static gboolean
fill_picture(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture,
    GstJpegFrameHdr     *jpeg_frame_hdr
)
{
    VAPictureParameterBufferJPEGBaseline *pic_param = picture->param;
    guint i;

    g_assert(pic_param);

    memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
    pic_param->picture_width    = jpeg_frame_hdr->width;
    pic_param->picture_height   = jpeg_frame_hdr->height;

    pic_param->num_components   = jpeg_frame_hdr->num_components;
    if (jpeg_frame_hdr->num_components > 4)
        return FALSE;
    for (i = 0; i < pic_param->num_components; i++) {
        pic_param->components[i].component_id =
            jpeg_frame_hdr->components[i].identifier;
        pic_param->components[i].h_sampling_factor =
            jpeg_frame_hdr->components[i].horizontal_factor;
        pic_param->components[i].v_sampling_factor =
            jpeg_frame_hdr->components[i].vertical_factor;
        pic_param->components[i].quantiser_table_selector =
            jpeg_frame_hdr->components[i].quant_table_selector;
    }
    return TRUE;
}

static gboolean
fill_quantization_table(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    VAIQMatrixBufferJPEGBaseline *iq_matrix;
    guint i, j, num_tables;

    if (!priv->has_quant_table)
        gst_jpeg_get_default_quantization_tables(&priv->quant_tables);

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
    g_assert(picture->iq_matrix);
    iq_matrix = picture->iq_matrix->param;

    num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
                     GST_JPEG_MAX_QUANT_ELEMENTS);

    for (i = 0; i < num_tables; i++) {
        GstJpegQuantTable * const quant_table =
            &priv->quant_tables.quant_tables[i];

        iq_matrix->load_quantiser_table[i] = quant_table->valid;
        if (!iq_matrix->load_quantiser_table[i])
            continue;

        g_assert(quant_table->quant_precision == 0);
        for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
            iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
        iq_matrix->load_quantiser_table[i] = 1;
        quant_table->valid = FALSE;
    }
    return TRUE;
}

static gboolean
fill_huffman_table(
    GstVaapiDecoderJpeg *decoder,
    GstVaapiPicture     *picture
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstJpegHuffmanTables * const huf_tables = &priv->huf_tables;
    VAHuffmanTableBufferJPEGBaseline *huffman_table;
    guint i, num_tables;

    if (!priv->has_huf_table)
        gst_jpeg_get_default_huffman_tables(&priv->huf_tables);

    picture->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
    g_assert(picture->huf_table);
    huffman_table = picture->huf_table->param;

    num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
                     GST_JPEG_MAX_SCAN_COMPONENTS);

    for (i = 0; i < num_tables; i++) {
        huffman_table->load_huffman_table[i] =
            huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
        if (!huffman_table->load_huffman_table[i])
            continue;

        memcpy(huffman_table->huffman_table[i].num_dc_codes,
               huf_tables->dc_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_dc_codes));
        memcpy(huffman_table->huffman_table[i].dc_values,
               huf_tables->dc_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].dc_values));
        memcpy(huffman_table->huffman_table[i].num_ac_codes,
               huf_tables->ac_tables[i].huf_bits,
               sizeof(huffman_table->huffman_table[i].num_ac_codes));
        memcpy(huffman_table->huffman_table[i].ac_values,
               huf_tables->ac_tables[i].huf_values,
               sizeof(huffman_table->huffman_table[i].ac_values));
        memset(huffman_table->huffman_table[i].pad,
               0,
               sizeof(huffman_table->huffman_table[i].pad));
    }
    return TRUE;
}

static guint
get_max_horizontal_samples(GstJpegFrameHdr *frame_hdr)
{
    guint i, max_factor = 0;

    for (i = 0; i < frame_hdr->num_components; i++) {
        if (frame_hdr->components[i].horizontal_factor > max_factor)
            max_factor = frame_hdr->components[i].horizontal_factor;
    }
    return max_factor;
}

static guint
get_max_vertical_samples(GstJpegFrameHdr *frame_hdr)
{
    guint i, max_factor = 0;

    for (i = 0; i < frame_hdr->num_components; i++) {
        if (frame_hdr->components[i].vertical_factor > max_factor)
            max_factor = frame_hdr->components[i].vertical_factor;
    }
    return max_factor;
}

static GstVaapiDecoderStatus
decode_picture(
    GstVaapiDecoderJpeg *decoder,
    guint8               profile,
    guchar              *buf,
    guint                buf_size,
    GstClockTime         pts
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;

    switch (profile) {
    case GST_JPEG_MARKER_SOF_MIN:
        priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
        break;
    default:
        GST_ERROR("unsupported profile %d", profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf, buf_size, 0)) {
        GST_ERROR("failed to parse frame header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->height = frame_hdr->height;
    priv->width  = frame_hdr->width;

    status = check_context_reset(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
    if (!picture) {
        GST_ERROR("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    if (!fill_picture(decoder, picture, frame_hdr))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    /* Update presentation time */
    picture->pts = pts;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_huffman_table(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_huffman_table(&priv->huf_tables, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->has_huf_table = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_quant_table(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_quant_table(&priv->quant_tables, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->has_quant_table = TRUE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_restart_interval(
    GstVaapiDecoderJpeg *decoder,
    guchar              *buf,
    guint                buf_size
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    if (!gst_jpeg_parse_restart_interval(&priv->mcu_restart, buf, buf_size, 0)) {
        GST_DEBUG("failed to parse restart interval");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_scan(
    GstVaapiDecoderJpeg *decoder,
    guchar              *scan_header,
    guint                scan_header_size,
    guchar              *scan_data,
    guint                scan_data_size)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture *picture = priv->current_picture;
    VASliceParameterBufferJPEGBaseline *slice_param;
    GstVaapiSlice *gst_slice;
    guint total_h_samples, total_v_samples;
    GstJpegScanHdr scan_hdr;
    guint i;

    if (!picture) {
        GST_ERROR("no picture to decode the scan into");
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;
    }

    if (!fill_quantization_table(decoder, picture)) {
        GST_ERROR("failed to fill in quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!fill_huffman_table(decoder, picture)) {
        GST_ERROR("failed to fill in Huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    memset(&scan_hdr, 0, sizeof(scan_hdr));
    if (!gst_jpeg_parse_scan_hdr(&scan_hdr, scan_header, scan_header_size, 0)) {
        GST_DEBUG("failed to parse scan header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    gst_slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder, scan_data, scan_data_size);
    gst_vaapi_picture_add_slice(picture, gst_slice);

    slice_param = gst_slice->param;
    slice_param->num_components = scan_hdr.num_components;
    for (i = 0; i < scan_hdr.num_components; i++) {
        slice_param->components[i].component_selector =
            scan_hdr.components[i].component_selector;
        slice_param->components[i].dc_table_selector =
            scan_hdr.components[i].dc_selector;
        slice_param->components[i].ac_table_selector =
            scan_hdr.components[i].ac_selector;
    }
    slice_param->restart_interval = priv->mcu_restart;
    if (scan_hdr.num_components == 1) { /* non-interleaved scan */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        /* Y MCU count */
        if (slice_param->components[0].component_selector == priv->frame_hdr.components[0].identifier) {
            slice_param->num_mcus = (priv->frame_hdr.width/8)*(priv->frame_hdr.height/8);
        } else { /* Cb, Cr MCU count */
            slice_param->num_mcus = (priv->frame_hdr.width/16)*(priv->frame_hdr.height/16);
        }
    } else { /* interleaved scan */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        /* An interleaved MCU covers (max_h * 8) x (max_v * 8) pixels, so the
         * MCU count is the product of the rounded-up horizontal and vertical
         * MCU counts */
        total_v_samples = get_max_vertical_samples(&priv->frame_hdr);
        total_h_samples = get_max_horizontal_samples(&priv->frame_hdr);
        slice_param->num_mcus = ((priv->frame_hdr.width + total_h_samples*8 - 1)/(total_h_samples*8)) *
                                ((priv->frame_hdr.height + total_v_samples*8 - 1)/(total_v_samples*8));
    }

    if (picture->slices && picture->slices->len)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
}

GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_parse(
    GstVaapiDecoder *dec,
    GstAdapter *adapter,
    guint *toadd,
    gboolean *have_frame)
{
    GstVaapiDecoderJpeg *decoder = GST_VAAPI_DECODER_JPEG(dec);
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    GstJpegMarkerSegment seg;
    GstJpegScanSegment scan_seg;
    GstClockTime pts;
    guint8 *data;
    gint size = 0, ofs = 0;
    gboolean append_ecs;

    size = gst_adapter_available(adapter);
    data = (guint8 *)gst_adapter_map(adapter, size);

    if (!data || size == 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    memset(&scan_seg, 0, sizeof(scan_seg));

    pts = gst_adapter_prev_timestamp(adapter, NULL);

    while (gst_jpeg_parse(&seg, data, size, ofs)) {
       if (seg.size < 0) {
           GST_DEBUG("buffer too short for parsing");
           status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
           goto beach;
       }
       ofs += seg.size;

       /* Decode scan, if complete */
       if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
           scan_seg.data_size = seg.offset - scan_seg.data_offset;
           scan_seg.is_valid  = TRUE;
       }
       if (scan_seg.is_valid) {
           status = decode_scan(
                 decoder,
                 data + scan_seg.header_offset,
                 scan_seg.header_size,
                 data + scan_seg.data_offset,
                 scan_seg.data_size
               );
           if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
               break;
           memset(&scan_seg, 0, sizeof(scan_seg));
       }

       append_ecs = TRUE;
       switch (seg.marker) {
       case GST_JPEG_MARKER_SOI:
           priv->has_quant_table = FALSE;
           priv->has_huf_table   = FALSE;
           priv->mcu_restart     = 0;
           status = GST_VAAPI_DECODER_STATUS_SUCCESS;
           break;
       case GST_JPEG_MARKER_EOI:
           /* the JPEG decoder handles the data as packetized */
           if (seg.offset + ofs > size)
               *toadd = size;
           else
               *toadd = seg.offset + seg.size;
           *have_frame = TRUE;
           status = GST_VAAPI_DECODER_STATUS_SUCCESS;
           goto beach;
           break;
       case GST_JPEG_MARKER_DHT:
           status = decode_huffman_table(decoder, data + seg.offset, seg.size);
           break;
       case GST_JPEG_MARKER_DQT:
           status = decode_quant_table(decoder, data + seg.offset, seg.size);
           break;
       case GST_JPEG_MARKER_DRI:
           status = decode_restart_interval(decoder, data + seg.offset, seg.size);
           break;
       case GST_JPEG_MARKER_DAC:
           GST_ERROR("unsupported arithmetic coding mode");
           status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
           break;
       case GST_JPEG_MARKER_SOS:
           scan_seg.header_offset = seg.offset;
           scan_seg.header_size   = seg.size;
           scan_seg.data_offset   = seg.offset + seg.size;
           scan_seg.data_size     = 0;
           append_ecs             = FALSE;
           break;
       default:
           /* Restart marker */
           if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
               seg.marker <= GST_JPEG_MARKER_RST_MAX) {
               append_ecs = FALSE;
               break;
           }

           /* Frame header */
           if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
               seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
               status = decode_picture(
                   decoder,
                   seg.marker,
                   data + seg.offset, seg.size,
                   pts
               );
               break;
           }

           /* Application segments */
           if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
               seg.marker <= GST_JPEG_MARKER_APP_MAX) {
               status = GST_VAAPI_DECODER_STATUS_SUCCESS;
               break;
           }

           GST_WARNING("unsupported marker (0x%02x)", seg.marker);
           status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
           break;
       }

       /* Append entropy coded segments */
       if (append_ecs)
           scan_seg.data_size = seg.offset - scan_seg.data_offset;

       if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
           break;
    }
beach:
    return status;
}

gboolean
gst_vaapi_decoder_jpeg_decide_allocation(
    GstVaapiDecoder *dec,
    GstBufferPool *pool)
{
    GstVaapiDecoderJpeg *decoder = GST_VAAPI_DECODER_JPEG(dec);
    GstVaapiDecoderStatus status;

    status = ensure_context(decoder, pool);

    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to create VA context");
        return FALSE;
    }
    return TRUE;
}

GstVaapiDecoderStatus
decode_buffer_jpeg(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer, GstVideoCodecFrame *frame)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture *picture = priv->current_picture;

    if (picture) {
        if (!gst_vaapi_picture_allocate_surface(picture)) {
            GST_ERROR("failed to allocate surface for the current picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }

        picture->frame_id = frame->system_frame_number;

        /* decode the current picture and push it for output */
        if (!decode_current_picture(decoder))
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base, GstVideoCodecFrame *frame)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(base);
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;

    g_return_val_if_fail(priv->is_constructed,
                         GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);

    return decode_buffer_jpeg(decoder, frame->input_buffer, frame);
}

gboolean
gst_vaapi_decoder_jpeg_reset(GstVaapiDecoder *bdec)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(bdec);

    if (!gst_vaapi_decoder_jpeg_open(decoder)) {
       GST_ERROR("failed to re-initialize the JPEG decoder");
       return FALSE;
    }
    return TRUE;
}

static void
gst_vaapi_decoder_jpeg_finalize(GObject *object)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);

    gst_vaapi_decoder_jpeg_destroy(decoder);

    G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class)->finalize(object);
}

static void
gst_vaapi_decoder_jpeg_constructed(GObject *object)
{
    GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GObjectClass *parent_class;

    parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class);
    if (parent_class->constructed)
        parent_class->constructed(object);

    priv->is_constructed = gst_vaapi_decoder_jpeg_create(decoder);
    g_return_if_fail(priv->is_constructed);

    if (!priv->is_opened) {
        priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder);
        g_return_if_fail(priv->is_opened);
    }
}

static void
gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
{
    GObjectClass * const object_class = G_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);

    g_type_class_add_private(klass, sizeof(GstVaapiDecoderJpegPrivate));

    object_class->finalize      = gst_vaapi_decoder_jpeg_finalize;
    object_class->constructed   = gst_vaapi_decoder_jpeg_constructed;

    decoder_class->parse             = gst_vaapi_decoder_jpeg_parse;
    decoder_class->decide_allocation = gst_vaapi_decoder_jpeg_decide_allocation;
    decoder_class->decode            = gst_vaapi_decoder_jpeg_decode;
    decoder_class->reset             = gst_vaapi_decoder_jpeg_reset;
}

static void
gst_vaapi_decoder_jpeg_init(GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate *priv;

    priv                        = GST_VAAPI_DECODER_JPEG_GET_PRIVATE(decoder);
    decoder->priv               = priv;
    priv->profile               = GST_VAAPI_PROFILE_JPEG_BASELINE;
    priv->width                 = 0;
    priv->height                = 0;
    priv->current_picture       = NULL;
    priv->has_huf_table         = FALSE;
    priv->has_quant_table       = FALSE;
    priv->mcu_restart           = 0;
    priv->is_opened             = FALSE;
    priv->profile_changed       = TRUE;
    priv->is_constructed        = FALSE;
    priv->reset_context         = FALSE;
    memset(&priv->frame_hdr, 0, sizeof(priv->frame_hdr));
    memset(&priv->huf_tables, 0, sizeof(priv->huf_tables));
    memset(&priv->quant_tables, 0, sizeof(priv->quant_tables));
}

/**
 * gst_vaapi_decoder_jpeg_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for JPEG decoding.  The @caps can
 * hold extra information like codec-data and the picture coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderJpeg *decoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    decoder = g_object_new(
        GST_VAAPI_TYPE_DECODER_JPEG,
        "display",      display,
        "caps",         caps,
        NULL
    );
    if (!decoder->priv->is_constructed) {
        g_object_unref(decoder);
        return NULL;
    }
    return GST_VAAPI_DECODER_CAST(decoder);
}
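
/*
 * Usage sketch (illustration only, not part of the original file): how a
 * caller might create the JPEG decoder with gst_vaapi_decoder_jpeg_new().
 * It assumes an already-created #GstVaapiDisplay; the "image/jpeg" caps
 * string is an assumption about what the upstream element negotiates.
 *
 *   GstCaps *caps = gst_caps_new_empty_simple("image/jpeg");
 *   GstVaapiDecoder *decoder = gst_vaapi_decoder_jpeg_new(display, caps);
 *   gst_caps_unref(caps);
 *   if (!decoder)
 *       GST_ERROR("failed to create JPEG decoder");
 */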