hardware/intel/intel-driver
Revisão | 82a86f7a28671a7d996011925e6dee031edffd2b (tree) |
---|---|
Hora | 2016-05-09 17:52:22 |
Autor | Xiang, Haihao <haihao.xiang@inte...> |
Committer | Xiang, Haihao
Add a path to fetch encoder status from the underlying context
We can use it to get the coded buffer size if the underlying context supports status queries
Signed-off-by: Xiang, Haihao <haihao.xiang@intel.com>
Reviewed-By: Sean V Kelley <sean.v.kelley@intel.com>
@@ -2345,6 +2345,7 @@ i965_create_buffer_internal(VADriverContextP ctx, | ||
2345 | 2345 | obj_buffer->export_refcount = 0; |
2346 | 2346 | obj_buffer->buffer_store = NULL; |
2347 | 2347 | obj_buffer->wrapper_buffer = VA_INVALID_ID; |
2348 | + obj_buffer->context_id = context; | |
2348 | 2349 | |
2349 | 2350 | buffer_store = calloc(1, sizeof(struct buffer_store)); |
2350 | 2351 | assert(buffer_store); |
@@ -2409,6 +2410,7 @@ i965_create_buffer_internal(VADriverContextP ctx, | ||
2409 | 2410 | coded_buffer_segment->base.next = NULL; |
2410 | 2411 | coded_buffer_segment->mapped = 0; |
2411 | 2412 | coded_buffer_segment->codec = 0; |
2413 | + coded_buffer_segment->status_support = 0; | |
2412 | 2414 | dri_bo_unmap(buffer_store->bo); |
2413 | 2415 | } else if (data) { |
2414 | 2416 | dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data); |
@@ -2499,6 +2501,7 @@ i965_MapBuffer(VADriverContextP ctx, | ||
2499 | 2501 | struct i965_driver_data *i965 = i965_driver_data(ctx); |
2500 | 2502 | struct object_buffer *obj_buffer = BUFFER(buf_id); |
2501 | 2503 | VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN; |
2504 | + struct object_context *obj_context = CONTEXT(obj_buffer->context_id); | |
2502 | 2505 | |
2503 | 2506 | ASSERT_RET(obj_buffer && obj_buffer->buffer_store, VA_STATUS_ERROR_INVALID_BUFFER); |
2504 | 2507 |
@@ -2532,6 +2535,7 @@ i965_MapBuffer(VADriverContextP ctx, | ||
2532 | 2535 | |
2533 | 2536 | ASSERT_RET(obj_buffer->buffer_store->bo->virtual, VA_STATUS_ERROR_OPERATION_FAILED); |
2534 | 2537 | *pbuf = obj_buffer->buffer_store->bo->virtual; |
2538 | + vaStatus = VA_STATUS_SUCCESS; | |
2535 | 2539 | |
2536 | 2540 | if (obj_buffer->type == VAEncCodedBufferType) { |
2537 | 2541 | int i; |
@@ -2544,69 +2548,77 @@ i965_MapBuffer(VADriverContextP ctx, | ||
2544 | 2548 | |
2545 | 2549 | coded_buffer_segment->base.buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + I965_CODEDBUFFER_HEADER_SIZE; |
2546 | 2550 | |
2547 | - if (coded_buffer_segment->codec == CODEC_H264 || | |
2548 | - coded_buffer_segment->codec == CODEC_H264_MVC) { | |
2549 | - delimiter0 = H264_DELIMITER0; | |
2550 | - delimiter1 = H264_DELIMITER1; | |
2551 | - delimiter2 = H264_DELIMITER2; | |
2552 | - delimiter3 = H264_DELIMITER3; | |
2553 | - delimiter4 = H264_DELIMITER4; | |
2554 | - } else if (coded_buffer_segment->codec == CODEC_MPEG2) { | |
2555 | - delimiter0 = MPEG2_DELIMITER0; | |
2556 | - delimiter1 = MPEG2_DELIMITER1; | |
2557 | - delimiter2 = MPEG2_DELIMITER2; | |
2558 | - delimiter3 = MPEG2_DELIMITER3; | |
2559 | - delimiter4 = MPEG2_DELIMITER4; | |
2560 | - } else if(coded_buffer_segment->codec == CODEC_JPEG) { | |
2561 | - //In JPEG End of Image (EOI = 0xDDF9) marker can be used for delimiter. | |
2562 | - delimiter0 = 0xFF; | |
2563 | - delimiter1 = 0xD9; | |
2564 | - } else if (coded_buffer_segment->codec == CODEC_HEVC) { | |
2565 | - delimiter0 = HEVC_DELIMITER0; | |
2566 | - delimiter1 = HEVC_DELIMITER1; | |
2567 | - delimiter2 = HEVC_DELIMITER2; | |
2568 | - delimiter3 = HEVC_DELIMITER3; | |
2569 | - delimiter4 = HEVC_DELIMITER4; | |
2570 | - } else if (coded_buffer_segment->codec != CODEC_VP8) { | |
2571 | - ASSERT_RET(0, VA_STATUS_ERROR_UNSUPPORTED_PROFILE); | |
2572 | - } | |
2551 | + if (obj_context && | |
2552 | + obj_context->hw_context && | |
2553 | + obj_context->hw_context->get_status && | |
2554 | + coded_buffer_segment->status_support) { | |
2555 | + vaStatus = obj_context->hw_context->get_status(ctx, obj_context->hw_context, coded_buffer_segment); | |
2556 | + } else { | |
2557 | + if (coded_buffer_segment->codec == CODEC_H264 || | |
2558 | + coded_buffer_segment->codec == CODEC_H264_MVC) { | |
2559 | + delimiter0 = H264_DELIMITER0; | |
2560 | + delimiter1 = H264_DELIMITER1; | |
2561 | + delimiter2 = H264_DELIMITER2; | |
2562 | + delimiter3 = H264_DELIMITER3; | |
2563 | + delimiter4 = H264_DELIMITER4; | |
2564 | + } else if (coded_buffer_segment->codec == CODEC_MPEG2) { | |
2565 | + delimiter0 = MPEG2_DELIMITER0; | |
2566 | + delimiter1 = MPEG2_DELIMITER1; | |
2567 | + delimiter2 = MPEG2_DELIMITER2; | |
2568 | + delimiter3 = MPEG2_DELIMITER3; | |
2569 | + delimiter4 = MPEG2_DELIMITER4; | |
2570 | + } else if(coded_buffer_segment->codec == CODEC_JPEG) { | |
2571 | + //In JPEG, the End of Image (EOI = 0xFFD9) marker can be used as the delimiter. | |
2572 | + delimiter0 = 0xFF; | |
2573 | + delimiter1 = 0xD9; | |
2574 | + } else if (coded_buffer_segment->codec == CODEC_HEVC) { | |
2575 | + delimiter0 = HEVC_DELIMITER0; | |
2576 | + delimiter1 = HEVC_DELIMITER1; | |
2577 | + delimiter2 = HEVC_DELIMITER2; | |
2578 | + delimiter3 = HEVC_DELIMITER3; | |
2579 | + delimiter4 = HEVC_DELIMITER4; | |
2580 | + } else if (coded_buffer_segment->codec != CODEC_VP8) { | |
2581 | + ASSERT_RET(0, VA_STATUS_ERROR_UNSUPPORTED_PROFILE); | |
2582 | + } | |
2573 | 2583 | |
2574 | - if(coded_buffer_segment->codec == CODEC_JPEG) { | |
2575 | - for(i = 0; i < obj_buffer->size_element - header_offset - 1 - 0x1000; i++) { | |
2576 | - if( (buffer[i] == 0xFF) && (buffer[i + 1] == 0xD9)) { | |
2577 | - break; | |
2584 | + if(coded_buffer_segment->codec == CODEC_JPEG) { | |
2585 | + for(i = 0; i < obj_buffer->size_element - header_offset - 1 - 0x1000; i++) { | |
2586 | + if( (buffer[i] == 0xFF) && (buffer[i + 1] == 0xD9)) { | |
2587 | + break; | |
2588 | + } | |
2578 | 2589 | } |
2579 | - } | |
2580 | - coded_buffer_segment->base.size = i + 2; | |
2581 | - } else if (coded_buffer_segment->codec != CODEC_VP8) { | |
2582 | - /* vp8 coded buffer size can be told by vp8 internal statistics buffer, | |
2583 | - so it don't need to traversal the coded buffer */ | |
2584 | - for (i = 0; i < obj_buffer->size_element - header_offset - 3 - 0x1000; i++) { | |
2585 | - if ((buffer[i] == delimiter0) && | |
2586 | - (buffer[i + 1] == delimiter1) && | |
2587 | - (buffer[i + 2] == delimiter2) && | |
2588 | - (buffer[i + 3] == delimiter3) && | |
2589 | - (buffer[i + 4] == delimiter4)) | |
2590 | - break; | |
2590 | + coded_buffer_segment->base.size = i + 2; | |
2591 | + } else if (coded_buffer_segment->codec != CODEC_VP8) { | |
2592 | + /* The vp8 coded buffer size can be obtained from the vp8 internal statistics buffer, | |
2593 | + so there is no need to traverse the coded buffer */ | |
2594 | + for (i = 0; i < obj_buffer->size_element - header_offset - 3 - 0x1000; i++) { | |
2595 | + if ((buffer[i] == delimiter0) && | |
2596 | + (buffer[i + 1] == delimiter1) && | |
2597 | + (buffer[i + 2] == delimiter2) && | |
2598 | + (buffer[i + 3] == delimiter3) && | |
2599 | + (buffer[i + 4] == delimiter4)) | |
2600 | + break; | |
2601 | + } | |
2602 | + | |
2603 | + if (i == obj_buffer->size_element - header_offset - 3 - 0x1000) { | |
2604 | + coded_buffer_segment->base.status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; | |
2605 | + } | |
2606 | + coded_buffer_segment->base.size = i; | |
2591 | 2607 | } |
2592 | 2608 | |
2593 | - if (i == obj_buffer->size_element - header_offset - 3 - 0x1000) { | |
2609 | + if (coded_buffer_segment->base.size >= obj_buffer->size_element - header_offset - 0x1000) { | |
2594 | 2610 | coded_buffer_segment->base.status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; |
2595 | 2611 | } |
2596 | - coded_buffer_segment->base.size = i; | |
2597 | - } | |
2598 | 2612 | |
2599 | - if (coded_buffer_segment->base.size >= obj_buffer->size_element - header_offset - 0x1000) { | |
2600 | - coded_buffer_segment->base.status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK; | |
2613 | + vaStatus = VA_STATUS_SUCCESS; | |
2601 | 2614 | } |
2602 | 2615 | |
2603 | 2616 | coded_buffer_segment->mapped = 1; |
2604 | 2617 | } else { |
2605 | 2618 | assert(coded_buffer_segment->base.buf); |
2619 | + vaStatus = VA_STATUS_SUCCESS; | |
2606 | 2620 | } |
2607 | 2621 | } |
2608 | - | |
2609 | - vaStatus = VA_STATUS_SUCCESS; | |
2610 | 2622 | } else if (NULL != obj_buffer->buffer_store->buffer) { |
2611 | 2623 | *pbuf = obj_buffer->buffer_store->buffer; |
2612 | 2624 | vaStatus = VA_STATUS_SUCCESS; |
@@ -231,6 +231,9 @@ struct hw_context | ||
231 | 231 | union codec_state *codec_state, |
232 | 232 | struct hw_context *hw_context); |
233 | 233 | void (*destroy)(void *); |
234 | + VAStatus (*get_status)(VADriverContextP ctx, | |
235 | + struct hw_context *hw_context, | |
236 | + void *buffer); | |
234 | 237 | struct intel_batchbuffer *batch; |
235 | 238 | }; |
236 | 239 |
@@ -309,6 +312,7 @@ struct object_buffer | ||
309 | 312 | VABufferInfo export_state; |
310 | 313 | |
311 | 314 | VAGenericID wrapper_buffer; |
315 | + VAContextID context_id; | |
312 | 316 | }; |
313 | 317 | |
314 | 318 | struct object_image |
@@ -501,9 +505,17 @@ va_enc_packed_type_to_idx(int packed_type); | ||
501 | 505 | |
502 | 506 | struct i965_coded_buffer_segment |
503 | 507 | { |
504 | - VACodedBufferSegment base; | |
505 | - unsigned char mapped; | |
506 | - unsigned char codec; | |
508 | + union { | |
509 | + VACodedBufferSegment base; | |
510 | + unsigned char pad0[64]; /* change the size if sizeof(VACodedBufferSegment) > 64 */ | |
511 | + }; | |
512 | + | |
513 | + unsigned int mapped; | |
514 | + unsigned int codec; | |
515 | + unsigned int status_support; | |
516 | + unsigned int pad1; | |
517 | + | |
518 | + unsigned int codec_private_data[512]; /* Store codec private data, must be 16-byte aligned */ | |
507 | 519 | }; |
508 | 520 | |
509 | 521 | #define I965_CODEDBUFFER_HEADER_SIZE ALIGN(sizeof(struct i965_coded_buffer_segment), 0x1000) |
@@ -662,6 +662,19 @@ intel_encoder_context_destroy(void *hw_context) | ||
662 | 662 | free(encoder_context); |
663 | 663 | } |
664 | 664 | |
665 | + | |
666 | +static VAStatus | |
667 | +intel_encoder_get_status(VADriverContextP ctx, struct hw_context *hw_context, void *buffer) | |
668 | +{ | |
669 | + struct intel_encoder_context *encoder_context = (struct intel_encoder_context *)hw_context; | |
670 | + struct i965_coded_buffer_segment *coded_buffer_segment = (struct i965_coded_buffer_segment *)buffer; | |
671 | + | |
672 | + if (encoder_context->get_status) | |
673 | + return encoder_context->get_status(ctx, encoder_context, coded_buffer_segment); | |
674 | + | |
675 | + return VA_STATUS_ERROR_UNIMPLEMENTED; | |
676 | +} | |
677 | + | |
665 | 678 | typedef Bool (* hw_init_func)(VADriverContextP, struct intel_encoder_context *); |
666 | 679 | |
667 | 680 | static struct hw_context * |
@@ -677,6 +690,7 @@ intel_enc_hw_context_init(VADriverContextP ctx, | ||
677 | 690 | assert(encoder_context); |
678 | 691 | encoder_context->base.destroy = intel_encoder_context_destroy; |
679 | 692 | encoder_context->base.run = intel_encoder_end_picture; |
693 | + encoder_context->base.get_status = intel_encoder_get_status; | |
680 | 694 | encoder_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0); |
681 | 695 | encoder_context->input_yuv_surface = VA_INVALID_SURFACE; |
682 | 696 | encoder_context->is_tmp_id = 0; |
@@ -59,6 +59,10 @@ struct intel_encoder_context | ||
59 | 59 | struct intel_encoder_context *encoder_context); |
60 | 60 | void (*mfc_brc_prepare)(struct encode_state *encode_state, |
61 | 61 | struct intel_encoder_context *encoder_context); |
62 | + | |
63 | + VAStatus (*get_status)(VADriverContextP ctx, | |
64 | + struct intel_encoder_context *encoder_context, | |
65 | + struct i965_coded_buffer_segment *coded_buffer_segment); | |
62 | 66 | }; |
63 | 67 | |
64 | 68 | extern struct hw_context * |