This article collects and summarizes typical usage examples of the GST_ERROR function in C++. If you are struggling with questions such as: what exactly does C++ GST_ERROR do? How is GST_ERROR used? What do real calls look like? Then the hand-picked code examples below may help.
The following shows 20 code examples of the GST_ERROR function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps our system recommend better C++ code examples.
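Before the examples, a minimal, self-contained sketch of the usual GST_ERROR pattern may help: the macro logs a printf-style message at the ERROR level against the file's active debug category. This sketch is illustrative only; the category and function names are hypothetical and do not come from the examples below.

/* Minimal GST_ERROR usage sketch (hypothetical names, not taken from the examples below) */
#include <gst/gst.h>

GST_DEBUG_CATEGORY_STATIC (my_example_debug);   /* hypothetical debug category */
#define GST_CAT_DEFAULT my_example_debug

static gboolean
open_device (const gchar * path)
{
  if (path == NULL) {
    /* Logs a printf-style message at ERROR level against GST_CAT_DEFAULT */
    GST_ERROR ("no device path given");
    return FALSE;
  }
  GST_DEBUG ("opening device %s", path);
  return TRUE;
}

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);
  GST_DEBUG_CATEGORY_INIT (my_example_debug, "myexample", 0, "example category");
  open_device (NULL);           /* emits the GST_ERROR message */
  return 0;
}

Run with GST_DEBUG=myexample:1 (or higher) in the environment to see the ERROR message on stderr.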
Example 1: gst_vaapi_surface_create_from_buffer_proxy
static gboolean
gst_vaapi_surface_create_from_buffer_proxy (GstVaapiSurface * surface,
GstVaapiBufferProxy * proxy, const GstVideoInfo * vip)
{
#if VA_CHECK_VERSION (0,36,0)
GstVaapiDisplay *const display = GST_VAAPI_OBJECT_DISPLAY (surface);
GstVideoFormat format;
VASurfaceID surface_id;
VAStatus status;
guint chroma_type, va_chroma_format;
const VAImageFormat *va_format;
VASurfaceAttrib attribs[2], *attrib;
VASurfaceAttribExternalBuffers extbuf;
unsigned long extbuf_handle;
guint i, width, height;
format = GST_VIDEO_INFO_FORMAT (vip);
width = GST_VIDEO_INFO_WIDTH (vip);
height = GST_VIDEO_INFO_HEIGHT (vip);
gst_vaapi_buffer_proxy_replace (&surface->extbuf_proxy, proxy);
va_format = gst_vaapi_video_format_to_va_format (format);
if (!va_format)
goto error_unsupported_format;
chroma_type = gst_vaapi_video_format_get_chroma_type (format);
if (!chroma_type)
goto error_unsupported_format;
va_chroma_format = from_GstVaapiChromaType (chroma_type);
if (!va_chroma_format)
goto error_unsupported_format;
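/* Describe the externally allocated buffer (handle, size, plane layout) so VA can wrap it as a surface */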
extbuf_handle = GST_VAAPI_BUFFER_PROXY_HANDLE (proxy);
extbuf.pixel_format = va_format->fourcc;
extbuf.width = width;
extbuf.height = height;
extbuf.data_size = GST_VAAPI_BUFFER_PROXY_SIZE (proxy);
extbuf.num_planes = GST_VIDEO_INFO_N_PLANES (vip);
for (i = 0; i < extbuf.num_planes; i++) {
extbuf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (vip, i);
extbuf.offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (vip, i);
}
extbuf.buffers = &extbuf_handle;
extbuf.num_buffers = 1;
extbuf.flags = 0;
extbuf.private_data = NULL;
attrib = attribs;
attrib->type = VASurfaceAttribExternalBufferDescriptor;
attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
attrib->value.type = VAGenericValueTypePointer;
attrib->value.value.p = &extbuf;
attrib++;
attrib->type = VASurfaceAttribMemoryType;
attrib->flags = VA_SURFACE_ATTRIB_SETTABLE;
attrib->value.type = VAGenericValueTypeInteger;
attrib->value.value.i =
from_GstVaapiBufferMemoryType (GST_VAAPI_BUFFER_PROXY_TYPE (proxy));
attrib++;
GST_VAAPI_DISPLAY_LOCK (display);
status = vaCreateSurfaces (GST_VAAPI_DISPLAY_VADISPLAY (display),
va_chroma_format, width, height, &surface_id, 1, attribs,
attrib - attribs);
GST_VAAPI_DISPLAY_UNLOCK (display);
if (!vaapi_check_status (status, "vaCreateSurfaces()"))
return FALSE;
surface->format = format;
surface->chroma_type = chroma_type;
surface->width = width;
surface->height = height;
GST_DEBUG ("surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id));
GST_VAAPI_OBJECT_ID (surface) = surface_id;
return TRUE;
/* ERRORS */
error_unsupported_format:
GST_ERROR ("unsupported format %s",
gst_vaapi_video_format_to_string (format));
return FALSE;
#else
return FALSE;
#endif
}
Developer ID: 01org, Project: iotg-lin-gfx-gstreamer-vaapi, Lines: 88, Source file: gstvaapisurface.c
Example 2: default_can_save_uri
static gboolean
default_can_save_uri (const gchar * uri)
{
GST_ERROR ("No 'can_save_uri' vmethod implementation");
return FALSE;
}
Developer ID: matasbbb, Project: GES, Lines: 6, Source file: ges-formatter.c
Example 3: decode_scan
static GstVaapiDecoderStatus
decode_scan (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
GstVaapiPicture *const picture = priv->current_picture;
GstVaapiSlice *slice;
VASliceParameterBufferJPEGBaseline *slice_param;
GstJpegScanHdr scan_hdr;
guint scan_hdr_size, scan_data_size;
guint i, h_max, v_max, mcu_width, mcu_height;
if (!VALID_STATE (decoder, GOT_SOF))
return GST_VAAPI_DECODER_STATUS_SUCCESS;
scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
scan_data_size = seg->size - scan_hdr_size;
memset (&scan_hdr, 0, sizeof (scan_hdr));
if (!gst_jpeg_segment_parse_scan_header (seg, &scan_hdr)) {
GST_ERROR ("failed to parse scan header");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
slice = GST_VAAPI_SLICE_NEW (JPEGBaseline, decoder,
seg->data + seg->offset + scan_hdr_size, scan_data_size);
if (!slice) {
GST_ERROR ("failed to allocate slice");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
gst_vaapi_picture_add_slice (picture, slice);
if (!VALID_STATE (decoder, GOT_HUF_TABLE))
gst_jpeg_get_default_huffman_tables (&priv->huf_tables);
// Update VA Huffman table if it changed for this scan
if (huffman_tables_updated (&priv->huf_tables)) {
slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW (JPEGBaseline, decoder);
if (!slice->huf_table) {
GST_ERROR ("failed to allocate Huffman tables");
huffman_tables_reset (&priv->huf_tables);
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
fill_huffman_table (slice->huf_table, &priv->huf_tables);
huffman_tables_reset (&priv->huf_tables);
}
slice_param = slice->param;
slice_param->num_components = scan_hdr.num_components;
for (i = 0; i < scan_hdr.num_components; i++) {
slice_param->components[i].component_selector =
scan_hdr.components[i].component_selector;
slice_param->components[i].dc_table_selector =
scan_hdr.components[i].dc_selector;
slice_param->components[i].ac_table_selector =
scan_hdr.components[i].ac_selector;
}
slice_param->restart_interval = priv->mcu_restart;
slice_param->slice_horizontal_position = 0;
slice_param->slice_vertical_position = 0;
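/* A baseline JPEG MCU spans 8x8 samples scaled by the maximum horizontal/vertical sampling factors */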
get_max_sampling_factors (&priv->frame_hdr, &h_max, &v_max);
mcu_width = 8 * h_max;
mcu_height = 8 * v_max;
if (scan_hdr.num_components == 1) { // Non-interleaved
const guint Csj = slice_param->components[0].component_selector;
const GstJpegFrameComponent *const fcp =
get_component (&priv->frame_hdr, Csj);
if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
GST_ERROR ("failed to validate image component %u", Csj);
return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
}
mcu_width /= fcp->horizontal_factor;
mcu_height /= fcp->vertical_factor;
}
slice_param->num_mcus =
((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
((priv->frame_hdr.height + mcu_height - 1) / mcu_height);
priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
Developer ID: ceyusa, Project: gstreamer-vaapi, Lines: 83, Source file: gstvaapidecoder_jpeg.c
Example 4: mxf_partition_pack_parse
//......... part of the code omitted here .........
size -= 2;
GST_DEBUG (" MXF version = %u.%u", pack->major_version, pack->minor_version);
pack->kag_size = GST_READ_UINT32_BE (data);
data += 4;
size -= 4;
GST_DEBUG (" KAG size = %u", pack->kag_size);
pack->this_partition = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
GST_DEBUG (" this partition offset = %" G_GUINT64_FORMAT,
pack->this_partition);
pack->prev_partition = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
GST_DEBUG (" previous partition offset = %" G_GUINT64_FORMAT,
pack->prev_partition);
pack->footer_partition = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
GST_DEBUG (" footer partition offset = %" G_GUINT64_FORMAT,
pack->footer_partition);
pack->header_byte_count = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
GST_DEBUG (" header byte count = %" G_GUINT64_FORMAT,
pack->header_byte_count);
pack->index_byte_count = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
pack->index_sid = GST_READ_UINT32_BE (data);
data += 4;
size -= 4;
GST_DEBUG (" index sid = %u, size = %" G_GUINT64_FORMAT, pack->index_sid,
pack->index_byte_count);
pack->body_offset = GST_READ_UINT64_BE (data);
data += 8;
size -= 8;
pack->body_sid = GST_READ_UINT32_BE (data);
data += 4;
size -= 4;
GST_DEBUG (" body sid = %u, offset = %" G_GUINT64_FORMAT, pack->body_sid,
pack->body_offset);
memcpy (&pack->operational_pattern, data, 16);
data += 16;
size -= 16;
GST_DEBUG (" operational pattern = %s",
mxf_ul_to_string (&pack->operational_pattern, str));
pack->n_essence_containers = GST_READ_UINT32_BE (data);
data += 4;
size -= 4;
GST_DEBUG (" number of essence containers = %u", pack->n_essence_containers);
if (GST_READ_UINT32_BE (data) != 16)
goto error;
data += 4;
size -= 4;
if (size < 16 * pack->n_essence_containers)
goto error;
if (pack->n_essence_containers) {
pack->essence_containers = g_new (MXFUL, pack->n_essence_containers);
for (i = 0; i < pack->n_essence_containers; i++) {
memcpy (&pack->essence_containers[i], data + i * 16, 16);
GST_DEBUG (" essence container %u = %s", i,
mxf_ul_to_string (&pack->essence_containers[i], str));
}
}
pack->valid = TRUE;
return TRUE;
error:
GST_ERROR ("Invalid partition pack");
mxf_partition_pack_reset (pack);
return FALSE;
}
Developer ID: prajnashi, Project: gst-plugins-bad, Lines: 101, Source file: mxfparse.c
Example 5: gst_v4l2_memory_group_new
static GstV4l2MemoryGroup *
gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
{
GstV4l2Object *obj = allocator->obj;
guint32 memory = allocator->memory;
struct v4l2_format *format = &obj->format;
GstV4l2MemoryGroup *group;
gsize img_size, buf_size;
group = g_slice_new0 (GstV4l2MemoryGroup);
group->buffer.type = format->type;
group->buffer.index = index;
group->buffer.memory = memory;
if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
group->buffer.m.planes = group->planes;
} else {
group->n_mem = 1;
}
if (obj->ioctl (obj->video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
goto querybuf_failed;
if (group->buffer.index != index) {
GST_ERROR_OBJECT (allocator, "Buffer index returned by VIDIOC_QUERYBUF "
"didn't match, this indicate the presence of a bug in your driver or "
"libv4l2");
g_slice_free (GstV4l2MemoryGroup, group);
return NULL;
}
/* Check that the provided size matches the format we have negotiated. Failing
 * here usually means a driver or libv4l bug. */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i;
for (i = 0; i < group->n_mem; i++) {
img_size = obj->format.fmt.pix_mp.plane_fmt[i].sizeimage;
buf_size = group->planes[i].length;
if (buf_size < img_size)
goto buffer_too_short;
}
} else {
img_size = obj->format.fmt.pix.sizeimage;
buf_size = group->buffer.length;
if (buf_size < img_size)
goto buffer_too_short;
}
/* We save non planar buffer information into the multi-planar plane array
* to avoid duplicating the code later */
if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
group->planes[0].bytesused = group->buffer.bytesused;
group->planes[0].length = group->buffer.length;
group->planes[0].data_offset = 0;
g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
}
GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index);
GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type);
GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags);
GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field);
GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory);
GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);
#ifndef GST_DISABLE_GST_DEBUG
if (memory == V4L2_MEMORY_MMAP) {
gint i;
for (i = 0; i < group->n_mem; i++) {
GST_LOG_OBJECT (allocator,
" [%u] bytesused: %u, length: %u, offset: %u", i,
group->planes[i].bytesused, group->planes[i].length,
group->planes[i].data_offset);
GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i,
group->planes[i].m.mem_offset);
}
}
#endif
return group;
querybuf_failed:
{
GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
goto failed;
}
buffer_too_short:
{
GST_ERROR ("buffer size %" G_GSIZE_FORMAT
" is smaller then negotiated size %" G_GSIZE_FORMAT
", this is usually the result of a bug in the v4l2 driver or libv4l.",
buf_size, img_size);
goto failed;
}
failed:
gst_v4l2_memory_group_free (group);
//......... part of the code omitted here .........
Developer ID: nnikos123, Project: gst-plugins-good, Lines: 101, Source file: gstv4l2allocator.c
Example 6: gst_base_video_decoder_drain
static GstFlowReturn
gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, gboolean at_eos)
{
GstBaseVideoDecoderClass *klass;
GstBaseVideoDecoderScanResult res;
guint size;
klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec);
if (gst_adapter_available (dec->input_adapter) == 0)
return GST_FLOW_OK;
lost_sync:
if (!dec->have_sync) {
gint n, m;
GST_DEBUG ("no sync, scanning");
n = gst_adapter_available (dec->input_adapter);
m = klass->scan_for_sync (dec, dec->input_adapter);
if (m == -1) {
gst_object_unref (dec);
return GST_FLOW_OK;
}
if (m < 0) {
g_warning ("subclass returned negative scan %d", m);
}
if (m >= n) {
GST_ERROR ("subclass scanned past end %d >= %d", m, n);
}
gst_adapter_flush (dec->input_adapter, m);
if (m < n) {
GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);
/* this is only "maybe" sync */
dec->have_sync = TRUE;
}
if (!dec->have_sync) {
return GST_FLOW_OK;
}
}
res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) {
GstBuffer *buf;
GstFlowReturn ret;
GST_DEBUG ("Packet size: %u", size);
if (size > gst_adapter_available (dec->input_adapter))
return GST_FLOW_OK;
buf = gst_adapter_take_buffer (dec->input_adapter, size);
dec->prev_buf_offset = dec->current_buf_offset;
dec->current_buf_offset = dec->input_offset -
gst_adapter_available (dec->input_adapter);
ret = klass->parse_data (dec, buf, at_eos);
if (ret != GST_FLOW_OK)
return ret;
res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
}
switch (res) {
case GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC:
dec->have_sync = FALSE;
goto lost_sync;
case GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA:
return GST_FLOW_OK;
default:
GST_ERROR_OBJECT (dec, "Subclass returned invalid scan result");
return GST_FLOW_ERROR;
}
}
Developer ID: spunktsch, Project: svtplayer, Lines: 81, Source file: gstbasevideodecoder.c
Example 7: test_one_after_other_full
//......... part of the code omitted here .........
GST_DEBUG ("Setting pipeline to PLAYING");
ASSERT_OBJECT_REFCOUNT (source1, "source1", 1);
fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE);
GST_DEBUG ("Let's poll the bus");
while (carry_on) {
message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
if (message) {
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
/* we should check if we really finished here */
GST_WARNING ("Got an EOS");
carry_on = FALSE;
break;
case GST_MESSAGE_SEGMENT_START:
case GST_MESSAGE_SEGMENT_DONE:
/* We shouldn't see any segment messages, since we didn't do a segment seek */
GST_WARNING ("Saw a Segment start/stop");
fail_if (TRUE);
break;
case GST_MESSAGE_ERROR:
GST_WARNING ("Saw an ERROR");
fail_if (TRUE);
default:
break;
}
gst_mini_object_unref (GST_MINI_OBJECT (message));
}
}
GST_DEBUG ("Setting pipeline to NULL");
fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
GST_STATE_READY) == GST_STATE_CHANGE_FAILURE);
fail_if (collect->expected_segments != NULL);
GST_DEBUG ("Resetted pipeline to READY");
/* Expected segments */
collect->expected_segments = g_list_append (collect->expected_segments,
segment_new (1.0, GST_FORMAT_TIME, 5 * GST_SECOND, 6 * GST_SECOND, 0));
collect->expected_segments = g_list_append (collect->expected_segments,
segment_new (1.0, GST_FORMAT_TIME,
2 * GST_SECOND, 3 * GST_SECOND, 1 * GST_SECOND));
collect->gotsegment = FALSE;
GST_DEBUG ("Setting pipeline to PLAYING again");
fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE);
carry_on = TRUE;
GST_DEBUG ("Let's poll the bus AGAIN");
while (carry_on) {
message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);
if (message) {
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_EOS:
/* we should check if we really finished here */
carry_on = FALSE;
break;
case GST_MESSAGE_SEGMENT_START:
case GST_MESSAGE_SEGMENT_DONE:
/* We shouldn't see any segment messages, since we didn't do a segment seek */
GST_WARNING ("Saw a Segment start/stop");
fail_if (TRUE);
break;
case GST_MESSAGE_ERROR:
GST_ERROR ("Saw an ERROR");
fail_if (TRUE);
default:
break;
}
gst_mini_object_unref (GST_MINI_OBJECT (message));
} else {
GST_DEBUG ("bus_poll responded, but there wasn't any message...");
}
}
fail_if (collect->expected_segments != NULL);
gst_object_unref (GST_OBJECT (sinkpad));
fail_if (gst_element_set_state (GST_ELEMENT (pipeline),
GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE);
ASSERT_OBJECT_REFCOUNT_BETWEEN (pipeline, "main pipeline", 1, 2);
gst_object_unref (pipeline);
ASSERT_OBJECT_REFCOUNT_BETWEEN (bus, "main bus", 1, 2);
gst_object_unref (bus);
g_free (collect);
}
Developer ID: alessandrod, Project: gnonlin, Lines: 101, Source file: simple.c
Example 8: read_one
static gboolean
read_one (GstPluginLoader * l)
{
guint64 magic;
guint32 to_read, packet_len, tag;
guint8 *in;
gint res;
to_read = HEADER_SIZE;
in = l->rx_buf;
do {
res = read (l->fd_r.fd, in, to_read);
if (G_UNLIKELY (res < 0)) {
if (errno == EAGAIN || errno == EINTR)
continue;
GST_LOG ("Failed reading packet header");
return FALSE;
}
to_read -= res;
in += res;
} while (to_read > 0);
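/* Header layout (see the reads below): byte 0 = packet type, bytes 1-3 = tag, bytes 4-7 = payload length, bytes 8-11 = magic */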
magic = GST_READ_UINT32_BE (l->rx_buf + 8);
if (magic != HEADER_MAGIC) {
GST_WARNING
("Invalid packet (bad magic number) received from plugin scanner subprocess");
return FALSE;
}
packet_len = GST_READ_UINT32_BE (l->rx_buf + 4);
if (packet_len + HEADER_SIZE > BUF_MAX_SIZE) {
GST_WARNING
("Received excessively large packet for plugin scanner subprocess");
return FALSE;
}
tag = GST_READ_UINT24_BE (l->rx_buf + 1);
if (packet_len > 0) {
if (packet_len + HEADER_SIZE >= l->rx_buf_size) {
GST_LOG ("Expanding rx buf from %d to %d",
l->rx_buf_size, packet_len + HEADER_SIZE + BUF_GROW_EXTRA);
l->rx_buf_size = packet_len + HEADER_SIZE + BUF_GROW_EXTRA;
l->rx_buf = g_realloc (l->rx_buf, l->rx_buf_size);
}
in = l->rx_buf + HEADER_SIZE;
to_read = packet_len;
do {
res = read (l->fd_r.fd, in, to_read);
if (G_UNLIKELY (res < 0)) {
if (errno == EAGAIN || errno == EINTR)
continue;
GST_ERROR ("Packet payload read failed");
return FALSE;
}
to_read -= res;
in += res;
} while (to_read > 0);
} else {
GST_LOG ("No payload to read for 0 length packet type %d tag %u",
l->rx_buf[0], tag);
}
return handle_rx_packet (l, l->rx_buf[0], tag,
l->rx_buf + HEADER_SIZE, packet_len);
}
Developer ID: AlerIl, Project: gstreamer0.10, Lines: 66, Source file: gstpluginloader.c
Example 9: fill_planes
//......... part of the code omitted here .........
info->offset[0] = 0;
info->offset[1] = info->stride[0] * height;
info->offset[2] = info->offset[1] + info->stride[1] * height;
/* simplification of ROUNDUP4(w)*h + 2*((ROUNDUP16(w)/4)*h */
break;
case GST_VIDEO_FORMAT_Y42B:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = GST_ROUND_UP_8 (width) / 2;
info->stride[2] = info->stride[1];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * height;
info->offset[2] = info->offset[1] + info->stride[1] * height;
/* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */
break;
case GST_VIDEO_FORMAT_Y444:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = info->stride[0];
info->stride[2] = info->stride[0];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * height;
info->offset[2] = info->offset[1] * 2;
break;
case GST_VIDEO_FORMAT_NV12:
case GST_VIDEO_FORMAT_NV21:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = info->stride[0];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
break;
case GST_VIDEO_FORMAT_A420:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
info->stride[2] = info->stride[1];
info->stride[3] = info->stride[0];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
info->offset[2] = info->offset[1] +
info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
info->offset[3] = info->offset[2] +
info->stride[2] * (GST_ROUND_UP_2 (height) / 2);
break;
case GST_VIDEO_FORMAT_YUV9:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
info->stride[2] = info->stride[1];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * height;
info->offset[2] = info->offset[1] +
info->stride[1] * (GST_ROUND_UP_4 (height) / 4);
break;
case GST_VIDEO_FORMAT_YVU9:
info->stride[0] = GST_ROUND_UP_4 (width);
info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
info->stride[2] = info->stride[1];
info->offset[0] = 0;
info->offset[2] = info->stride[0] * height;
info->offset[1] = info->offset[2] +
info->stride[1] * (GST_ROUND_UP_4 (height) / 4);
break;
case GST_VIDEO_FORMAT_I420_10LE:
case GST_VIDEO_FORMAT_I420_10BE:
info->stride[0] = GST_ROUND_UP_4 (width * 2);
info->stride[1] = GST_ROUND_UP_4 (width);
info->stride[2] = info->stride[1];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
info->offset[2] = info->offset[1] +
info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
info->size = info->offset[2] +
info->stride[2] * (GST_ROUND_UP_2 (height) / 2);
break;
case GST_VIDEO_FORMAT_I422_10LE:
case GST_VIDEO_FORMAT_I422_10BE:
info->stride[0] = GST_ROUND_UP_4 (width * 2);
info->stride[1] = GST_ROUND_UP_4 (width);
info->stride[2] = info->stride[1];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
info->offset[2] = info->offset[1] +
info->stride[1] * GST_ROUND_UP_2 (height);
info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
break;
case GST_VIDEO_FORMAT_Y444_10LE:
case GST_VIDEO_FORMAT_Y444_10BE:
info->stride[0] = GST_ROUND_UP_4 (width * 2);
info->stride[1] = info->stride[0];
info->stride[2] = info->stride[0];
info->offset[0] = 0;
info->offset[1] = info->stride[0] * height;
info->offset[2] = info->offset[1] * 2;
info->size = info->stride[0] * height * 3;
break;
case GST_VIDEO_FORMAT_UNKNOWN:
default:
GST_ERROR ("invalid format");
g_warning ("invalid format");
break;
}
return 0;
}
Developer ID: pli3, Project: gst-plugins-base, Lines: 101, Source file: video-blend.c
Example 10: recv_sample
//......... part of the code omitted here .........
BaseTimeType *base_time;
GstClockTime offset;
g_signal_emit_by_name (appsink, "pull-sample", &sample);
if (sample == NULL)
return GST_FLOW_OK;
buffer = gst_sample_get_buffer (sample);
if (buffer == NULL) {
ret = GST_FLOW_OK;
goto end;
}
segment = gst_sample_get_segment (sample);
g_object_get (G_OBJECT (self), "state", &state, NULL);
if (state != KMS_URI_ENDPOINT_STATE_START) {
GST_WARNING ("Dropping buffer received in invalid state %" GST_PTR_FORMAT,
buffer);
// TODO: Add a flag to discard buffers until keyframe
ret = GST_FLOW_OK;
goto end;
}
gst_buffer_ref (buffer);
buffer = gst_buffer_make_writable (buffer);
if (GST_BUFFER_PTS_IS_VALID (buffer))
buffer->pts =
gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->pts);
if (GST_BUFFER_DTS_IS_VALID (buffer))
buffer->dts =
gst_segment_to_running_time (segment, GST_FORMAT_TIME, buffer->dts);
BASE_TIME_LOCK (self);
base_time = g_object_get_data (G_OBJECT (self), BASE_TIME_DATA);
if (base_time == NULL) {
base_time = g_slice_new0 (BaseTimeType);
base_time->pts = buffer->pts;
base_time->dts = buffer->dts;
GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
base_time->pts);
g_object_set_data_full (G_OBJECT (self), BASE_TIME_DATA, base_time,
release_base_time_type);
}
if (!GST_CLOCK_TIME_IS_VALID (base_time->pts)
&& GST_BUFFER_PTS_IS_VALID (buffer)) {
base_time->pts = buffer->pts;
GST_DEBUG_OBJECT (appsrc, "Setting pts base time to: %" G_GUINT64_FORMAT,
base_time->pts);
base_time->dts = buffer->dts;
}
if (GST_CLOCK_TIME_IS_VALID (base_time->pts)) {
if (GST_BUFFER_PTS_IS_VALID (buffer)) {
offset = base_time->pts + self->priv->paused_time;
if (buffer->pts > offset) {
buffer->pts -= offset;
} else {
buffer->pts = 0;
}
}
}
if (GST_CLOCK_TIME_IS_VALID (base_time->dts)) {
if (GST_BUFFER_DTS_IS_VALID (buffer)) {
offset = base_time->dts + self->priv->paused_time;
if (buffer->dts > offset) {
buffer->dts -= offset;
} else {
buffer->dts = 0;
}
}
}
BASE_TIME_UNLOCK (GST_OBJECT_PARENT (appsink));
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);
if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER))
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
ret = gst_app_src_push_buffer (appsrc, buffer);
if (ret != GST_FLOW_OK) {
/* something wrong */
GST_ERROR ("Could not send buffer to appsrc %s. Cause: %s",
GST_ELEMENT_NAME (appsrc), gst_flow_get_name (ret));
}
end:
if (sample != NULL) {
gst_sample_unref (sample);
}
return ret;
}
Developer ID: s-silva, Project: kms-elements, Lines: 101, Source file: kmsrecorderendpoint.c
Example 11: gst_vaapidecode_handle_frame
static GstFlowReturn
gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
GstVideoCodecFrame * frame)
{
GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
GstVaapiDecoderStatus status;
GstFlowReturn ret;
if (!decode->input_state)
goto not_negotiated;
/* Decode current frame */
for (;;) {
status = gst_vaapi_decoder_decode (decode->decoder, frame);
if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {
/* Make sure that there are no decoded frames waiting in the
output queue. */
ret = gst_vaapidecode_push_all_decoded_frames (decode);
if (ret != GST_FLOW_OK)
goto error_push_all_decoded_frames;
g_mutex_lock (&decode->surface_ready_mutex);
if (gst_vaapi_decoder_check_status (decode->decoder) ==
GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE)
g_cond_wait (&decode->surface_ready, &decode->surface_ready_mutex);
g_mutex_unlock (&decode->surface_ready_mutex);
continue;
}
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
goto error_decode;
break;
}
/* Note that gst_vaapi_decoder_decode cannot return success without
completing the decode and pushing all decoded frames into the output
queue */
return gst_vaapidecode_push_all_decoded_frames (decode);
/* ERRORS */
error_push_all_decoded_frames:
{
GST_ERROR ("push loop error while decoding %d", ret);
gst_video_decoder_drop_frame (vdec, frame);
return ret;
}
error_decode:
{
GST_ERROR ("decode error %d", status);
switch (status) {
case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
ret = GST_FLOW_NOT_SUPPORTED;
break;
default:
GST_VIDEO_DECODER_ERROR (vdec, 1, STREAM, DECODE, ("Decoding error"),
("Decode error %d", status), ret);
break;
}
gst_video_decoder_drop_frame (vdec, frame);
return ret;
}
not_negotiated:
{
GST_ERROR_OBJECT (decode, "not negotiated");
ret = GST_FLOW_NOT_NEGOTIATED;
gst_video_decoder_drop_frame (vdec, frame);
return ret;
}
}
Developer ID: zzoon, Project: gstreamer-vaapi, Lines: 70, Source file: gstvaapidecode.c
Example 12: gst_gcs_set_caps
static gboolean gst_gcs_set_caps(GstBaseTransform * btrans, GstCaps * incaps, GstCaps * outcaps)
{
GstGcs *gcs = GST_GCS (btrans);
gint in_width, in_height;
gint out_width, out_height;
GST_GCS_LOCK (gcs);
gst_video_format_parse_caps(incaps, &gcs->in_format, &in_width, &in_height);
gst_video_format_parse_caps(outcaps, &gcs->out_format, &out_width, &out_height);
if (!(gcs->in_format == gcs->out_format) ||
!(in_width == out_width && in_height == out_height)) {
GST_WARNING("Failed to parse caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, incaps, outcaps);
GST_GCS_UNLOCK (gcs);
return FALSE;
}
gcs->width = in_width;
gcs->height = in_height;
GST_INFO("Initialising Gcs...");
gst_pad_set_event_function(GST_BASE_TRANSFORM_SINK_PAD(gcs), gst_gcs_sink_event);
const CvSize size = cvSize(gcs->width, gcs->height);
GST_WARNING (" width %d, height %d", gcs->width, gcs->height);
//////////////////////////////////////////////////////////////////////////////
// allocate image structs in all spaces///////////////////////////////////////
gcs->pImageRGBA = cvCreateImageHeader(size, IPL_DEPTH_8U, 4);
gcs->pImgRGB = cvCreateImage(size, IPL_DEPTH_8U, 3);
gcs->pImgScratch = cvCreateImage(size, IPL_DEPTH_8U, 3);
gcs->pImgGRAY = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgGRAY_copy = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgGRAY_diff = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgGRAY_1 = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgGRAY_1copy= cvCreateImage(size, IPL_DEPTH_8U, 1);
cvZero( gcs->pImgGRAY_1 );
cvZero( gcs->pImgGRAY_1copy );
gcs->pImgChA = cvCreateImageHeader(size, IPL_DEPTH_8U, 1);
gcs->pImgCh1 = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgCh2 = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgCh3 = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImgChX = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->pImg_skin = cvCreateImage(size, IPL_DEPTH_8U, 1);
gcs->grabcut_mask = cvCreateMat( size.height, size.width, CV_8UC1);
cvZero(gcs->grabcut_mask);
initialise_grabcut( &(gcs->GC), gcs->pImgRGB, gcs->grabcut_mask );
gcs->bbox_prev = cvRect( 60,70, 210, 170 );
//////////////////////////////////////////////////////////////////////////////
#ifdef KMEANS
// k-means allocation ////////////////////////////////////////////////////////
gcs->pImgRGB_kmeans = cvCreateImage(size, IPL_DEPTH_8U, 3);
gcs->num_samples = size.height * size.width;
gcs->kmeans_points = cvCreateMat( gcs->num_samples, 5, CV_32FC1);
gcs->kmeans_clusters = cvCreateMat( gcs->num_samples, 1, CV_32SC1);
#endif //KMEANS
//////////////////////////////////////////////////////////////////////////////
// Init ghost file ///////////////////////////////////////////////////////////
curlMemoryStructGCS chunk;
//gchar url[]="file:///home/mcasassa/imco2/mods/gstreamer/cyclops/shaders/mask8.png";
//gchar url[]="file:///apps/devnfs/mcasassa/mask_320x240.png";
char curlErrBuf[255];
if( gcs->ghostfilename){
if(FALSE == curl_download(gcs->ghostfilename, "", &chunk, curlErrBuf)) {
GST_ERROR("download failed, err: %s", curlErrBuf);
}
char errBuf[255];
if( FALSE == read_png(&chunk, &(gcs->raw_image), &(gcs->info), errBuf)){
GST_ERROR("png load failed, err: %s", errBuf);
}
const CvSize sizegh = cvSize(gcs->info.width, gcs->info.height);
gcs->cvGhost = cvCreateImageHeader(sizegh, IPL_DEPTH_8U, gcs->info.channels);
gcs->cvGhost->imageData = (char*)gcs->raw_image;
gcs->cvGhostBw = cvCreateImage(sizegh, IPL_DEPTH_8U, 1);
if( gcs->info.channels > 1){
cvCvtColor( gcs->cvGhost, gcs->cvGhostBw, CV_RGB2GRAY );
}
else{
cvCopy(gcs->cvGhost, gcs->cvGhostBw, NULL);
}
gcs->cvGhostBwResized = cvCreateImage(size, IPL_DEPTH_8U, 1);
cvResize( gcs->cvGhostBw, gcs->cvGhostBwResized, CV_INTER_LINEAR);
gcs->cvGhostBwAffined = cvCreateImage(size, IPL_DEPTH_8U, 1);
}
GST_INFO(" Collected caps, image in size (%dx%d), ghost size (%dx%d) %dch",gcs->width, gcs->height,
gcs->info.width, gcs->info.height, gcs->info.channels );
//......... part of the code omitted here .........
Developer ID: miguelao, Project: gst_plugins_tsunami, Lines: 101, Source file: gstgcs.c
Example 13: gst_rtp_qdm2_depay_process
//......... part of the code omitted here .........
/* HEADERS */
GST_DEBUG ("Headers");
/* Store the incoming timestamp */
rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
/* flush the internal data if needed */
flush_data (rtpqdm2depay);
if (G_UNLIKELY (!rtpqdm2depay->configured)) {
guint8 *ourdata;
GstBuffer *codecdata;
GstCaps *caps;
/* First bytes are unknown */
GST_MEMDUMP ("Header", payload + pos, 32);
ourdata = payload + pos + 10;
pos += 10;
rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4);
rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8);
rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12);
rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16);
rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20);
rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24);
/* 16 bit empty block (0x02 0x00) */
pos += 30;
GST_DEBUG
("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d",
rtpqdm2depay->channs, rtpqdm2depay->samplerate,
rtpqdm2depay->bitrate, rtpqdm2depay->blocksize,
rtpqdm2depay->framesize, rtpqdm2depay->packetsize);
/* Caps */
codecdata = gst_buffer_new_and_alloc (48);
memcpy (GST_BUFFER_DATA (codecdata), headheader, 20);
memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28);
caps = gst_caps_new_simple ("audio/x-qdm2",
"samplesize", G_TYPE_INT, 16,
"rate", G_TYPE_INT, rtpqdm2depay->samplerate,
"channels", G_TYPE_INT, rtpqdm2depay->channs,
"codec_data", GST_TYPE_BUFFER, codecdata, NULL);
gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), caps);
gst_caps_unref (caps);
rtpqdm2depay->configured = TRUE;
} else {
GST_DEBUG ("Already configured, skipping headers");
pos += 40;
}
break;
default:{
/* Shuffled packet contents */
guint packetid = payload[pos++];
guint packettype = payload[pos++];
guint packlen = payload[pos++];
guint hsize = 2;
GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
packetid, packettype, packlen);
/* Packets bigger than 0xff bytes have a type with the high bit set */
if (G_UNLIKELY (packettype & 0x80)) {
packettype &= 0x7f;
packlen <<= 8;
packlen |= payload[pos++];
hsize = 3;
GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
packetid, packettype, packlen);
}
if (packettype > 0x7f) {
GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!");
}
add_packet (rtpqdm2depay, packetid, packlen + hsize,
payload + pos - hsize);
pos += packlen;
}
}
}
GST_DEBUG ("final pos %d", pos);
avail = gst_adapter_available (rtpqdm2depay->adapter);
if (G_UNLIKELY (avail)) {
GST_DEBUG ("Pushing out %d bytes of collected data", avail);
outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail);
GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
return outbuf;
}
}
return NULL;
/* ERRORS */
bad_packet:
{
GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
(NULL), ("Packet was too short"));
return NULL;
}
}
Developer ID: pli3, Project: gst-plugins-good, Lines: 101, Source file: gstrtpqdmdepay.c
Example 14: on_error_cb
static void
on_error_cb (GMarkupParseContext * context, GError * error, gpointer user_data)
{
GST_ERROR ("Error parsing file: %s", error->message);
}
Developer ID: MathieuDuponchelle, Project: gst-devtools, Lines: 5, Source file: media-descriptor-parser.c
Example 15: gst_egl_allocate_eglimage
//.........这里部分代码省略.........
if (!gst_egl_image_memory_is_mappable ())
flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;
/* See https://bugzilla.gnome.org/show_bug.cgi?id=695203 */
flags |= GST_MEMORY_FLAG_NO_SHARE;
gst_video_info_set_format (&info, format, width, height);
GST_DEBUG ("Allocating EGL Image format %s width %d height %d",
gst_video_format_to_string (format), width, height);
switch (format) {
case GST_VIDEO_FORMAT_RGBA:{
gsize size;
EGLImageKHR image;
mem[0] =
gst_egl_image_allocator_alloc (allocator, ctx->gst_display,
GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info),
GST_VIDEO_INFO_HEIGHT (&info), &size);
if (mem[0]) {
stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info);
n_mem = 1;
GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE);
} else {
data = g_slice_new0 (GstEGLGLESImageData);
stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4);
size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info);
glGenTextures (1, &data->texture);
if (got_gl_error ("glGenTextures"))
goto mem_error;
glBindTexture (GL_TEXTURE_2D, data->texture);
if (got_gl_error ("glBindTexture"))
goto mem_error;
/* Set 2D resizing params */
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
/* If these are not set the texture image unit will return
* * (R, G, B, A) = black on glTexImage2D for non-POT width/height
* * frames. For a deeper explanation take a look at the OpenGL ES
* * documentation for glTexParameter */
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
if (got_gl_error ("glTexParameteri"))
goto mem_error;
glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA,
GST_VIDEO_INFO_WIDTH (&info),
GST_VIDEO_INFO_HEIGHT (&info), 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
if (got_gl_error ("glTexImage2D"))
goto mem_error;
image =
eglCreateImageKHR (gst_egl_display_get (ctx->gst_display),
ctx->context, EGL_GL_TEXTURE_2D_KHR,
(EGLClientBuffer) (guintptr) data->texture, NULL);
if (got_egl_error ("eglCreateImageKHR"))
goto mem_error;
mem[0] =
gst_egl_image_allocator_wrap (allocator, ctx->gst_display,
image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, flags, size, data, NULL);
n_mem = 1;
}
}
break;
default:
goto mem_error;
break;
}
buffer = gst_buffer_new ();
gst_buffer_add_video_meta_full (buffer, 0, format, width, height,
GST_VIDEO_INFO_N_PLANES (&info), offset, stride);
/* n_mem could be reused for planar colorspaces, for now its == 1 for RGBA */
for (i = 0; i < n_mem; i++)
gst_buffer_append_memory (buffer, mem[i]);
return buffer;
mem_error:
{
GST_ERROR ("Failed to create EGLImage");
if (data)
gst_egl_gles_image_data_free (data);
if (mem[0])
gst_memory_unref (mem[0]);
return NULL;
}
}
Developer ID: 01org, Project: gst-omx, Lines: 101, Source file: testegl.c