本文整理汇总了C++中GST_VIDEO_FRAME_PLANE_STRIDE函数的典型用法代码示例。如果您正苦于以下问题:C++ GST_VIDEO_FRAME_PLANE_STRIDE函数的具体用法?C++ GST_VIDEO_FRAME_PLANE_STRIDE怎么用?C++ GST_VIDEO_FRAME_PLANE_STRIDE使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了GST_VIDEO_FRAME_PLANE_STRIDE函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。
示例1: gst_video_crop_transform_packed_simple
/* Crop a packed-pixel frame whose format allows cropping at any pixel
 * boundary: simply copy the cropped window to the output, row by row. */
static void
gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  guint8 *src, *dst;
  gint out_w, out_h;
  gint src_stride, dst_stride;
  guint row, row_bytes;

  out_w = GST_VIDEO_FRAME_WIDTH (out_frame);
  out_h = GST_VIDEO_FRAME_HEIGHT (out_frame);

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dst = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);

  /* skip the cropped top rows, then the cropped left pixels */
  src += vcrop->crop_top * src_stride;
  src += vcrop->crop_left * GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  /* payload bytes per output row (excludes stride padding) */
  row_bytes = out_w * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);

  for (row = 0; row < (guint) out_h; ++row) {
    memcpy (dst, src, row_bytes);
    src += src_stride;
    dst += dst_stride;
  }
}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:29,代码来源:gstvideocrop.c
示例2: gst_yuv_to_rgb_transform_frame
/* this function does the actual processing: converts the mapped I420
 * input frame to packed ARGB in the output frame via libyuv */
static GstFlowReturn
gst_yuv_to_rgb_transform_frame (GstVideoFilter *filter, GstVideoFrame *in_frame, GstVideoFrame *out_frame)
{
  GstYuvToRgb *rgbtoyuv = GST_YUVTORGB_CAST (filter);
  gint out_width, out_height, out_stride;
  gint luma_stride, chroma_stride;
  guint32 *argb_out;
  guint8 *luma, *cb, *cr;

  /* input: three planes, U and V share the same (subsampled) stride */
  luma_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  chroma_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
  luma = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  cb = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
  cr = (guint8*) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2);

  /* output: single packed plane */
  out_width = GST_VIDEO_FRAME_WIDTH (out_frame);
  out_height = GST_VIDEO_FRAME_HEIGHT (out_frame);
  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  argb_out = (guint32*) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  libyuv::I420ToARGB (luma, luma_stride,
                      cb, chroma_stride,
                      cr, chroma_stride,
                      (guint8*) argb_out, out_stride,
                      out_width, out_height);

  return GST_FLOW_OK;
}
开发者ID:ksb2go,项目名称:gst-plugins,代码行数:35,代码来源:gstyuvtorgb.cpp
示例3: copy_field
/* copy_field:
 * @interlace: the interlace element (used for logging on failure)
 * @dest: destination buffer receiving one field
 * @src: source buffer supplying that field
 * @field_index: 0 for the top field, 1 for the bottom field
 *
 * Copies every second line of each plane of @src (starting at line
 * @field_index) into the same lines of @dest, i.e. transfers a single
 * field of an interlaced frame. Both buffers are mapped for the copy and
 * unmapped before returning.
 */
static void
copy_field (GstInterlace * interlace, GstBuffer * dest, GstBuffer * src,
int field_index)
{
GstVideoInfo *info = &interlace->info;
gint i, j, n_planes;
guint8 *d, *s;
GstVideoFrame dframe, sframe;
if (!gst_video_frame_map (&dframe, info, dest, GST_MAP_WRITE))
goto dest_map_failed;
if (!gst_video_frame_map (&sframe, info, src, GST_MAP_READ))
goto src_map_failed;
n_planes = GST_VIDEO_FRAME_N_PLANES (&dframe);
for (i = 0; i < n_planes; i++) {
gint cheight, cwidth;
gint ss, ds;
d = GST_VIDEO_FRAME_PLANE_DATA (&dframe, i);
s = GST_VIDEO_FRAME_PLANE_DATA (&sframe, i);
ds = GST_VIDEO_FRAME_PLANE_STRIDE (&dframe, i);
ss = GST_VIDEO_FRAME_PLANE_STRIDE (&sframe, i);
/* advance to the first line of the requested field */
d += field_index * ds;
s += field_index * ss;
cheight = GST_VIDEO_FRAME_COMP_HEIGHT (&dframe, i);
/* copy whole stride rows; strides may be negative (bottom-up layouts),
 * hence ABS; the smaller of the two bounds the safe copy width */
cwidth = MIN (ABS (ss), ABS (ds));
/* step two lines at a time so only the selected field is touched */
for (j = field_index; j < cheight; j += 2) {
memcpy (d, s, cwidth);
d += ds * 2;
s += ss * 2;
}
}
gst_video_frame_unmap (&dframe);
gst_video_frame_unmap (&sframe);
return;
dest_map_failed:
{
GST_ERROR_OBJECT (interlace, "failed to map dest");
return;
}
src_map_failed:
{
GST_ERROR_OBJECT (interlace, "failed to map src");
/* dest was mapped successfully above, so it must be unmapped here */
gst_video_frame_unmap (&dframe);
return;
}
}
开发者ID:jcaden,项目名称:gst-plugins-bad,代码行数:56,代码来源:gstinterlace.c
示例4: gst_video_balance_semiplanar_yuv
/* Apply the balance lookup tables in-place to a semi-planar YUV frame
 * (NV12/NV21): the Y plane gets a per-sample lookup, the interleaved
 * U/V plane gets a joint (u,v) -> (u',v') lookup. */
static void
gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint row, col;
  guint8 *luma_base, *chroma_base;
  gint luma_stride, chroma_stride;
  gint luma_w, luma_h, chroma_w, chroma_h;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;
  gboolean is_nv12;
  gint u_off, v_off;

  luma_w = GST_VIDEO_FRAME_WIDTH (frame);
  luma_h = GST_VIDEO_FRAME_HEIGHT (frame);
  luma_base = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  luma_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  /* luma: plain per-sample table lookup */
  for (row = 0; row < luma_h; row++) {
    guint8 *p = luma_base + row * luma_stride;
    for (col = 0; col < luma_w; col++)
      p[col] = tabley[p[col]];
  }

  chroma_w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  chroma_h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
  chroma_base = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
  chroma_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);

  /* NV12 interleaves U then V per chroma sample; NV21 the other way */
  is_nv12 = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12;
  u_off = is_nv12 ? 0 : 1;
  v_off = is_nv12 ? 1 : 0;

  for (row = 0; row < chroma_h; row++) {
    guint8 *p = chroma_base + row * chroma_stride;
    for (col = 0; col < chroma_w; col++) {
      guint8 u = p[u_off];
      guint8 v = p[v_off];
      p[u_off] = tableu[u][v];
      p[v_off] = tablev[u][v];
      p += 2;
    }
  }
}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:56,代码来源:gstvideobalance.c
示例5: gst_video_balance_planar_yuv
/* Apply the balance lookup tables in-place to a planar YUV frame with
 * separate Y, U and V planes: per-sample lookup on luma, joint
 * (u,v) -> (u',v') lookup on the chroma planes. */
static void
gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint row, col;
  guint8 *yplane, *uplane, *vplane;
  gint ystride, ustride, vstride;
  gint yw, yh, cw, ch;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  yw = GST_VIDEO_FRAME_WIDTH (frame);
  yh = GST_VIDEO_FRAME_HEIGHT (frame);
  yplane = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  /* luma plane */
  for (row = 0; row < yh; row++) {
    guint8 *p = yplane + row * ystride;
    for (col = 0; col < yw; col++)
      p[col] = tabley[p[col]];
  }

  cw = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
  ch = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
  uplane = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
  vplane = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
  ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
  vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);

  /* chroma planes: U and V are looked up jointly, walk both together */
  for (row = 0; row < ch; row++) {
    guint8 *up = uplane + row * ustride;
    guint8 *vp = vplane + row * vstride;
    for (col = 0; col < cw; col++) {
      guint8 u = up[col];
      guint8 v = vp[col];
      up[col] = tableu[u][v];
      vp[col] = tablev[u][v];
    }
  }
}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:54,代码来源:gstvideobalance.c
示例6: gst_video_crop_transform_packed_complex
/* gst_video_crop_transform_packed_complex:
 * Crop packed 4:2:2 formats (UYVY/YUY2/YVYU) where two luma samples share
 * one chroma pair ("macro-pixel"), so a crop cannot start in the middle
 * of a macro-pixel. For an odd left crop, the whole macro-pixel is copied
 * and the Y samples are then shifted one pixel left (chroma is left
 * slightly misaligned on purpose, see inline comment).
 */
static void
gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
guint8 *in_data, *out_data;
guint i, dx;
gint width, height;
gint in_stride;
gint out_stride;
width = GST_VIDEO_FRAME_WIDTH (out_frame);
height = GST_VIDEO_FRAME_HEIGHT (out_frame);
in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
/* skip cropped top rows */
in_data += vcrop->crop_top * in_stride;
/* rounding down here so we end up at the start of a macro-pixel and not
 * in the middle of one */
in_data += ROUND_DOWN_2 (vcrop->crop_left) *
GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
/* payload bytes per output row */
dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);
/* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
 * YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
if ((vcrop->crop_left % 2) != 0) {
/* odd left crop: copy from the macro-pixel boundary, then fix up luma */
for (i = 0; i < height; ++i) {
gint j;
memcpy (out_data, in_data, dx);
/* move just the Y samples one pixel to the left, don't worry about
 * chroma shift */
for (j = vcrop->macro_y_off; j < out_stride - 2; j += 2)
out_data[j] = in_data[j + 2];
in_data += in_stride;
out_data += out_stride;
}
} else {
/* even left crop: a straight row-by-row copy is sufficient */
for (i = 0; i < height; ++i) {
memcpy (out_data, in_data, dx);
in_data += in_stride;
out_data += out_stride;
}
}
}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:52,代码来源:gstvideocrop.c
示例7: gst_CopyPicture
/* Copy the frame data from the GstBuffer (from decoder)
 * to the picture obtained from downstream in VLC.
 * This function should be avoided as much
 * as possible, since it involves a complete frame copy. */
static void gst_CopyPicture( picture_t *p_pic, GstVideoFrame *p_frame )
{
    int i_plane;

    for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
    {
        uint8_t *p_dst = p_pic->p[i_plane].p_pixels;
        uint8_t *p_src = GST_VIDEO_FRAME_PLANE_DATA( p_frame, i_plane );
        int i_dst_stride = p_pic->p[i_plane].i_pitch;
        int i_src_stride = GST_VIDEO_FRAME_PLANE_STRIDE( p_frame, i_plane );
        /* bytes per line = samples * bytes-per-sample for this component */
        int i_w = GST_VIDEO_FRAME_COMP_WIDTH( p_frame,
                i_plane ) * GST_VIDEO_FRAME_COMP_PSTRIDE( p_frame, i_plane );
        int i_h = GST_VIDEO_FRAME_COMP_HEIGHT( p_frame, i_plane );
        /* never write past the smaller of the two plane heights */
        int i_lines = __MIN( p_pic->p[i_plane].i_lines, i_h );
        int i_line;

        for( i_line = 0; i_line < i_lines; i_line++ )
        {
            memcpy( p_dst, p_src, i_w );
            p_src += i_src_stride;
            p_dst += i_dst_stride;
        }
    }
}
开发者ID:CityFire,项目名称:vlc,代码行数:32,代码来源:gstdecode.c
示例8: openni2_read_gstbuffer
static GstFlowReturn
openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
{
openni::Status rc = openni::STATUS_OK;
openni::VideoStream * pStream = src->depth;
int changedStreamDummy;
GstVideoFrame vframe;
uint64_t oni_ts;
/* Block until we get some data */
rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
SAMPLE_READ_WAIT_TIMEOUT);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read timeout: %s",
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
if (src->depth->isValid () && src->color->isValid () &&
src->sourcetype == SOURCETYPE_BOTH) {
rc = src->depth->readFrame (src->depthFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
rc = src->color->readFrame (src->colorFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
/* Copy colour information */
gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);
guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
guint8 *pColor = (guint8 *) src->colorFrame->getData ();
/* Add depth as 8bit alpha channel, depth is 16bit samples. */
guint16 *pDepth = (guint16 *) src->depthFrame->getData ();
for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
for (int j = 0; j < src->colorFrame->getWidth (); ++j) {
pData[4 * j + 0] = pColor[3 * j + 0];
pData[4 * j + 1] = pColor[3 * j + 1];
pData[4 * j + 2] = pColor[3 * j + 2];
pData[4 * j + 3] = pDepth[j] >> 8;
}
pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
pColor += src->colorFrame->getStrideInBytes ();
pDepth += src->depthFrame->getStrideInBytes () / 2;
}
gst_video_frame_unmap (&vframe);
oni_ts = src->colorFrame->getTimestamp () * 1000;
GST_LOG_OBJECT (src, "sending buffer (%d+%d)B",
src->colorFrame->getDataSize (),
src->depthFrame->getDataSize ());
} else if (src->depth->isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
开发者ID:Distrotech,项目名称:gst-plugins-bad,代码行数:60,代码来源:gstopenni2src.cpp
示例9: fill_image_planar8_3
/* Unpack the three 8-bit planes of @frame into the densely-packed
 * per-component sample arrays of an OpenJPEG image. */
static void
fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint comp;

  for (comp = 0; comp < 3; comp++) {
    gint w = GST_VIDEO_FRAME_COMP_WIDTH (frame, comp);
    gint h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, comp);
    const guint8 *row = GST_VIDEO_FRAME_COMP_DATA (frame, comp);
    gint stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, comp);
    gint *out = image->comps[comp].data;
    gint x, y;

    /* widen each 8-bit sample to gint; output rows have no padding */
    for (y = 0; y < h; y++) {
      for (x = 0; x < w; x++)
        *out++ = row[x];
      row += stride;
    }
  }
}
开发者ID:ndufresne,项目名称:gst-plugins-bad,代码行数:26,代码来源:gstopenjpegenc.c
示例10: fill_frame_planar16_1
/* Copy the single component of an OpenJPEG image into a 16-bit
 * single-plane frame, left-shifting each sample so the component's
 * precision fills the full 16 bits. */
static void
fill_frame_planar16_1 (GstVideoFrame * frame, opj_image_t * image)
{
  gint w = GST_VIDEO_FRAME_WIDTH (frame);
  gint h = GST_VIDEO_FRAME_HEIGHT (frame);
  guint16 *row = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  /* stride is in bytes; convert to 16-bit samples */
  gint dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
  const gint *in = image->comps[0].data;
  gint shift = 16 - image->comps[0].prec;
  gint x, y;

  for (y = 0; y < h; y++) {
    for (x = 0; x < w; x++)
      row[x] = *in++ << shift;
    row += dstride;
  }
}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:30,代码来源:gstopenjpegdec.c
示例11: fill_frame_packed8_3
/* Pack the three components of an OpenJPEG image into a 4-bytes-per-pixel
 * frame at byte offsets 1..3; byte 0 (alpha/padding) is left untouched. */
static void
fill_frame_packed8_3 (GstVideoFrame * frame, opj_image_t * image)
{
  gint w = GST_VIDEO_FRAME_WIDTH (frame);
  gint h = GST_VIDEO_FRAME_HEIGHT (frame);
  guint8 *row = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  gint dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  const gint *c0 = image->comps[0].data;
  const gint *c1 = image->comps[1].data;
  const gint *c2 = image->comps[2].data;
  gint x, y;

  for (y = 0; y < h; y++) {
    guint8 *px = row;
    for (x = 0; x < w; x++) {
      px[1] = *c0++;
      px[2] = *c1++;
      px[3] = *c2++;
      px += 4;
    }
    row += dstride;
  }
}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:33,代码来源:gstopenjpegdec.c
示例12: fill_image_packed8_3
/* Unpack a 4-bytes-per-pixel frame (components at byte offsets 1..3,
 * byte 0 is alpha/padding and is ignored) into the three per-component
 * sample arrays of an OpenJPEG image. */
static void
fill_image_packed8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint w = GST_VIDEO_FRAME_WIDTH (frame);
  gint h = GST_VIDEO_FRAME_HEIGHT (frame);
  const guint8 *row = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  gint sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  gint *c0 = image->comps[0].data;
  gint *c1 = image->comps[1].data;
  gint *c2 = image->comps[2].data;
  gint x, y;

  for (y = 0; y < h; y++) {
    const guint8 *px = row;
    for (x = 0; x < w; x++) {
      *c0++ = px[1];
      *c1++ = px[2];
      *c2++ = px[3];
      px += 4;
    }
    row += sstride;
  }
}
开发者ID:ndufresne,项目名称:gst-plugins-bad,代码行数:33,代码来源:gstopenjpegenc.c
示例13: gst_video_balance_packed_rgb
/* Apply the balance lookup tables in-place to packed RGB: each pixel is
 * converted to YUV, the tables are applied in YUV space, and the result
 * is converted back to RGB. */
static void
gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
    GstVideoFrame * frame)
{
  gint row, col;
  gint width, height;
  gint stride, pstride, padding;
  gint off_r, off_g, off_b;
  guint8 *pixel;
  guint8 *tabley = videobalance->tabley;
  guint8 **tableu = videobalance->tableu;
  guint8 **tablev = videobalance->tablev;

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  /* byte offsets of R, G, B inside one pixel */
  off_r = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  off_g = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  off_b = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  pixel = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* padding bytes at the end of each row */
  padding = stride - pstride * width;

  for (row = 0; row < height; row++) {
    for (col = 0; col < width; col++) {
      gint r = pixel[off_r];
      gint g = pixel[off_g];
      gint b = pixel[off_b];
      gint y, u, v, u_tmp, v_tmp;

      /* RGB -> YUV (SDTV coefficients) */
      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      /* apply the balance tables in YUV space */
      y = tabley[CLAMP (y, 0, 255)];
      u_tmp = CLAMP (u_tmp, 0, 255);
      v_tmp = CLAMP (v_tmp, 0, 255);
      u = tableu[u_tmp][v_tmp];
      v = tablev[u_tmp][v_tmp];

      /* YUV -> RGB */
      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      pixel[off_r] = CLAMP (r, 0, 255);
      pixel[off_g] = CLAMP (g, 0, 255);
      pixel[off_b] = CLAMP (b, 0, 255);

      pixel += pstride;
    }
    pixel += padding;
  }
}
开发者ID:PeterXu,项目名称:gst-mobile,代码行数:59,代码来源:gstvideobalance.c
示例14: gst_video_crop_transform_planar
/* gst_video_crop_transform_planar:
 * @vcrop: crop element carrying crop_top/crop_left offsets
 * @in_frame: mapped source frame
 * @out_frame: mapped destination frame (its size defines the crop window)
 *
 * Crops a planar YUV frame with 2x2-subsampled chroma (e.g. I420/YV12):
 * the Y plane is copied line by line from the crop offset, then the U and
 * V planes with the offsets halved (rounded down) to match subsampling.
 */
static void
gst_video_crop_transform_planar (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
{
  gint width, height;
  gint y_in_stride, y_out_stride;
  gint u_in_stride, u_out_stride;
  gint v_in_stride, v_out_stride;
  guint8 *y_out, *u_out, *v_out;
  guint8 *y_in, *u_in, *v_in;
  guint i, dx;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);

  /* the strides are loop-invariant: look them up once instead of
   * re-evaluating the accessor macros on every copied line */
  y_in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
  y_out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  u_in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
  u_out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1);
  v_in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 2);
  v_out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 2);

  /* Y plane */
  y_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  y_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
  y_in += (vcrop->crop_top * y_in_stride) + vcrop->crop_left;
  dx = width;
  for (i = 0; i < height; ++i) {
    memcpy (y_out, y_in, dx);
    y_in += y_in_stride;
    y_out += y_out_stride;
  }

  /* U + V planes: chroma is subsampled 2x2, so halve the crop offsets */
  u_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
  u_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1);
  u_in += (vcrop->crop_top / 2) * u_in_stride;
  u_in += vcrop->crop_left / 2;

  v_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 2);
  v_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 2);
  v_in += (vcrop->crop_top / 2) * v_in_stride;
  v_in += vcrop->crop_left / 2;

  /* round up so odd output sizes still copy the last chroma column/row */
  dx = GST_ROUND_UP_2 (width) / 2;
  for (i = 0; i < GST_ROUND_UP_2 (height) / 2; ++i) {
    memcpy (u_out, u_in, dx);
    memcpy (v_out, v_in, dx);
    u_in += u_in_stride;
    u_out += u_out_stride;
    v_in += v_in_stride;
    v_out += v_out_stride;
  }
}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:51,代码来源:gstvideocrop.c
示例15: gst_smpte_alpha_process_ayuv_ayuv
/* gst_smpte_alpha_process_ayuv_ayuv:
 * @smpte: the element (for logging)
 * @in_frame: source AYUV frame
 * @out_frame: destination AYUV frame
 * @mask: transition mask, one 32-bit value per pixel
 * @border: width of the soft edge of the transition (0 treated as 1)
 * @pos: current transition position
 *
 * Copies the source to the destination while scaling the alpha channel
 * (the first byte of each AYUV pixel) by how far the mask value lies
 * inside the [pos - border, pos] window: values below the window give
 * alpha 0, values above give full alpha, values inside ramp linearly.
 */
static void
gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte,
const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
gint border, gint pos)
{
gint i, j;
const guint32 *maskp;
gint value;
gint min, max;
gint width, height;
guint8 *in, *out;
gint src_wrap, dest_wrap;
/* a zero border would divide by zero below; use the minimal ramp */
if (border == 0)
border++;
min = pos - border;
max = pos;
GST_DEBUG_OBJECT (smpte, "pos %d, min %d, max %d, border %d", pos, min, max,
border);
maskp = mask->data;
width = GST_VIDEO_FRAME_WIDTH (out_frame);
height = GST_VIDEO_FRAME_HEIGHT (out_frame);
in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
/* stride padding after each row of 4-byte AYUV pixels */
src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2);
dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);
/* we basically copy the source to dest but we scale the alpha channel with
 * the mask */
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
value = *maskp++;
/* alpha byte: scale by the 8.8 fixed-point ramp factor */
*out++ = (*in++ * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
/* Y, U, V bytes copied unchanged */
*out++ = *in++;
*out++ = *in++;
*out++ = *in++;
}
in += src_wrap;
out += dest_wrap;
}
}
开发者ID:adesurya,项目名称:gst-mobile,代码行数:45,代码来源:gstsmptealpha.c
示例16: gst_color_effects_transform_rgb
static void
gst_color_effects_transform_rgb (GstColorEffects * filter,
GstVideoFrame * frame)
{
gint i, j;
gint width, height;
gint pixel_stride, row_stride, row_wrap;
guint32 r, g, b;
guint32 luma;
gint offsets[3];
guint8 *data;
data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
offsets[0] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 0);
offsets[1] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 1);
offsets[2] = GST_VIDEO_FRAME_COMP_POFFSET (frame, 2);
width = GST_VIDEO_FRAME_WIDTH (frame);
height = GST_VIDEO_FRAME_HEIGHT (frame);
row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
row_wrap = row_stride - pixel_stride * width;
/* transform */
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
r = data[offsets[0]];
g = data[offsets[1]];
b = data[offsets[2]];
if (filter->map_luma) {
/* BT. 709 coefficients in B8 fixed point */
/* 0.2126 R + 0.7152 G + 0.0722 B */
luma = ((r << 8) * 54) + ((g << 8) * 183) + ((b << 8) * 19);
luma >>= 16; /* get integer part */
luma *= 3; /* times 3 to retrieve the correct pixel from
* the lut */
/* map luma to lookup table */
/* src.luma |-> table[luma].rgb */
data[offsets[0]] = filter->table[luma];
data[offsets[1]] = filter->table[luma + 1];
data[offsets[2]] = filter->table[luma + 2];
} else {
/* map each color component to the correspondent lut color */
/* src.r |-> table[r].r */
/* src.g |-> table[g].g */
/* src.b |-> table[b].b */
data[offsets[0]] = filter->table[r * 3];
data[offsets[1]] = filter->table[g * 3 + 1];
data[offsets[2]] = filter->table[b * 3 + 2];
}
data += pixel_stride;
}
开发者ID:Distrotech,项目名称:gst-plugins-bad,代码行数:54,代码来源:gstcoloreffects.c
示例17: gst_cairo_overlay_transform_frame_ip
/* Wrap the mapped frame in a cairo image surface, emit the "draw" signal
 * so the application can paint onto the frame in place, then release the
 * cairo objects again. */
static GstFlowReturn
gst_cairo_overlay_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstCairoOverlay *overlay = GST_CAIRO_OVERLAY (vfilter);
  cairo_surface_t *surface;
  cairo_t *cr;
  cairo_format_t format;
  GstVideoFormat vformat = GST_VIDEO_FRAME_FORMAT (frame);

  /* pick the cairo pixel format that matches the video format's layout */
  if (vformat == GST_VIDEO_FORMAT_ARGB || vformat == GST_VIDEO_FORMAT_BGRA) {
    format = CAIRO_FORMAT_ARGB32;
  } else if (vformat == GST_VIDEO_FORMAT_xRGB
      || vformat == GST_VIDEO_FORMAT_BGRx) {
    format = CAIRO_FORMAT_RGB24;
  } else if (vformat == GST_VIDEO_FORMAT_RGB16) {
    format = CAIRO_FORMAT_RGB16_565;
  } else {
    GST_WARNING ("No matching cairo format for %s",
        gst_video_format_to_string (vformat));
    return GST_FLOW_ERROR;
  }

  surface =
      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (frame,
          0), format, GST_VIDEO_FRAME_WIDTH (frame),
      GST_VIDEO_FRAME_HEIGHT (frame), GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0));
  if (G_UNLIKELY (!surface))
    return GST_FLOW_ERROR;

  cr = cairo_create (surface);
  if (G_UNLIKELY (!cr)) {
    cairo_surface_destroy (surface);
    return GST_FLOW_ERROR;
  }

  /* hand the context plus the buffer's timing to the application */
  g_signal_emit (overlay, gst_cairo_overlay_signals[SIGNAL_DRAW], 0,
      cr, GST_BUFFER_PTS (frame->buffer), GST_BUFFER_DURATION (frame->buffer),
      NULL);

  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  return GST_FLOW_OK;
}
开发者ID:BigBrother-International,项目名称:gst-plugins-good,代码行数:52,代码来源:gstcairooverlay.c
示例18: gst_gamma_packed_rgb_ip
/* Apply gamma correction in-place to packed RGB: each pixel is converted
 * to YUV, only the luma is passed through the gamma table, and the pixel
 * is converted back to RGB. */
static void
gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
{
  gint row, col;
  gint width, height, stride, pstride, padding;
  gint off_r, off_g, off_b;
  const guint8 *table = gamma->gamma_table;
  guint8 *pixel;

  pixel = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);

  /* byte offsets of R, G, B inside one pixel */
  off_r = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
  off_g = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
  off_b = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* padding bytes at the end of each row */
  padding = stride - pstride * width;

  for (row = 0; row < height; row++) {
    for (col = 0; col < width; col++) {
      gint r = pixel[off_r];
      gint g = pixel[off_g];
      gint b = pixel[off_b];
      gint y, u, v;

      /* RGB -> YUV (SDTV coefficients) */
      y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
      u = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
      v = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);

      /* gamma-correct only the luma */
      y = table[CLAMP (y, 0, 255)];

      /* YUV -> RGB */
      r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
      g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
      b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);

      pixel[off_r] = CLAMP (r, 0, 255);
      pixel[off_g] = CLAMP (g, 0, 255);
      pixel[off_b] = CLAMP (b, 0, 255);

      pixel += pstride;
    }
    pixel += padding;
  }
}
开发者ID:adesurya,项目名称:gst-mobile,代码行数:47,代码来源:gstgamma.c
示例19: fill_frame_planar16_4_generic
/* fill_frame_planar16_4_generic:
 * Packs four (possibly subsampled) OpenJPEG image components into a
 * 16-bit 4-samples-per-pixel frame, scaling each component up to 16-bit
 * precision. Component 3 (alpha) is written first in each pixel, then
 * components 0..2 — i.e. an A-first packed layout such as AYUV64.
 */
static void
fill_frame_planar16_4_generic (GstVideoFrame * frame, opj_image_t * image)
{
gint x, y, w, h;
guint16 *data_out, *tmp;
const gint *data_in[4];
gint dstride;
/* dx/dy are per-component subsampling factors; shift scales each
 * component's precision up to 16 bits */
gint dx[4], dy[4], shift[4];
w = GST_VIDEO_FRAME_WIDTH (frame);
h = GST_VIDEO_FRAME_HEIGHT (frame);
data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
/* stride is in bytes; convert to 16-bit samples */
dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
data_in[0] = image->comps[0].data;
data_in[1] = image->comps[1].data;
data_in[2] = image->comps[2].data;
data_in[3] = image->comps[3].data;
dx[0] = image->comps[0].dx;
dx[1] = image->comps[1].dx;
dx[2] = image->comps[2].dx;
dx[3] = image->comps[3].dx;
dy[0] = image->comps[0].dy;
dy[1] = image->comps[1].dy;
dy[2] = image->comps[2].dy;
dy[3] = image->comps[3].dy;
shift[0] = 16 - image->comps[0].prec;
shift[1] = 16 - image->comps[1].prec;
shift[2] = 16 - image->comps[2].prec;
shift[3] = 16 - image->comps[3].prec;
for (y = 0; y < h; y++) {
tmp = data_out;
for (x = 0; x < w; x++) {
/* NOTE(review): the source index scales the row by dy but multiplies
 * by the full width w before dividing by dx — for dx/dy == 1 this is
 * a plain y*w+x; for subsampled components the addressing looks odd
 * (one might expect (y/dy)*(w/dx) + x/dx) — verify against upstream */
tmp[0] = data_in[3][((y / dy[3]) * w + x) / dx[3]] << shift[3];
tmp[1] = data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0];
tmp[2] = data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1];
tmp[3] = data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2];
tmp += 4;
}
data_out += dstride;
}
}
开发者ID:cbetz421,项目名称:gst-plugins-bad,代码行数:47,代码来源:gstopenjpegdec.c
示例20: lock
/* Uploads the current GStreamer sample into a BitmapTexture for the
 * TextureMapper. Returns nullptr if no valid sample/caps are available or
 * the frame cannot be mapped. When the buffer carries a GL texture upload
 * meta (GStreamer >= 1.1) with a single texture, the upload is done on the
 * GPU; otherwise the frame is mapped and copied through system memory. */
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
// Hold the sample lock for the whole function: m_sample may be replaced
// from another thread.
WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
if (!GST_IS_SAMPLE(m_sample.get()))
return nullptr;
GstCaps* caps = gst_sample_get_caps(m_sample.get());
if (!caps)
return nullptr;
GstVideoInfo videoInfo;
gst_video_info_init(&videoInfo);
if (!gst_video_info_from_caps(&videoInfo, caps))
return nullptr;
IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
#if GST_CHECK_VERSION(1, 1, 0)
// Fast path: let the producer upload directly into our GL texture.
GstVideoGLTextureUploadMeta* meta;
if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
guint ids[4] = { textureGL->id(), 0, 0, 0 };
if (gst_video_gl_texture_upload_meta_upload(meta, ids))
return texture;
}
}
#endif
// Slow path: map the frame and copy plane 0 through system memory.
// Right now the TextureMapper only supports chromas with one plane
ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
GstVideoFrame videoFrame;
if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
return nullptr;
int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
gst_video_frame_unmap(&videoFrame);
return texture;
}
开发者ID:rhythmkay,项目名称:webkit,代码行数:46,代码来源:MediaPlayerPrivateGStreamerBase.cpp
注:本文中的GST_VIDEO_FRAME_PLANE_STRIDE函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论