
C++ VideoStream Class Code Examples


This article collects typical usage examples of the VideoStream class in C++. If you are wondering how the C++ VideoStream class is used in practice, or are looking for concrete examples of VideoStream in real code, the hand-picked class code examples below may help.



The following presents 20 code examples of the VideoStream class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.

Example 1: lock

void DisplayChannel::streams_time()
{
    _next_timer_time = 0;
    Lock lock(_streams_lock);
    uint32_t mm_time = get_client().get_mm_time();
    uint32_t next_time = 0;
    VideoStream* stream = _active_streams;
    while (stream) {
        uint32_t next_frame_time;
        if ((next_frame_time = stream->handle_timer_update(mm_time))) {
            if (!next_time || int(next_frame_time - next_time) < 0) {
                next_time = next_frame_time;
            }
        }
        stream = stream->next;
    }
    Lock timer_lock(_timer_lock);
    mm_time = get_client().get_mm_time();
    next_time = mm_time + 15;
    if (next_time && (!_next_timer_time || int(next_time - _next_timer_time) < 0)) {
        get_client().activate_interval_timer(*_streams_timer, MAX(int(next_time - mm_time), 0));
        _next_timer_time = next_time;
    } else if (!_next_timer_time) {
        get_client().deactivate_interval_timer(*_streams_timer);
    }
    timer_lock.unlock();
    lock.unlock();
    Platform::yield();
}
Developer ID: colama, Project: colama-3rdparty-tools, Lines of code: 29, Source file: display_channel.cpp
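
The comparison int(next_frame_time - next_time) < 0 in the example above is a wrap-around-safe ordering test on unsigned millisecond clocks. A minimal, self-contained sketch of the idiom (not taken from the original source):

#include <cstdint>
#include <cstdio>

// Returns true if timestamp a comes before timestamp b, even when the
// 32-bit clock has wrapped between the two readings.
static bool is_earlier(uint32_t a, uint32_t b)
{
    return int32_t(a - b) < 0;
}

int main()
{
    uint32_t near_wrap  = 0xFFFFFFF0u;   // just before the clock wraps
    uint32_t after_wrap = 0x00000010u;   // 0x20 ticks later, past the wrap
    printf("%d\n", is_earlier(near_wrap, after_wrap));   // prints 1
    return 0;
}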


Example 2: guard

OniStatus Recorder::attachStream(VideoStream& stream, OniBool allowLossyCompression)
{
    if (m_wasStarted)
    {
        return ONI_STATUS_ERROR;
    }

    xnl::LockGuard<AttachedStreams> guard(m_streams);
    VideoStream* pStream = &stream;
    if (m_streams.Find(pStream) == m_streams.End())
    {
        if (ONI_STATUS_OK == pStream->addRecorder(*this))
        {
            m_streams[pStream].nodeId                    = ++m_maxId;
            m_streams[pStream].pCodec                    = NULL;
            m_streams[pStream].allowLossyCompression     = allowLossyCompression;
            m_streams[pStream].frameId                   = 0;
            m_streams[pStream].lastOutputTimestamp       = 0;
            m_streams[pStream].lastInputTimestamp        = 0;
            m_streams[pStream].lastNewDataRecordPosition = 0;
            m_streams[pStream].dataIndex.Clear();
            send(Message::MESSAGE_ATTACH, pStream);
            return ONI_STATUS_OK;
        }
    }
    return ONI_STATUS_ERROR;
}
Developer ID: Arkapravo, Project: OpenNI2, Lines of code: 27, Source file: OniRecorder.cpp


Example 3: while

OniStatus Device::close()
{
	--m_openCount;

	if (m_openCount == 0)
	{
		while(m_streams.Begin() != m_streams.End())
		{
			VideoStream* pStream = *m_streams.Begin();
			pStream->stop();
			m_streams.Remove(pStream);
		}
		
		for (int i = 0; i < MAX_SENSORS_PER_DEVICE; ++i)
		{
			if (m_sensors[i] != NULL)
			{
				XN_DELETE(m_sensors[i]);
				m_sensors[i] = NULL;
			}
		}

		if (m_deviceHandle != NULL)
		{
			m_driverHandler.deviceClose(m_deviceHandle);
		}
		m_deviceHandle = NULL;
	}

	return ONI_STATUS_OK;
}
Developer ID: aldebaran, Project: openni2, Lines of code: 31, Source file: OniDevice.cpp


Example 4: ReadVideoFrame

void* VideoStream::ReadVideoFrame(void *arg)
{
    VideoStream *h = (VideoStream*)arg;

    // Read the video one frame at a time
    h->PlayImageSlots();
    return NULL;
}
Developer ID: APCVSRepo, Project: hmi_sdk, Lines of code: 8, Source file: VideoStream.cpp


Example 5: __declspec

	__declspec(dllexport) Status VideoStream_create(VideoStream*& vs, Device* device, SensorType sensorType)
	{
		 VideoStream* vsl = new VideoStream();
		 Status status = vsl->create(*device, sensorType);
		 if (status == STATUS_OK)
			 vs = vsl;
		 else
			 VideoStream_destroy(vsl);
		 return status;
	}
Developer ID: tramper3, Project: NiWrapper.Net, Lines of code: 10, Source file: VideoStream.cpp
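
The wrapper above hands a failed creation to VideoStream_destroy. A hypothetical sketch of what that matching export could look like, in the same exported-C style; the actual NiWrapper.Net implementation may differ:

	__declspec(dllexport) void VideoStream_destroy(VideoStream* vs)
	{
		if (vs != NULL)
		{
			vs->destroy();   // release the underlying OpenNI stream
			delete vs;
		}
	}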


Example 6: readOpenNiColorAndDepth

int readOpenNiColorAndDepth(VideoStream &color , VideoStream &depth,VideoFrameRef &colorFrame,VideoFrameRef &depthFrame)
{
  #if USE_WAITFORANYSTREAM_TO_GRAB
   #warning "Please turn #define USE_WAITFORANYSTREAM_TO_GRAB 0"
   #warning "This is a bad idea taken from OpenNI2/Samples/SimpleViewer , we dont just want to update 'any' frame we really want to snap BOTH and do that sequentially"
   #warning "It is better to sequencially grab them instead of waiting for any stream a couple of times "
   openni::VideoStream** m_streams = new openni::VideoStream*[2];
   m_streams[0] = &depth;
   m_streams[1] = &color;


  unsigned char haveDepth=0,haveColor=0;
  int changedIndex;
  while ( (!haveDepth) || (!haveColor) )
  {
	openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
	if (rc != openni::STATUS_OK)
	{
		fprintf(stderr,"Wait failed\n");
		return 0 ;
	}

  unsigned int i=0;

	switch (changedIndex)
	{
	case 0:
		depth.readFrame(&depthFrame);
		haveDepth=1;
    break;
	case 1:
		color.readFrame(&colorFrame);
		haveColor=1;
    break;
	default:
		printf("Error in wait\n");
		return 0;
	}
  }

	delete [] m_streams; // array form, matching "new openni::VideoStream*[2]" above
	return 1;
  #else
    //Using serial frame grabbing
    readFrameBlocking(depth,depthFrame,MAX_TRIES_FOR_EACH_FRAME); // depth.readFrame(&depthFrame);
    readFrameBlocking(color,colorFrame,MAX_TRIES_FOR_EACH_FRAME); // color.readFrame(&colorFrame);

    if(depthFrame.isValid() && colorFrame.isValid()) { return 1; }

    fprintf(stderr,"Depth And Color frames are wrong!\n");
  #endif
    return 0;

}
Developer ID: AmmarkoV, Project: RGBDAcquisition, Lines of code: 54, Source file: OpenNI2Acquisition.cpp
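
A sketch of how readOpenNiColorAndDepth might be driven from a capture loop, assuming the color and depth streams have already been created and started on an open Device (as in Examples 15 and 20); the loop bound and error handling are illustrative only:

  VideoFrameRef colorFrame, depthFrame;
  for (unsigned int f = 0; f < 100; ++f)
  {
    if (!readOpenNiColorAndDepth(color, depth, colorFrame, depthFrame))
    {
      fprintf(stderr, "Could not grab frame %u\n", f);
      continue;
    }
    // colorFrame.getData() and depthFrame.getData() are valid for this frame
  }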


Example 7: Log

/**************************************
* startReceivingVideo
*	Function helper for thread
**************************************/
void* VideoStream::startReceivingVideo(void *par)
{
	Log("RecVideoThread [%p]\n",pthread_self());

	// Get the VideoStream object
	VideoStream *conf = (VideoStream *)par;

	// Block signals
	blocksignals();

	// Run the video receive loop
	conf->RecVideo();
	//Exit
	return NULL;
}
Developer ID: tidehc, Project: media-server-1, Lines of code: 19, Source file: videostream.cpp


Example 8: handle_stream_data

void DisplayChannel::handle_stream_data(RedPeer::InMessage* message)
{
    SpiceMsgDisplayStreamData* stream_data = (SpiceMsgDisplayStreamData*)message->data();
    VideoStream* stream;

    if (stream_data->id >= _streams.size() || !(stream = _streams[stream_data->id])) {
        THROW("invalid stream");
    }

    if (message->size() < sizeof(SpiceMsgDisplayStreamData) + stream_data->data_size) {
        THROW("access violation");
    }

    stream->push_data(stream_data->multi_media_time, stream_data->data_size, stream_data->data);
}
Developer ID: colama, Project: colama-3rdparty-tools, Lines of code: 15, Source file: display_channel.cpp


Example 9: onNewFrame

void XtionDepthDriverImpl::onNewFrame(VideoStream &stream)
{
  VideoFrameRef ref;
  stream.readFrame(&ref);
  _lastCaptured = XtionDepthImage(ref.getData(), ref.getDataSize(),
    ref.getWidth(), ref.getHeight(), 0, this);
}
Developer ID: Clemensius, Project: libkovan, Lines of code: 7, Source file: xtion_depth_driver_impl_p.cpp


Example 10: VDADecoderCallback

static void
VDADecoderCallback (void *decompressionOutputRefCon, CFDictionaryRef frameInfo, OSStatus status, uint32_t infoFlags, CVImageBufferRef imageBuffer)
{
	MoonVDADecoder *decoder = (MoonVDADecoder *) decompressionOutputRefCon;
	VideoStream *vs = (VideoStream *) decoder->GetStream ();

	// FIXME: Is this always 1 thread?  Can we optimize this
	decoder->GetDeployment ()->RegisterThread ();

	Deployment::SetCurrent (decoder->GetDeployment ());

	if (imageBuffer == NULL) {
		return;
	}

	OSType format_type = CVPixelBufferGetPixelFormatType (imageBuffer);
	if (format_type != kCVPixelFormatType_422YpCbCr8) {
		g_warning ("Mismatched format in VDA");
		return;
	}

	MediaFrame *mf = (MediaFrame *) CFDictionaryGetValue (frameInfo, CFSTR ("MoonMediaFrame"));

	mf->AddState (MediaFrameVUY2);
	mf->FreeBuffer ();
	mf->SetBufLen (0);

	mf->srcSlideY = 0;
	mf->srcSlideH = vs->GetHeight ();

	mf->width = vs->GetWidth ();
	mf->height = vs->GetHeight ();

	CVPixelBufferLockBaseAddress (imageBuffer, 0);

	mf->data_stride [0] = (uint8_t *) CVPixelBufferGetBaseAddress (imageBuffer);
	mf->srcStride [0] = CVPixelBufferGetBytesPerRow (imageBuffer);

	mf->AddState (MediaFrameDecoded);

	mf->decoder_specific_data = imageBuffer;
	CVPixelBufferRetain (imageBuffer);

	decoder->ReportDecodeFrameCompleted (mf);

	mf->unref ();
}
Developer ID: 499940913, Project: moon, Lines of code: 47, Source file: pipeline-vda.cpp


Example 11: main

int main(void) {
	VideoStream *videoStream = new VideoStream();
	DifferenceImage *differenceImage = new DifferenceImage();
	HeatImage *heatImage = new HeatImage();

	Mat *image = NULL;
	Mat *bgImage = NULL;
	Mat *diffImage = NULL;
	Mat *htImage = NULL;

	if (DEBUG == true) {
		namedWindow("actImage", CV_WINDOW_AUTOSIZE);
		namedWindow("bgImage", CV_WINDOW_AUTOSIZE);
	}

	namedWindow("diffImage", CV_WINDOW_AUTOSIZE);
	namedWindow("heatImage", CV_WINDOW_AUTOSIZE);

	bgImage = videoStream->getFrame();

	while (true) {
		image = videoStream->getFrame();

		differenceImage->generate(image, bgImage);
		diffImage = differenceImage->get();

		heatImage->generate(diffImage);
		htImage = heatImage->get();

		if (DEBUG == true) {
			imshow("actImage", *image);
			imshow("bgImage", *image);
		}

		imshow("diffImage", *diffImage);
		imshow("heatImage", *htImage);
		waitKey(1);

		heatImage->degenerate();

		delete bgImage;
		bgImage = image;
	}
	delete image;

	return 0;
}
Developer ID: reckmeyerm, Project: motionAnalysis, Lines of code: 47, Source file: main.cpp


Example 12: onNewFrame

  virtual void onNewFrame(VideoStream& stream)
  {
    ros::Time ts = ros::Time::now();

    VideoFrameRef frame;
    stream.readFrame(&frame);

    sensor_msgs::Image::Ptr img(new sensor_msgs::Image);
    sensor_msgs::CameraInfo::Ptr info(new sensor_msgs::CameraInfo);

    double scale = double(frame.getWidth()) / double(1280);

    info->header.stamp = ts;
    info->header.frame_id = frame_id_;
    info->width = frame.getWidth();
    info->height = frame.getHeight();
    info->K.assign(0);
    info->K[0] = 1050.0 * scale;
    info->K[4] = 1050.0 * scale;
    info->K[2] = frame.getWidth() / 2.0 - 0.5;
    info->K[5] = frame.getHeight() / 2.0 - 0.5;
    info->P.assign(0);
    info->P[0] = 1050.0 * scale;
    info->P[5] = 1050.0 * scale;
    info->P[2] = frame.getWidth() / 2.0 - 0.5;
    info->P[6] = frame.getHeight() / 2.0 - 0.5;

    switch(frame.getVideoMode().getPixelFormat())
    {
    case PIXEL_FORMAT_GRAY8:
      img->encoding = sensor_msgs::image_encodings::MONO8;
      break;
    case PIXEL_FORMAT_GRAY16:
      img->encoding = sensor_msgs::image_encodings::MONO16;
      break;
    case PIXEL_FORMAT_YUV422:
      img->encoding = sensor_msgs::image_encodings::YUV422;
      break;
    case PIXEL_FORMAT_RGB888:
      img->encoding = sensor_msgs::image_encodings::RGB8;
      break;
    case PIXEL_FORMAT_SHIFT_9_2:
    case PIXEL_FORMAT_DEPTH_1_MM:
      img->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
      break;
    default:
      ROS_WARN("Unknown OpenNI pixel format!");
      break;
    }
    img->header.stamp = ts;
    img->header.frame_id = frame_id_;
    img->height = frame.getHeight();
    img->width = frame.getWidth();
    img->step = frame.getStrideInBytes();
    img->data.resize(frame.getDataSize());
    std::copy(static_cast<const uint8_t*>(frame.getData()), static_cast<const uint8_t*>(frame.getData()) + frame.getDataSize(), img->data.begin());

    publish(img, info);
  }
Developer ID: amiltonwong, Project: openni2_camera, Lines of code: 59, Source file: camera.cpp
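
The camera intrinsics filled in above assume a nominal focal length of 1050 pixels at a 1280-pixel-wide image and scale it with the actual frame width. A minimal sketch of that arithmetic (the 1050 value is the example's own assumption, not calibration data):

  #include <cstdio>

  int main()
  {
    const double f_at_1280 = 1050.0;    // nominal focal length at 1280 px width
    int width = 640, height = 480;
    double scale = double(width) / 1280.0;
    double fx = f_at_1280 * scale;      // 525.0 for a 640-wide frame
    double cx = width  / 2.0 - 0.5;     // principal point at the image centre
    double cy = height / 2.0 - 0.5;
    printf("fx=%.1f cx=%.1f cy=%.1f\n", fx, cx, cy);
    return 0;
  }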


Example 13: handle_stream_clip

void DisplayChannel::handle_stream_clip(RedPeer::InMessage* message)
{
    SpiceMsgDisplayStreamClip* clip_data = (SpiceMsgDisplayStreamClip*)message->data();
    VideoStream* stream;
    uint32_t num_clip_rects;
    SpiceRect* clip_rects;

    if (clip_data->id >= _streams.size() || !(stream = _streams[clip_data->id])) {
        THROW("invalid stream");
    }

    if (message->size() < sizeof(SpiceMsgDisplayStreamClip)) {
        THROW("access violation");
    }
    set_clip_rects(clip_data->clip, num_clip_rects, clip_rects);
    Lock lock(_streams_lock);
    stream->set_clip(clip_data->clip.type, num_clip_rects, clip_rects);
}
Developer ID: colama, Project: colama-3rdparty-tools, Lines of code: 18, Source file: display_channel.cpp


Example 14: createStream

OniStatus Context::createStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream)
{

	// Create the stream.
	Device* pDevice = device->pDevice;
	VideoStream* pMyStream = pDevice->createStream(sensorType);
	if (pMyStream == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream from device:%08x, source: %d", device, sensorType);
		return ONI_STATUS_ERROR;
	}

	pMyStream->setNewFrameCallback(newFrameCallback, this);

	// Create stream frame holder and connect it to the stream.
	StreamFrameHolder* pFrameHolder = XN_NEW(StreamFrameHolder, m_frameManager, pMyStream);
	if (pFrameHolder == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream frame holder from device:%08x, source: %d", device, sensorType);
		XN_DELETE(pMyStream);
		return ONI_STATUS_ERROR;
	}
	pMyStream->setFrameHolder(pFrameHolder);

	// Create handle object.
	_OniStream* pStreamHandle = XN_NEW(_OniStream);
	if (pStreamHandle == NULL)
	{
		m_errorLogger.Append("Couldn't allocate memory for StreamHandle");
		XN_DELETE(pFrameHolder);
		pFrameHolder = NULL;
		XN_DELETE(pMyStream);
		pMyStream = NULL;
		return ONI_STATUS_ERROR;
	}
	*pStream = pStreamHandle;
	pStreamHandle->pStream = pMyStream;

	m_cs.Lock();
	m_streams.AddLast(pMyStream);
	m_cs.Unlock();

	return ONI_STATUS_OK;
}
Developer ID: quintona, Project: openni2, Lines of code: 44, Source file: OniContext.cpp


Example 15: main

int main()
{
	Status rc = OpenNI::initialize();
	if (rc != STATUS_OK)
	{
		printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
		return 1;
	}

	OpenNIEventListener eventPrinter;
	OpenNI::addListener(&eventPrinter);

	Device device;
	rc = device.open(ANY_DEVICE);
	if (rc != STATUS_OK)
	{
		printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
		return 2;
	}

	VideoStream depth;

	if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
	{
		rc = depth.create(device, SENSOR_DEPTH);
		if (rc != STATUS_OK)
		{
			printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
		}
	}
	rc = depth.start();
	if (rc != STATUS_OK)
	{
		printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
	}


	PrintCallback depthPrinter;

	// Register to new frame
	depth.addListener(&depthPrinter);

	// Wait while we're getting frames through the printer
	while (!wasKeyboardHit())
	{
		Sleep(100);
	}

	depth.removeListener(&depthPrinter);


	depth.stop();
	depth.destroy();
	device.close();
	OpenNI::shutdown();

	return 0;
}
Developer ID: MetaMagic, Project: OpenNI2, Lines of code: 58, Source file: main.cpp


Example 16: WriteProperties

	void WriteProperties( VideoStream& rStream )
	{
		for_each( vProperties.begin(), vProperties.end(), [&rStream]( CProperty& rProp ){
			int iSize = rProp.vData.size();
			if( rStream.setProperty( rProp.iIdx, rProp.vData.data(), iSize ) != STATUS_OK )
			{
				cerr << "Property " << rProp.sName << " write fail" << endl;
			}
		} );
	}
Developer ID: VIML, Project: oniFixer, Lines of code: 10, Source file: main.cpp
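
WriteProperties iterates over a vProperties container of CProperty records. A hypothetical reconstruction of that record, inferred only from the fields the lambda touches (iIdx, sName, vData); the real definition in the oniFixer project may differ:

	#include <string>
	#include <vector>

	struct CProperty
	{
		int               iIdx;    // OpenNI property id passed to setProperty()
		std::string       sName;   // name used in the error message
		std::vector<char> vData;   // raw property payload; vData.size() is its length
	};

	std::vector<CProperty> vProperties;   // filled elsewhere, then replayed onto a stream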


Example 17: GetStream

void
MoonVDADecoder::OpenDecoderAsyncInternal ()
{
	IMediaStream *stream = GetStream ();
	VideoStream *vs = (VideoStream *) stream;
	int format = 'avc1';

	CFDataRef avcCData = CFDataCreate (kCFAllocatorDefault, (const uint8_t*) stream->GetRawExtraData (), stream->GetRawExtraDataSize ());
	OSStatus status = CreateDecoder ((SInt32) vs->GetHeight (), (SInt32) vs->GetWidth (), (OSType) format, avcCData);

	if (avcCData) CFRelease (avcCData);

	if (status == kVDADecoderNoErr) {
		SetPixelFormat (MoonPixelFormat422YpCbCr8);

		ReportOpenDecoderCompleted ();
	} else {
		char *str = g_strdup_printf ("MoonVDADecoder failed to open codec (result: %d)", status);
		ReportErrorOccurred (str);
		g_free (str);
	}
}
Developer ID: 499940913, Project: moon, Lines of code: 22, Source file: pipeline-vda.cpp


Example 18: readFrameBlocking

int readFrameBlocking(VideoStream &stream, VideoFrameRef &frame, unsigned int max_tries)
{
  unsigned int tries_for_frame = 0;
  while (tries_for_frame < max_tries)
  {
    stream.readFrame(&frame);
    if (frame.isValid()) { return 1; }
    ++tries_for_frame;
  }

  if (!frame.isValid()) { fprintf(stderr, "Could not get a valid frame even after %u tries\n", max_tries); return 0; }
  return (tries_for_frame < max_tries);
}
Developer ID: AmmarkoV, Project: RGBDAcquisition, Lines of code: 13, Source file: OpenNI2Acquisition.cpp


Example 19:

  virtual ~SensorStreamManager()
  {
    stream_.removeNewFrameListener(this);
    stream_.stop();
    stream_.destroy();

    publisher_.shutdown();
  }
Developer ID: amiltonwong, Project: openni2_camera, Lines of code: 8, Source file: camera.cpp


Example 20: kinect_init

int kinect_init()
{
    Status rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
        return 1;
    }

    rc = device.open(ANY_DEVICE);
    if (rc != STATUS_OK)
    {
        printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
        return 2;
    }

    if (device.getSensorInfo(SENSOR_DEPTH) != NULL)
    {
        rc = depth.create(device, SENSOR_DEPTH);
        if (rc != STATUS_OK)
        {
            printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
            return 3;
        }

        const SensorInfo* sinfo = device.getSensorInfo(SENSOR_DEPTH);
        const Array<VideoMode>& modes = sinfo->getSupportedVideoModes();

        for (int i=0; i<modes.getSize(); i++) {
            printf("%i: %ix%i, %i fps, %i format\n",
                i,
                modes[i].getResolutionX(),
                modes[i].getResolutionY(),
                modes[i].getFps(),
                modes[i].getPixelFormat()
                );
        }

        //rc = depth.setVideoMode(modes[0]); // 320x240, 30fps, format: 100
        //rc = depth.setVideoMode(modes[4]); // 640x480, 30fps, format: 100
        rc = depth.setVideoMode(modes[4]); // 640x480, 30fps, format: 100

        if (rc != openni::STATUS_OK) {
            printf("Failed to set depth resolution\n");
            return -1;
        }
    }

    rc = depth.start();
    if (rc != STATUS_OK)
    {
        printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError());
        return 4;
    }

    return 0;
}
Developer ID: jasongeng, Project: linuxconfig, Lines of code: 57, Source file: C_linux_signal_handler.cpp
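
A hedged sketch of how kinect_init() might be paired with frame reads and a clean shutdown, assuming the file-scope Device device; and VideoStream depth; objects that the function above refers to; the helper below is illustrative only:

int kinect_example_run()
{
    if (kinect_init() != 0) { return -1; }

    VideoFrameRef frame;
    for (int i = 0; i < 30; ++i)   // roughly one second of frames at 30 fps
    {
        if (depth.readFrame(&frame) == STATUS_OK && frame.isValid())
        {
            // DEPTH_1_MM frames hold 16-bit depth values in millimetres
            const DepthPixel* pixels = (const DepthPixel*)frame.getData();
            int centre = frame.getHeight() / 2 * frame.getWidth() + frame.getWidth() / 2;
            printf("frame %d, centre depth %u mm\n", i, (unsigned)pixels[centre]);
        }
    }

    depth.stop();
    depth.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}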



Note: The VideoStream class examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various programmers; copyright of the source code remains with the original authors. Please consult each project's License before redistributing or using the code; do not repost without permission.

