本文整理汇总了C++中capture函数的典型用法代码示例。如果您正苦于以下问题:C++ capture函数的具体用法?C++ capture怎么用?C++ capture使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了capture函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。
示例1: main
// Reads a weighted undirected graph from a user-named file (vertex count,
// then "i j length" edge triples), then runs the project's `capture`
// routine (shortest-path search from vertex 1) and releases all resources.
int main ()
{
    printf("Enter file adress\n");
    char *fileAdress = new char[maxFileAdressLength];
    // fgets is bounds-checked; gets() (used originally) cannot limit input
    // length and was removed from the standard as unsafe.
    if (fgets(fileAdress, maxFileAdressLength, stdin) == nullptr)
        fileAdress[0] = '\0';
    fileAdress[strcspn(fileAdress, "\n")] = '\0'; // strip the trailing newline
    FILE *file = fopen(fileAdress, "r");
    if (file == nullptr)
    {
        // The original dereferenced a NULL FILE* when the path was wrong.
        printf("Can not open file\n");
        delete []fileAdress;
        return 1;
    }
    int adjMatrix[maxVertexNumber][maxVertexNumber];
    for (int i = 0; i < maxVertexNumber; ++i)
        for (int j = 0; j < maxVertexNumber; ++j)
            adjMatrix[i][j] = maxWay; // maxWay acts as "no edge"
    int numb = 0;
    fscanf (file, "%d", &numb);
    int i = 0;
    int j = 0;
    int length = 0;
    // Checking fscanf's return count instead of feof() avoids processing the
    // last record twice (the classic while(!feof) bug in the original).
    while (fscanf (file, "%d %d %d", &i, &j, &length) == 3)
    {
        adjMatrix[i][j] = length;
        adjMatrix[j][i] = length; // undirected: mirror the edge
    }
    // Vertices are numbered from 1; vertex 1 is the start vertex.
    int isVisited[maxVertexNumber];
    isVisited[1] = 1;
    for (int k = 2; k <= numb; ++k)
        isVisited[k] = 0;
    int minToFirst[maxVertexNumber];
    minToFirst[1] = 0;
    for (int k = 2; k <= numb; ++k)
        minToFirst[k] = maxWay;
    String *ways[maxVertexNumber];
    for (int k = 1; k <= numb; ++k)
        ways[k] = createString();
    addSymbol(ways[1], '1');
    capture(adjMatrix, isVisited, numb, minToFirst, ways);
    fclose(file);
    delete []fileAdress;
    for (int k = 1; k <= numb; ++k)
    {
        deleteString(ways[k]);
        ways[k] = nullptr;
    }
    scanf("%d", &i); // pause: wait for any number before exiting
}
开发者ID:AnnaSkachkauskaite,项目名称:Homeworks,代码行数:44,代码来源:main.cpp
示例2: getCornersSamples
// Collects NUM_FRAMES chessboard-corner samples from camera `index + 1`.
// Sampling starts only after the user presses 's' in the preview window,
// and consecutive samples are spaced at least DELAY_BETWEEN_FRAMES apart.
std::vector<pf::Corners> getCornersSamples(size_t index) {
    cv::Size numSquares(NUM_HOR_SQUARES, NUM_VER_SQUARES);
    cv::VideoCapture capture(index + 1);
    if (!capture.isOpened()) {
        std::cerr << "Can't open the camera" << std::endl;
        std::exit(-1);
    }
    capture.set(CV_CAP_PROP_FPS, FRAMES_PER_SECOND);
    std::vector<pf::Corners> cornersSamples;
    bool started = false;
    clock_t time = 0;
    while (cornersSamples.size() < NUM_FRAMES) {
        // Capture frame
        cv::Mat frame;
        capture >> frame;
        // A disconnected camera yields empty frames; passing one to the
        // corner detector would crash, so bail out explicitly.
        if (frame.empty()) {
            std::cerr << "Lost the camera stream" << std::endl;
            std::exit(-1);
        }
        // Find chessboard corners
        auto found = findChessboardCorners(frame);
        if (found.second && started && clock() - time > DELAY_BETWEEN_FRAMES) {
            time = clock();
            cornersSamples.push_back(found.first);
            // Invert the frame briefly to signal that a sample was taken.
            cv::bitwise_not(frame, frame);
        }
        // Show image
        cv::drawChessboardCorners(frame, numSquares, cv::Mat(found.first), found.second);
        cv::imshow("Calibrate", frame);
        // Wait for 's' to start
        if (cv::waitKey(100) == 's') {
            started = true;
        }
    }
    return cornersSamples;
}
开发者ID:esneider,项目名称:minoru,代码行数:43,代码来源:calibrate.cpp
示例3: forward
// Pumps one complete (possibly multipart) message from `from_` to `to_`,
// mirroring every frame to the optional `capture_` socket, and updates the
// per-socket traffic statistics.
// Returns 0 on success, -1 on any recv/getsockopt/send failure.
int forward (class zmq::socket_base_t *from_,
zmq_socket_stats_t *from_stats,
class zmq::socket_base_t *to_,
zmq_socket_stats_t *to_stats,
class zmq::socket_base_t *capture_,
zmq::msg_t &msg_)
{
int more;
size_t moresz;
size_t complete_msg_size = 0;
// Loop over the frames of one logical message; ZMQ_RCVMORE tells us
// whether another frame of the same message follows.
while (true) {
int rc = from_->recv (&msg_, 0);
if (unlikely (rc < 0))
return -1;
complete_msg_size += msg_.size ();
moresz = sizeof more;
rc = from_->getsockopt (ZMQ_RCVMORE, &more, &moresz);
if (unlikely (rc < 0))
return -1;
// Copy message to capture socket if any
rc = capture (capture_, msg_, more);
if (unlikely (rc < 0))
return -1;
// Preserve the multipart framing on the outgoing socket.
rc = to_->send (&msg_, more ? ZMQ_SNDMORE : 0);
if (unlikely (rc < 0))
return -1;
if (more == 0)
break;
}
// A multipart message counts as 1 packet:
from_stats->msg_in++;
from_stats->bytes_in += complete_msg_size;
to_stats->msg_out++;
to_stats->bytes_out += complete_msg_size;
return 0;
}
开发者ID:ming-hai,项目名称:libzmq,代码行数:43,代码来源:proxy.cpp
示例4: capture
// Captures a screenshot of the game window, (re)locates the status bars when
// the window changed, and refreshes each bar's value from the pixels.
// Returns the current status bitmask (L2_OFF when no image is available).
int L2Window::check(){
QImage image;
image = capture();
if(image.isNull()) {
// No screenshot available: retry locating the bars and bail out.
qDebug("image.isNull()");
findBars();
return status;
}
// Re-scan bar positions whenever we were off or the window was resized.
if(status == L2_OFF || image_width != image.width() || image_height != image.height()){
findBars();
status = L2_OFF;
image_width = image.width();
image_height = image.height();
// Accumulate per-bar detection results into the status bitmask.
for(int i = idCP; i < BARNUM; i++ ){
status |= findXP(i, image);
}
// Paint the detected bar rows gray in the debug image dump below.
for(int j = idCP; j < BARNUM; j++ ){
for(int i=bar[j].getBegin();i<bar[j].getEnd();i++){
image.setPixel(i, bar[j].getY(), qRgb(128, 128, 128));
}
}
image.save("image.png");
// Crop an 18x18 patch near the CP bar — presumably the window's icon
// area; offsets are empirical (TODO confirm against the game layout).
QImage icotmp=image.copy(bar[idCP].getBegin()+18, bar[idCP].getY()-29, 18, 18);
// icotmp.save("icotmp.png");
QPixmap pixmap(20,20);
pixmap.convertFromImage(icotmp);
// pixmap.save("pixmap.png");
if(L2icon) delete L2icon;
L2icon = new QIcon(pixmap);
}
//QImage barimg=image.copy(bar[idCP].getBegin(), bar[idCP].getY(), bar[idCP].getEnd()-bar[idCP].getBegin(), 1);
//barimg.save("CP.png");
// Update every bar's reading from the current frame.
for(int i = idCP; i < BARNUM; i++ ){
bar[i].checkXPBar(image);
}
return status;
}
开发者ID:dennn66,项目名称:BB,代码行数:43,代码来源:l2window.cpp
示例5: capture
bool EditPointOP::OnMouseMove(int x, int y)
{
if (ee::ZoomViewOP::OnMouseMove(x, y)) return true;
int tolerance = m_node_capture ? m_node_capture->GetValue() : 0;
if (tolerance == 0) {
return false;
}
sm::vec2 pos = m_stage->TransPosScrToProj(x, y);
NodeCapture capture(m_shapes_impl, tolerance);
auto old = m_captured.shape;
capture.captureEditable(pos, m_captured);
if (old && !m_captured.shape || !old && m_captured.shape) {
ee::SetCanvasDirtySJ::Instance()->SetDirty();
}
return false;
}
开发者ID:xzrunner,项目名称:easyeditor,代码行数:19,代码来源:EditPointOP.cpp
示例6: main
// Prints the frame count and frame rate of every video file named on the
// command line.
int main(int argc, char* argv[]){
    if(argc < 2){
        std::cerr << "usage: detfps filename"<<std::endl;
        exit(EXIT_FAILURE);
    }
    for(int i=1;i<=argc-1;i++){
        std::string vInput(argv[i]);
        cv::VideoCapture capture(vInput);
        // Without this check, get() silently reports garbage (typically 0)
        // for files that could not be opened.
        if(!capture.isOpened()){
            std::cerr << "Cannot open video: " << vInput << std::endl;
            continue;
        }
        double fpsIn = capture.get(CV_CAP_PROP_FPS);
        int frameCount = capture.get(CV_CAP_PROP_FRAME_COUNT);
        std::cout << "INPUT: " << vInput << std::endl;
        std::cout << "* Number of frames: " << frameCount << std::endl;
        std::cout << "* Frames per second: " << fpsIn << std::endl;
    }
    return EXIT_SUCCESS;
}
开发者ID:hehuilong,项目名称:IMAR-C,代码行数:19,代码来源:main.cpp
示例7: capture
// Opens the default camera and shows a live preview until ESC is pressed.
// Returns 0 on a clean exit, -1 when the camera cannot be opened.
int Cam::showCam() {
    cv::VideoCapture capture(0); // default camera device
    if (!capture.isOpened())
        return -1;

    cv::namedWindow("Test OpenCV", 1);
    cv::Mat frame;
    for (;;)
    {
        capture >> frame;
        cv::imshow("Test OpenCV", frame);
        if (cv::waitKey(1) == 27) // 27 == ESC
            break;
    }
    return 0;
}
开发者ID:xd785,项目名称:swift_opencv3_demo,代码行数:19,代码来源:Cam.cpp
示例8: main
// Streams registered depth and color images from an OpenNI2-capable sensor
// (e.g. Kinect) into two preview windows until 'q' is pressed.
int main(int argc, char * argv[])
{
    cv::namedWindow("depth");
    cv::namedWindow("color");

    // Open Kinect sensor
    cv::VideoCapture capture(cv::CAP_OPENNI2);
    if (!capture.isOpened())
    {
        printf("Could not open OpenNI-capable sensor\n");
        return -1;
    }

    // Register (align) the depth map to the color image.
    capture.set(cv::CAP_PROP_OPENNI_REGISTRATION, 1);
    double focal_length = capture.get(cv::CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH);
    //printf("Focal length = %f\n", focal_length);

    cv::Mat color, depth;
    bool isLooping = true;
    while (isLooping)
    {
        capture.grab();
        capture.retrieve(depth, cv::CAP_OPENNI_DEPTH_MAP);
        capture.retrieve(color, cv::CAP_OPENNI_BGR_IMAGE);
        cv::imshow("depth", depth);
        cv::imshow("color", color);

        // The original had both `if (key == 'q') break;` and an unreachable
        // `case 'q'` in a switch; a single exit path replaces the dead code.
        char key = (char)cv::waitKey(10);
        if (key == 'q')
            isLooping = false;
    }
    return 0;
}
开发者ID:caomw,项目名称:opencv-rgbd,代码行数:42,代码来源:main.cpp
示例9: main
// ROS node that streams Kinect depth+color frames, converts them to a PCL
// point cloud, extracts the dominant plane, and shows both the raw color
// image and the cloud until ESC / ROS shutdown / viewer close.
int main(int argc, char** argv)
{
std::cout << "INITIALIZING COLOR_RECOGNIZER..." << std::endl;
ros::init(argc, argv, "color_recog");
ros::NodeHandle n;
ros::Rate loop(30);
std::cout << "ColorRecognizer.->Triying to initialize kinect sensor... " << std::endl;
cv::VideoCapture capture(CV_CAP_OPENNI);
if(!capture.isOpened())
{
std::cout << "ColorRecognizer.->Cannot open kinect :'(" << std::endl;
return 1;
}
// Align the depth stream with the color image.
capture.set(CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION, CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON);
std::cout << "ColorRecognizer.->Kinect sensor started :D" << std::endl;
cv::Mat matDepth;
cv::Mat matColor;
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pclFrame(new pcl::PointCloud<pcl::PointXYZRGBA>);
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pclNotPlane(new pcl::PointCloud<pcl::PointXYZRGBA>);
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pclPlane(new pcl::PointCloud<pcl::PointXYZRGBA>);
pcl::visualization::CloudViewer viewer("Original");
// Run at ~30 Hz until ESC is pressed, ROS shuts down, or the viewer closes.
while(ros::ok() && cv::waitKey(10) != 27 && !viewer.wasStopped())
{
if(!capture.grab())
{
// No frame this cycle: keep the ROS loop alive and try again.
loop.sleep();
ros::spinOnce();
continue;
}
// Retrieve the 3-D point map and the BGR image for the grabbed frame.
capture.retrieve(matDepth, CV_CAP_OPENNI_POINT_CLOUD_MAP);
capture.retrieve(matColor, CV_CAP_OPENNI_BGR_IMAGE);
cvMat2Pcl(matColor, matDepth, *pclFrame);
// Splits the cloud into plane / not-plane parts (helper defined elsewhere).
extractPlane(pclFrame, pclFrame, pclPlane, pclNotPlane);
cv::imshow("Original", matColor);
viewer.showCloud(pclFrame);
ros::spinOnce();
}
}
开发者ID:RobotJustina,项目名称:JUSTINA,代码行数:42,代码来源:simple_color_node.cpp
示例10: switch
// Keyboard handler: SPACE captures a frame; 'j'/'k' step to the previous/next
// video device and reinitialize the grabber at 640x480.
void testApp::keyPressed(int key) {
	if (key == ' ') {
		capture();
	} else if (key == 'j' || key == 'k') {
		deviceId += (key == 'j') ? -1 : 1;
		videoGrabber.setDeviceID(deviceId);
		videoGrabber.initGrabber(640, 480);
	}
}
开发者ID:nielmclaren,项目名称:TimeLapse,代码行数:20,代码来源:testApp.cpp
示例11: main
// Plays the video/RTSP stream named by argv[1] in a window until the stream
// ends or a frame cannot be read. Returns 1 on any failure.
int main(int argc , char ** argv)
{
    if (argc < 2)
        return 1;
    cv::VideoCapture capture(argv[1]);
    if (!capture.isOpened())
        return 1;

    cv::namedWindow("RTSP Stream", CV_WINDOW_AUTOSIZE);
    cv::Mat frame;
    while(true)
    {
        if (!capture.read(frame))
            return 1;
        // Must match the namedWindow title above; the original showed frames
        // in a second, auto-created "TEST" window, leaving "RTSP Stream" empty.
        cv::imshow("RTSP Stream", frame);
        cv::waitKey(1);
    }
}
开发者ID:Remote-Oculus-Controller,项目名称:R.O.C-SERVER,代码行数:20,代码来源:RTSP_player.cpp
示例12: calcTransforms
//--------------------------------------------------------------
// Per-frame update: refreshes every camera, recomputes the stereo transforms
// once both cameras have flagged a new calibration point, and fires timed
// captures when the timer is enabled.
void testApp::update(){
// FOREACH_CAMERA presumably expands to a loop declaring iCam — TODO confirm.
FOREACH_CAMERA
camera[iCam].update();
// Stereo case: when both cameras have registered a new point ("bang"),
// consume the flags and recompute the camera-to-camera transforms.
if (NUM_CAMERAS==2)
if (camera[0].bangAdded && camera[1].bangAdded)
{
camera[0].bangAdded = false;
camera[1].bangAdded = false;
calcTransforms();
}
// Timer-driven capture: only when no camera still has a pending add.
if (timerOn)
if (ofGetElapsedTimef() > (lastCaptureTime+TIMER_PERIOD) && !camera[0].doAdd && !camera[1].doAdd)
{
capture();
lastCaptureTime = ofGetElapsedTimef();
}
}
开发者ID:elliotwoods,项目名称:MapTools-SL,代码行数:21,代码来源:testApp.cpp
示例13: unicap_capture
/* Python binding: grabs one frame via the unicap device, then returns the
 * pixel intensities as a Python list of ints.
 * NOTE(review): relies on module-level globals — `capture()` appears to fill
 * the global `buf` of `nrOfPixel` entries; confirm that contract holds. */
static PyObject *
unicap_capture(PyObject *self, PyObject *args)
{
unicap_handle_t handle = getHandle();
capture(handle);
unicap_close(handle);
PyObject* imgarray = PyList_New(nrOfPixel);
int i;
for(i = 0; i < nrOfPixel; i++)
{
long l = buf[i];
PyObject* intensity = PyLong_FromLong(l);
/* PyList_SetItem steals the reference to `intensity` — no DECREF needed. */
PyList_SetItem(imgarray, i, intensity);
}
/* The frame buffer is single-use: release it and reset the global. */
free(buf);
buf = NULL;
return imgarray;
}
开发者ID:Hofmaier,项目名称:wintermute,代码行数:20,代码来源:unicapmodule.c
示例14: qDebug
// Drives a timed screenshot session: emits capture(frameIndex) mFps times a
// second for mDuration seconds (exactly once when framesCnt == 1), pacing
// each iteration to `period` milliseconds, then signals finished().
void LSCWorker::start()
{
const double period = (1.0/mFps)*1000; // ms between frames
int currentFrame = 0;
const int framesCnt = mFps*mDuration;
qDebug() << "period:"<<period << "mFps:" << mFps << "duration:" << mDuration << "framesCnt:"<<framesCnt;
while ( currentFrame < framesCnt || (currentFrame == 0 && framesCnt == 1))
{
const qint64 started = QDateTime::currentMSecsSinceEpoch();
// Wait until the consumer has drained the previous request.
// NOTE(review): this is an unsynchronized busy-wait spinning on a queue
// another thread drains — it burns a full core and reads mQueue without
// the mutex used below; consider a wait condition instead.
while( !mQueue.isEmpty() )
{
continue;
}
qDebug() << "request for capture" << currentFrame << "\t\t" << QDateTime::currentMSecsSinceEpoch();
mQueue.enqueue( "capture" );
emit capture( currentFrame );
// qApp->processEvents();
// Sleep off whatever is left of this frame's time budget.
if( framesCnt > 1 )
{
const qint64 spent = QDateTime::currentMSecsSinceEpoch() - started;
const qint64 wait = period - spent;
if ( wait>0 ) customWait( wait );
}
++currentFrame;
}
mMutex.lock(); // to avoid dequeueing from LSCCapturer::onCaptureRequested
mQueue.clear();
mMutex.unlock();
emit finished();
}
开发者ID:sendevent,项目名称:lsc,代码行数:41,代码来源:screenshotworker.cpp
示例15: QPushButton
//创建按钮
// Builds the capture control strip: preview/capture/delete buttons, the
// "current/total" counter label, and the frame-selection slider, wiring each
// control to its slot and adding them to the window's layouts.
void MeasureMarkersWindow::createWidget() {
// Preview button ("预览")
mPlay = new QPushButton();
mPlay->setText(QString::fromWCharArray(L"预览"));
connect(mPlay, SIGNAL(pressed()), this, SLOT(preview()));
// Counter label, right-aligned ("captured/total")
mCaps = new QLabel();
mCaps->setText(" 0/0");
mCaps->setAlignment(Qt::AlignRight | Qt::AlignCenter);
// Capture button ("捕获") — disabled until previewing makes capture possible
mCapture = new QPushButton();
mCapture->setText(QString::fromWCharArray(L"捕获"));
connect(mCapture, SIGNAL(pressed()), this, SLOT(capture()));
mCapture->setDisabled(true);
// Delete button ("删除") — disabled until there is an image to delete
mDelete = new QPushButton();
mDelete->setText(QString::fromWCharArray(L"删除"));
connect(mDelete, SIGNAL(pressed()), this, SLOT(deleteImg()));
mDelete->setDisabled(true);
// Frame slider — range grows as images are captured; starts disabled
mSlider = new QSlider(Qt::Horizontal);
mSlider->setTickPosition(QSlider::TicksBelow);
mSlider->setMinimum(1);
mSlider->setMaximum(1);
mSlider->setTickInterval(1);
mSlider->setValue(1);
mSlider->setTracking(true);
mSlider->setDisabled(true);
connect(mSlider, SIGNAL(valueChanged(int)), this, SLOT(updateCurrent(int)));
// Assemble the control row and the timeline row.
mCtrlLayout->addWidget(mPlay);
mCtrlLayout->addWidget(mCaps);
mCtrlLayout->addWidget(mCapture);
mCtrlLayout->addWidget(mDelete);
mTimeLineLayout->addWidget(mSlider);
}
开发者ID:Marco-LIU,项目名称:reconstruction-3d,代码行数:42,代码来源:MeasureMarkersWindows.cpp
示例16: capture
// Decodes every frame of the video at `videoPath`, computes a histogram per
// frame on a pool of worker threads, and returns the histograms ordered by
// frame number. Returns whatever was gathered so far if decoding throws.
vector<Mat> Utils::extractVideoHistograms(string videoPath) {
// (frame number, histogram) pairs, filled concurrently by the workers.
vector< pair<int, Mat> > hTemp;
vector<Mat> histograms;
// Each threads will be real fast...
unsigned nThreads = thread::hardware_concurrency() * 100;
vector<thread> pool;
try {
Mat frame;
VideoCapture capture(videoPath);
for(int num = 0; capture.read(frame); num++) {
// Drain the pool once it reaches the cap before spawning more.
if(pool.size() > nThreads) {
for(auto &t : pool) {
t.join();
}
pool.clear();
}
// Deep-copy the frame: `frame` is reused by capture.read on the
// next iteration, so the worker must own its pixels.
Mat fTemp;
frame.copyTo(fTemp);
// NOTE(review): hTemp is shared across workers via std::ref; this is
// only safe if extractHistogram synchronizes its writes — confirm.
pool.push_back(thread(&Utils::extractHistogram, fTemp, num, std::ref(hTemp)));
}
for(auto &t : pool) {
t.join();
}
pool.clear();
frame.release();
capture.release();
} catch(exception &e) {
cout << "The video file is corrupt or of an unsupported format" << endl;
}
// Restore frame order (workers finish out of order), then strip the indices.
std::sort(hTemp.begin(), hTemp.end(), Utils::pairCompare);
for(pair<int, Mat> t : hTemp) {
histograms.push_back(t.second);
}
return histograms;
}
开发者ID:Trojahn,项目名称:FAST,代码行数:41,代码来源:Utils.cpp
示例17: capture
// Tracks the mouse and captures the editable shape within the configured
// tolerance, refreshing the panel when the capture state toggles.
// Returns true only when the base zoom handler consumed the event.
bool EditBezierOP::onMouseMove(int x, int y)
{
if (ZoomViewOP::onMouseMove(x, y)) return true;
Vector pos = m_editPanel->transPosScreenToProject(x, y);
int tolerance;
if (m_cmpt)
tolerance = m_cmpt->getNodeCaptureDistance();
else
tolerance = 0;
if (tolerance != 0)
{
NodeCapture capture(m_shapesImpl, tolerance);
IShape* old = m_captured.shape;
capture.captureEditable(pos, m_captured);
// Refresh only on a transition (shape gained or lost); && binds tighter
// than ||, so this is effectively XOR of the two "have a shape" states.
if (old && !m_captured.shape || !old && m_captured.shape)
m_editPanel->Refresh();
}
return false;
}
开发者ID:jjiezheng,项目名称:drag2d,代码行数:21,代码来源:EditBezierOP.cpp
示例18: QWidget
// Widget with a record-toggle button, a screenshot button, and an embedded
// picture view bound to the given host ip/port.
// NOTE(review): parameter `pp` is never used in this body — confirm intent.
content2::content2(QString ip,QString port,int pp, QWidget *parent)
: QWidget(parent),ip_(ip),port_(port)
{
// Translucent pale-green background.
QPalette palette;
palette.setBrush(QPalette::Window,QBrush(QColor(250,255,240,150)));
setPalette(palette);
setAutoFillBackground(true);
// Record toggle button ("start recording"); checkable so button_text(bool)
// can flip its label on toggle.
button = new QPushButton(this);
button->setText("开始录制");
button->setCheckable(true);
connect(button,SIGNAL(toggled(bool)),this,SLOT(button_text(bool)));
// Screenshot button ("screen capture").
button2 = new QPushButton(this);
button2->setText("屏幕截图");
picture = new pic_show(this,ip,port);
// Pin the record button to the top-right corner.
button->move( width()-10-button->width(),10 );
// Forward status messages from the picture view; screenshot button
// triggers the picture view's capture slot.
connect(picture,SIGNAL(showmessage(QString)),this,SIGNAL(showmess(QString)));
connect(button2,SIGNAL(clicked()),picture,SLOT(capture()));
}
开发者ID:bojianbin,项目名称:packet_client_Qt2.0,代码行数:21,代码来源:content2.cpp
示例19: main
// Streams quarter-resolution BGRA frames from the default camera into a
// GPGPU test pipeline, swapping the display each frame until the stream ends.
int main(int argc, char **argv)
{
    cv::VideoCapture capture(0);
    // The original never verified the camera; get() would return 0 and the
    // first resize would abort.
    if (!capture.isOpened())
    {
        std::cerr << "Could not open the default camera" << std::endl;
        return 1;
    }
    cv::Size size(int(capture.get(cv::CAP_PROP_FRAME_WIDTH)), int(capture.get(cv::CAP_PROP_FRAME_HEIGHT)));
    size = size / 4;

    // Create the context
    gatherer::graphics::GLContextWindow window(size, "display");
    gatherer::graphics::OEGLGPGPUTest test(&window, window.getResolution().x);

    while (true)
    {
        cv::Mat frame;
        capture >> frame;
        // A disconnected camera yields empty frames; cv::resize on an empty
        // Mat throws, so exit the loop cleanly instead.
        if (frame.empty())
            break;
        cv::resize(frame, frame, size);
        cv::cvtColor(frame, frame, cv::COLOR_BGR2BGRA);
        test.captureOutput(frame);
        window.swapBuffers();
    }
    return 0;
}
开发者ID:lucmichalski,项目名称:gatherer,代码行数:21,代码来源:ogles_gpgpu_test.cpp
示例20: switch
// GLFW key callback: ESC requests window close, 'C' captures the window,
// 'F'/'B' zoom in/out by 0.1. Only key-press events are handled.
void CPlyRender::key(GLFWwindow *window, int key, int scancode, int action, int mods)
{
	if (action != GLFW_PRESS)
		return;

	if (key == GLFW_KEY_ESCAPE) {
		// Set the window's close flag so the main loop terminates.
		glfwSetWindowShouldClose(window, GL_TRUE);
	} else if (key == GLFW_KEY_C) {
		capture(window);
	} else if (key == GLFW_KEY_F) {
		m_zoom += 0.1;
	} else if (key == GLFW_KEY_B) {
		m_zoom -= 0.1;
	}
}
开发者ID:miffysora,项目名称:Shader,代码行数:21,代码来源:PlyRender.cpp
注:本文中的capture函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论