This article collects typical usage examples of the C++ function ARLOGe. If you have been wondering what exactly ARLOGe does, how to call it, or what real-world usage looks like, the curated code examples below should help.
A total of 20 ARLOGe code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code examples.
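Before the full examples, here is a minimal sketch of the calling pattern visible throughout this page: ARLOGe takes a printf-style format string plus arguments and logs at error level, alongside its siblings ARLOGw, ARLOGi and ARLOGd for warning, info and debug output. The helper function, file name and failure case below are invented for illustration, and the <AR/ar.h> include is assumed to be where the ARLOG* macros become available; treat this as a sketch rather than code from any of the quoted projects.

#include <stdio.h>
#include <AR/ar.h>   // Assumed header providing ARLOGe(), ARLOGw(), ARLOGi(), ARLOGd().

// Hypothetical helper illustrating typical ARLOGe usage: the function name,
// file name and error condition are made up for this sketch.
static int openConfigFile(const char *path, FILE **fp_out)
{
    FILE *fp = fopen(path, "rb");
    if (!fp) {
        // ARLOGe writes a printf-style message to ARToolKit's log at error level.
        ARLOGe("Error: unable to open configuration file '%s'.\n", path);
        return (-1);
    }
    *fp_out = fp;
    return (0);
}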
Example 1: loadNFTData
// References globals: markersNFTCount
// Modifies globals: threadHandle, surfaceSet[], surfaceSetCount, markersNFT[]
static int loadNFTData(void)
{
    int i;
    KpmRefDataSet *refDataSet;

    // If data was already loaded, stop KPM tracking thread and unload previously loaded data.
    if (threadHandle) {
        ARLOGi("Reloading NFT data.\n");
        unloadNFTData();
    } else {
        ARLOGi("Loading NFT data.\n");
    }

    refDataSet = NULL;

    for (i = 0; i < markersNFTCount; i++) {
        // Load KPM data.
        KpmRefDataSet *refDataSet2;
        ARLOGi("Reading %s.fset3\n", markersNFT[i].datasetPathname);
        if (kpmLoadRefDataSet(markersNFT[i].datasetPathname, "fset3", &refDataSet2) < 0) {
            ARLOGe("Error reading KPM data from %s.fset3\n", markersNFT[i].datasetPathname);
            markersNFT[i].pageNo = -1;
            continue;
        }
        markersNFT[i].pageNo = surfaceSetCount;
        ARLOGi(" Assigned page no. %d.\n", surfaceSetCount);
        if (kpmChangePageNoOfRefDataSet(refDataSet2, KpmChangePageNoAllPages, surfaceSetCount) < 0) {
            ARLOGe("Error: kpmChangePageNoOfRefDataSet\n");
            exit(-1);
        }
        if (kpmMergeRefDataSet(&refDataSet, &refDataSet2) < 0) {
            ARLOGe("Error: kpmMergeRefDataSet\n");
            exit(-1);
        }
        ARLOGi(" Done.\n");

        // Load AR2 data.
        ARLOGi("Reading %s.fset\n", markersNFT[i].datasetPathname);
        if ((surfaceSet[surfaceSetCount] = ar2ReadSurfaceSet(markersNFT[i].datasetPathname, "fset", NULL)) == NULL) {
            ARLOGe("Error reading data from %s.fset\n", markersNFT[i].datasetPathname);
        }
        ARLOGi(" Done.\n");

        surfaceSetCount++;
        if (surfaceSetCount == PAGES_MAX) break;
    }
    if (kpmSetRefDataSet(kpmHandle, refDataSet) < 0) {
        ARLOGe("Error: kpmSetRefDataSet\n");
        exit(-1);
    }
    kpmDeleteRefDataSet(&refDataSet);

    // Start the KPM tracking thread.
    threadHandle = trackingInitInit(kpmHandle);
    if (!threadHandle) exit(-1);

    ARLOGi("Loading of NFT data complete.\n");
    return (TRUE);
}
Developer ID: GitHubGenLi, Project: artoolkit5, Lines of code: 62, Source file: nftBook.c
Example 2: onFrameBuffer
virtual bool onFrameBuffer(void *buffer, int bufferSize)
{
    int frameIndex;
    bool ret;

    if (!isConnected() || !buffer || bufferSize <= 0)
    {
        ARLOGe("Error: onFrameBuffer() called while not connected, or called without frame.\n");
        return false;
    }

    ret = true;
    m_framesReceived++;

    pthread_mutex_lock(&m_nc->frameLock);
    if (m_nc->frameBuffers[0] && m_nc->frameBuffers[1]) // Only do copy if capture has been started.
    {
        if (bufferSize != m_nc->frameBufferLength)
        {
            ARLOGe("Error: onFrameBuffer frame size is %d but receiver expected %d.\n", bufferSize, m_nc->frameBufferLength);
            ret = false;
        }
        else
        {
            // Find a buffer to write to. Any buffer not locked by client is a candidate.
            if (m_nc->frameBuffersStatus[0] != LOCKED)
                frameIndex = 0;
            else if (m_nc->frameBuffersStatus[1] != LOCKED)
                frameIndex = 1;
            else
                frameIndex = -1;

            if (frameIndex == -1)
            {
                ARLOGe("Error: onFrameBuffer receiver was all full up.\n");
                ret = false;
            }
            else
            {
                ARLOGd("FRAME => buffer %d %p\n", frameIndex, m_nc->frameBuffers[frameIndex]);
                memcpy(m_nc->frameBuffers[frameIndex], buffer, bufferSize);
                m_nc->frameBuffersStatus[frameIndex] = READY;
                if (m_nc->frameReadyCallback)
                    pthread_cond_signal(&m_nc->frameReadyNotifierThreadCondGo);
            }
        }
    }
    else
    {
        ARLOGd("FRAME =X\n");
    }
    pthread_mutex_unlock(&m_nc->frameLock);

    return ret;
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 56, Source file: videoAndroidNativeCapture.cpp
Example 3: errorWMC
static void errorWMC(void *userdata)
{
    if (!userdata) {
        ARLOGe("Windows.Media.Capture error but no userdata supplied.\n");
        return;
    }
    AR2VideoParamWinMCT *vid = (AR2VideoParamWinMCT *)userdata;
    ARLOGe("Windows.Media.Capture error.\n");
    stopWMC(vid);
}
Developer ID: nuttonutto, Project: artoolkit5, Lines of code: 10, Source file: videoWinMC.cpp
Example 4: debugReportMode
// Report state of ARToolKit tracker.
static void debugReportMode(ARGViewportHandle *vp)
{
    if (vp->dispMethod == AR_GL_DISP_METHOD_GL_DRAW_PIXELS) {
        ARLOGe("dispMode (d) : GL_DRAW_PIXELS\n");
    } else if (vp->dispMethod == AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME) {
        ARLOGe("dispMode (d) : TEXTURE MAPPING (FULL RESOLUTION)\n");
    } else {
        ARLOGe("dispMode (d) : TEXTURE MAPPING (HALF RESOLUTION)\n");
    }
}
Developer ID: AadityaDev, Project: artoolkit5, Lines of code: 11, Source file: gsubTest.c
Example 5: ARLOGe
static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
{
    TrackingInitHandle *trackingInitHandle;
    KpmHandle          *kpmHandle;
    KpmResult          *kpmResult = NULL;
    int                 kpmResultNum;
    ARUint8            *imagePtr;
    float               err;
    int                 i, j, k;

    if (!threadHandle) {
        ARLOGe("Error starting tracking thread: empty THREAD_HANDLE_T.\n");
        return (NULL);
    }
    trackingInitHandle = (TrackingInitHandle *)threadGetArg(threadHandle);
    if (!trackingInitHandle) {
        ARLOGe("Error starting tracking thread: empty trackingInitHandle.\n");
        return (NULL);
    }
    kpmHandle = trackingInitHandle->kpmHandle;
    imagePtr  = trackingInitHandle->imagePtr;
    if (!kpmHandle || !imagePtr) {
        ARLOGe("Error starting tracking thread: empty kpmHandle/imagePtr.\n");
        return (NULL);
    }
    ARLOGi("Start tracking thread.\n");

    kpmGetResult( kpmHandle, &kpmResult, &kpmResultNum );

    for(;;) {
        if( threadStartWait(threadHandle) < 0 ) break;

        kpmMatching(kpmHandle, imagePtr);
        trackingInitHandle->flag = 0;
        for( i = 0; i < kpmResultNum; i++ ) {
            if( kpmResult[i].camPoseF != 0 ) continue;
            ARLOGd("kpmGetPose OK.\n");
            if( trackingInitHandle->flag == 0 || err > kpmResult[i].error ) { // Take the first or best result.
                trackingInitHandle->flag = 1;
                trackingInitHandle->page = kpmResult[i].pageNo;
                for (j = 0; j < 3; j++) for (k = 0; k < 4; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
                err = kpmResult[i].error;
            }
        }

        threadEndSignal(threadHandle);
    }

    ARLOGi("End tracking thread.\n");
    return (NULL);
}
Developer ID: AkiraSoumi, Project: artoolkit5, Lines of code: 51, Source file: trackingSub.c
Example 6: ar2WriteImageSet
int ar2WriteImageSet(char *filename, AR2ImageSetT *imageSet)
{
    FILE          *fp;
    AR2JpegImageT  jpegImage;
    int            i;
    size_t         len;
    const char     ext[] = ".iset";
    char          *buf;

    len = strlen(filename) + strlen(ext) + 1; // +1 for nul terminator.
    arMalloc(buf, char, len);
    sprintf(buf, "%s%s", filename, ext);
    if ((fp = fopen(buf, "wb")) == NULL)
    {
        ARLOGe("Error: unable to open file '%s' for writing.\n", buf);
        free(buf);
        return (-1);
    }
    free(buf);

    if (fwrite(&(imageSet->num), sizeof(imageSet->num), 1, fp) != 1)
        goto bailBadWrite;

    jpegImage.xsize = imageSet->scale[0]->xsize;
    jpegImage.ysize = imageSet->scale[0]->ysize;
    jpegImage.dpi   = imageSet->scale[0]->dpi;
    jpegImage.nc    = 1;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    jpegImage.image = imageSet->scale[0]->imgBWBlur[0];
#else
    jpegImage.image = imageSet->scale[0]->imgBW;
#endif

    if (ar2WriteJpegImage2(fp, &jpegImage, AR2_DEFAULT_JPEG_IMAGE_QUALITY) < 0)
        goto bailBadWrite;

    for (i = 1; i < imageSet->num; i++)
    {
        if (fwrite(&(imageSet->scale[i]->dpi), sizeof(imageSet->scale[i]->dpi), 1, fp) != 1)
            goto bailBadWrite;
    }

    fclose(fp);
    return 0;

bailBadWrite:
    ARLOGe("Error saving image set: error writing data.\n");
    fclose(fp);
    return (-1);
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 51, Source file: imageSet.c
Example 7: setupCamera
static int setupCamera(const char *cparam_name, char *vconf, ARParamLT **cparamLT_p)
{
    ARParam         cparam;
    int             xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Open the video path.
    if (arVideoOpen(vconf) < 0) {
        ARLOGe("setupCamera(): Unable to open connection to camera.\n");
        return (FALSE);
    }

    // Find the size of the window.
    if (arVideoGetSize(&xsize, &ysize) < 0) {
        ARLOGe("setupCamera(): Unable to determine camera frame size.\n");
        arVideoClose();
        return (FALSE);
    }
    ARLOGi("Camera image size (x,y) = (%d,%d)\n", xsize, ysize);

    // Get the format in which the camera is returning pixels.
    pixFormat = arVideoGetPixelFormat();
    if (pixFormat == AR_PIXEL_FORMAT_INVALID) {
        ARLOGe("setupCamera(): Camera is using unsupported pixel format.\n");
        arVideoClose();
        return (FALSE);
    }

    // Load the camera parameters, resize for the window and init.
    if (arParamLoad(cparam_name, 1, &cparam) < 0) {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cparam_name);
        arVideoClose();
        return (FALSE);
    }
    if (cparam.xsize != xsize || cparam.ysize != ysize) {
        ARLOGw("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
#ifdef DEBUG
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
#endif
    if ((*cparamLT_p = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
        ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
        arVideoClose();
        return (FALSE);
    }

    return (TRUE);
}
Developer ID: Belial2010, Project: artoolkit5, Lines of code: 50, Source file: simpleOSG.c
Example 8: videoAndroidNativeCaptureStart
bool videoAndroidNativeCaptureStart(VIDEO_ANDROID_NATIVE_CAPTURE *nc, AR_VIDEO_FRAME_READY_CALLBACK callback, void *userdata)
{
    int err;
    bool ret = true;

    ARLOGd("videoAndroidNativeCaptureStart().\n");

    if (!nc)
        return false;

    // Don't start if already started.
    if (nc->frameBuffers[0] || nc->frameBuffers[1])
    {
        ARLOGe("videoAndroidNativeCaptureStart called again.\n");
        return false;
    }

    // Create the frame buffers.
    pthread_mutex_lock(&nc->frameLock);
    nc->frameBufferLength = (nc->frameWidth * nc->frameHeight * 3) / 2; // Assume NV21/NV12 format.
    nc->frameBuffersStatus[0] = nc->frameBuffersStatus[1] = EMPTY;
    nc->frameBuffers[0] = (unsigned char*)malloc(nc->frameBufferLength);
    nc->frameBuffers[1] = (unsigned char*)malloc(nc->frameBufferLength);
    if (!nc->frameBuffers[0] || !nc->frameBuffers[1])
    {
        ARLOGe("Out of memory!\n");
        ret = false;
    }
    else
    {
        nc->frameReadyCallback = callback;
        if (callback)
        {
            // Start the frameReadyNotifierThread.
            nc->frameReadyCallbackUserdata = userdata;
            nc->frameReadyNotifierThreadShouldQuit = false;
            if ((err = pthread_create(&(nc->frameReadyNotifierThread), NULL, frameReadyNotifier, (void*)nc)) != 0)
            {
                ARLOGe("videoAndroidNativeCaptureStart(): Error %d creating thread.\n", err);
                ret = false;
            }
        }
    }
    pthread_mutex_unlock(&nc->frameLock);

    ARLOGd("/videoAndroidNativeCaptureStart nc->frameBufferLength=%d.\n", nc->frameBufferLength);
    return ret;
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 50, Source file: videoAndroidNativeCapture.cpp
Example 9: glmReadOBJ
bool ARTApp::init(const char *cparamName, const char *pattName, const char *objModelFile, float pattWidth, float modelScale)
{
    if (arHandle) // Already initialized.
        return false;

    if (!setupCamera(cparamName, "", &cParam, &arHandle, &ar3DHandle)) {
        return false;
    }
    if (!setupMarker(pattName, &pattID, arHandle, &pattHandle)) {
        return false;
    }

    {
        objModel = glmReadOBJ((char*)objModelFile);
        if (!objModel)
        {
            ARLOGe("Unable to load obj model file.\n");
            return false;
        }
        glmUnitize(objModel);
        glmScale(objModel, pattWidth*modelScale);
    }

    this->pattWidth = pattWidth;
    return true;
}
Developer ID: imbinwang, Project: simpleARDIY, Lines of code: 27, Source file: DIY.cpp
Example 10: ARLOGe
// N.B. This function is duplicated in libARvideo, so that libARvideo doesn't need to
// link to libAR. Therefore, if changes are made here they should be duplicated there.
const char *arUtilGetPixelFormatName(const AR_PIXEL_FORMAT arPixelFormat)
{
    const char *names[] = {
        "AR_PIXEL_FORMAT_RGB",
        "AR_PIXEL_FORMAT_BGR",
        "AR_PIXEL_FORMAT_RGBA",
        "AR_PIXEL_FORMAT_BGRA",
        "AR_PIXEL_FORMAT_ABGR",
        "AR_PIXEL_FORMAT_MONO",
        "AR_PIXEL_FORMAT_ARGB",
        "AR_PIXEL_FORMAT_2vuy",
        "AR_PIXEL_FORMAT_yuvs",
        "AR_PIXEL_FORMAT_RGB_565",
        "AR_PIXEL_FORMAT_RGBA_5551",
        "AR_PIXEL_FORMAT_RGBA_4444",
        "AR_PIXEL_FORMAT_420v",
        "AR_PIXEL_FORMAT_420f",
        "AR_PIXEL_FORMAT_NV21"
    };
    if ((int)arPixelFormat < 0 || (int)arPixelFormat > AR_PIXEL_FORMAT_MAX) {
        ARLOGe("arUtilGetPixelFormatName: Error, unrecognised pixel format (%d).\n", (int)arPixelFormat);
        return (NULL);
    }
    return (names[(int)arPixelFormat]);
}
Developer ID: Avatarchik, Project: nft_generator, Lines of code: 27, Source file: arUtil.c
Example 11: ar2VideoGetAbsMaxValue1394
int ar2VideoGetAbsMaxValue1394(AR2VideoParam1394T *vid, int paramName, ARdouble *value)
{
    dc1394feature_t feature;
    float min, max;

    switch (paramName)
    {
        case AR_VIDEO_1394_GAMMA:
            feature = DC1394_FEATURE_GAMMA;
            break;
        default:
            return -1;
    }

    if (dc1394_feature_get_absolute_boundaries(vid->camera, feature, &min, &max) != DC1394_SUCCESS)
    {
        ARLOGe("unable to get max value.\n");
        return -1;
    }

    *value = (float)max;
    return 0;
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 25, Source file: video1394V2setting.c
Example 12: startWMC
static bool startWMC(AR2VideoParamWinMCT *vid, const int width, const int height)
{
    if (!vid || !vid->wmc) return false;

    if (vid->wmc->Capturing()) {
        ARLOGe("Windows.Media.Capture already started.\n");
        return false;
    }
    if (!vid->wmc->StartCapture(width, height, getWMCVideoMediaSubTypeForARPixelFormat(vid->format), vid->devNum - 1, vid->preferredDeviceLocation, errorWMC, (void *)vid)) {
        ARLOGe("Error starting capture.\n");
        return false;
    }
    return true;
}
Developer ID: nuttonutto, Project: artoolkit5, Lines of code: 16, Source file: videoWinMC.cpp
Example 13: main
int main(int argc, char *argv[])
{
    ARParam cparam;
    // ARParamLT *cparamLT;
    float   trans[3][4];
    float   pos[2];
    float   dpi[2];
    // char name[1024], ext[1024];
    int     i, j;
    float   z;

    init(argc, argv);

    if (!cpara)
        cpara = cparaDefault;
    // ar2UtilDivideExt( cpara, name, ext );

    // Load the camera parameters, resize for the window and init.
    // if( arParamLoad(name, ext, 1, &cparam) < 0 )
    if (arParamLoad(cpara, 1, &cparam) < 0)
    {
        ARLOGe("setupCamera(): Error loading parameter file %s for camera.\n", cpara);
        exit(-1);
    }
    if (xsize != -1 && ysize != -1 && (cparam.xsize != xsize || cparam.ysize != ysize))
    {
        ARLOG("*** Camera Parameter resized from %d, %d. ***\n", cparam.xsize, cparam.ysize);
        arParamChangeSize(&cparam, xsize, ysize, &cparam);
    }
    ARLOG("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
    // if ((cparamLT = arParamLTCreate(&cparam, AR_PARAM_LT_DEFAULT_OFFSET)) == NULL) {
    //     ARLOGe("setupCamera(): Error: arParamLTCreate.\n");
    //     exit(-1);
    // }

    pos[0] = 0.0;
    pos[1] = 0.0;
    for (j = 0; j < 3; j++)
        for (i = 0; i < 4; i++)
            trans[j][i] = ((i == j) ? 1.0 : 0.0);

    for (i = 10; i <= 1000; i *= 10)
    {
        for (j = 1; j < 10; j++)
        {
            z = j * i;
            trans[2][3] = z;
            ar2GetResolution2(&cparam, trans, pos, dpi);
            ARLOG("Distance: %f [mm] --> Resolution = %10.5f, %10.5f [DPI]\n", z, dpi[0], dpi[1]);
        }
    }

    return (0);
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 60, Source file: checkResolution.c
Example 14: videoAndroidNativeCaptureOpen
VIDEO_ANDROID_NATIVE_CAPTURE* videoAndroidNativeCaptureOpen(int cameraIndex)
{
    CameraActivity::ErrorCode ca_err;

    ARLOGd("videoAndroidNativeCaptureOpen(%d).\n", cameraIndex);

    VIDEO_ANDROID_NATIVE_CAPTURE *nc = (VIDEO_ANDROID_NATIVE_CAPTURE*)calloc(1, sizeof(VIDEO_ANDROID_NATIVE_CAPTURE));
    if (!nc)
    {
        ARLOGe("Out of memory!\n");
        return (NULL);
    }

    nc->ca = new ARToolKitVideoAndroidCameraActivity(nc);
    if (!nc->ca)
    {
        ARLOGe("Unable to create native connection to camera.\n");
        goto bail;
    }

    // Lock manages contention between user thread, CameraActivity::onFrameBuffer thread (might be same as user thread), and frameReadyNotifierThread.
    pthread_mutex_init(&nc->frameLock, NULL);
    pthread_cond_init(&nc->frameReadyNotifierThreadCondGo, NULL);

    ca_err = nc->ca->connect(cameraIndex);
    if (ca_err != CameraActivity::NO_ERROR)
    {
        ARLOGe("Error %d opening native connection to camera.\n", ca_err);
        goto bail1;
    }

    nc->frameWidth  = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEWIDTH);
    nc->frameHeight = (int)nc->ca->getProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT);

    ARLOGd("/videoAndroidNativeCaptureOpen %dx%d.\n", nc->frameWidth, nc->frameHeight);
    return (nc);

bail1:
    delete(nc->ca);
    pthread_cond_destroy(&nc->frameReadyNotifierThreadCondGo);
    pthread_mutex_destroy(&nc->frameLock);
bail:
    free(nc);
    return (NULL);
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 46, Source file: videoAndroidNativeCapture.cpp
Example 15: arUtilChangeToResourcesDirectory
int arUtilChangeToResourcesDirectory(AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behavior, const char *path)
#endif // Closes an #ifdef ANDROID block; the Android-specific alternative signature is not included in this excerpt.
{
    char *wpath;
    AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR behaviorW;

    if (behavior == AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_BEST) {
#if defined(__APPLE__)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_BUNDLE_RESOURCES_DIR;
#elif defined(ANDROID)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_APP_CACHE_DIR;
#elif defined(_WIN32) || defined(__linux)
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_EXECUTABLE_DIR;
#else
        behaviorW = AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_CWD;
#endif
    } else {
        behaviorW = behavior;
    }

    if (behaviorW != AR_UTIL_RESOURCES_DIRECTORY_BEHAVIOR_USE_SUPPLIED_PATH) {
#ifdef ANDROID
        wpath = arUtilGetResourcesDirectoryPath(behavior, instanceOfAndroidContext);
#else
        wpath = arUtilGetResourcesDirectoryPath(behavior);
#endif
        if (wpath) {
            if (chdir(wpath) != 0) {
                ARLOGe("Error: Unable to change working directory to '%s'.\n", wpath);
                ARLOGperror(NULL);
                free(wpath);
                return (-1);
            }
            free(wpath);
        }
    }
    if (path) {
        if (chdir(path) != 0) {
            ARLOGe("Error: Unable to change working directory to '%s'.\n", path);
            ARLOGperror(NULL);
            return (-1);
        }
    }
    return (0);
}
Developer ID: Avatarchik, Project: nft_generator, Lines of code: 46, Source file: arUtil.c
Example 16: arglCameraFrustumRH
void arglCameraFrustumRH(const ARParam *cparam, const ARdouble focalmin, const ARdouble focalmax, ARdouble m_projection[16])
{
    ARdouble icpara[3][4];
    ARdouble trans[3][4];
    ARdouble p[3][3], q[4][4];
    int      width, height;
    int      i, j;

    width  = cparam->xsize;
    height = cparam->ysize;

    if (arParamDecompMat(cparam->mat, icpara, trans) < 0) {
        ARLOGe("arglCameraFrustum(): arParamDecompMat() indicated parameter error.\n");
        return;
    }
    for (i = 0; i < 4; i++) {
        icpara[1][i] = (height - 1)*(icpara[2][i]) - icpara[1][i];
    }

    for(i = 0; i < 3; i++) {
        for(j = 0; j < 3; j++) {
            p[i][j] = icpara[i][j] / icpara[2][2];
        }
    }
    q[0][0] = (2.0 * p[0][0] / (width - 1));
    q[0][1] = (2.0 * p[0][1] / (width - 1));
    q[0][2] = -((2.0 * p[0][2] / (width - 1)) - 1.0);
    q[0][3] = 0.0;

    q[1][0] = 0.0;
    q[1][1] = -(2.0 * p[1][1] / (height - 1));
    q[1][2] = -((2.0 * p[1][2] / (height - 1)) - 1.0);
    q[1][3] = 0.0;

    q[2][0] = 0.0;
    q[2][1] = 0.0;
    q[2][2] = (focalmax + focalmin)/(focalmin - focalmax);
    q[2][3] = 2.0 * focalmax * focalmin / (focalmin - focalmax);

    q[3][0] = 0.0;
    q[3][1] = 0.0;
    q[3][2] = -1.0;
    q[3][3] = 0.0;

    for (i = 0; i < 4; i++) { // Row.
        // First 3 columns of the current row.
        for (j = 0; j < 3; j++) { // Column.
            m_projection[i + j*4] = q[i][0] * trans[0][j] +
                                    q[i][1] * trans[1][j] +
                                    q[i][2] * trans[2][j];
        }
        // Fourth column of the current row.
        m_projection[i + 3*4] = q[i][0] * trans[0][3] +
                                q[i][1] * trans[1][3] +
                                q[i][2] * trans[2][3] +
                                q[i][3];
    }
}
Developer ID: Ray0427, Project: artoolkit-ios, Lines of code: 58, Source file: gsub_lite.c
Example 17: setupMovie
static int setupMovie(const char *path)
{
    char *movieVconf;
    int   len;
    int   xsize, ysize;
    AR_PIXEL_FORMAT pixFormat;

    // Construct the vconf string.
    arMalloc(movieVconf, char, 2048); // 2Kb for URL.
    sprintf(movieVconf, "-device=QUICKTIME -movie=\""); // Make sure we're using the QuickTime video input.
    len = (int)strlen(movieVconf);
    strncat(movieVconf + len, path, 2048 - len - 1);
    len = (int)strlen(movieVconf);
    strncat(movieVconf + len, "\" -loop -pause", 2048 - len - 1); // Start the movie paused. It will be unpaused in mainLoop().

    // Open the movie.
    gMovieVideo = ar2VideoOpen(movieVconf);
    free(movieVconf);
    if (!gMovieVideo)
    {
        ARLOGe("setupMovie(): Unable to open movie.\n");
        return (FALSE);
    }

    // Find the size of the movie.
    if (ar2VideoGetSize(gMovieVideo, &xsize, &ysize) < 0)
        return (FALSE);

    // Get the pixel format of the movie.
    pixFormat = ar2VideoGetPixelFormat(gMovieVideo);
    if (pixFormat == AR_PIXEL_FORMAT_INVALID)
    {
        ARLOGe("setupMovie(): Movie is using unsupported pixel format.\n");
        return (FALSE);
    }

    // Set up an ARParam object for the movie input.
    arParamClear(&gMovieCparam, xsize, ysize, AR_DIST_FUNCTION_VERSION_DEFAULT);

    // For convenience, we will use gsub_lite to draw the actual pixels. Set it up now.
    gMovieArglSettings = arglSetupForCurrentContext(&gMovieCparam, pixFormat);
    arglDistortionCompensationSet(gMovieArglSettings, 0);

    return (TRUE);
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 45, Source file: simpleMovie.c
Example 18: setupMarker
static int setupMarker(const char *patt_name, int *patt_id, ARHandle *arhandle, ARPattHandle **pattHandle_p)
{
    if ((*pattHandle_p = arPattCreateHandle()) == NULL) {
        ARLOGe("setupMarker(): Error: arPattCreateHandle.\n");
        return (FALSE);
    }

    // Loading only 1 pattern in this example.
    if ((*patt_id = arPattLoad(*pattHandle_p, patt_name)) < 0) {
        ARLOGe("setupMarker(): Error loading pattern file %s.\n", patt_name);
        arPattDeleteHandle(*pattHandle_p);
        return (FALSE);
    }

    arPattAttach(arhandle, *pattHandle_p);

    return (TRUE);
}
Developer ID: afauch, Project: artoolkit5, Lines of code: 18, Source file: simpleMovie.c
Example 19: mainLoop
static void mainLoop(void)
{
    int        i;
    static int imageNumber = 0;
    static int ms_prev;
    int        ms;
    float      s_elapsed;
    ARUint8   *image;

    // Find out how long since mainLoop() last ran.
    ms = glutGet(GLUT_ELAPSED_TIME);
    s_elapsed = (float)(ms - ms_prev) * 0.001f;
    if (s_elapsed < 0.01f)
        return; // Don't update more often than 100 Hz.
    ms_prev = ms;

    // Grab a video frame.
    if ((image = arVideoGetImage()) != NULL)
    {
        gARTImage = image; // Save the fetched image.

        if (gARTImageSavePlease)
        {
            char imageNumberText[15];
            sprintf(imageNumberText, "image-%04d.jpg", imageNumber++);
            if (arVideoSaveImageJPEG(gARHandle->xsize, gARHandle->ysize, gARHandle->arPixelFormat, gARTImage, imageNumberText, 75, 0) < 0)
            {
                ARLOGe("Error saving video image.\n");
            }
            gARTImageSavePlease = FALSE;
        }

        gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.

        // Detect the markers in the video frame.
        if (arDetectMarker(gARHandle, gARTImage) < 0)
        {
            exit(-1);
        }

        // If marker config files were specified, evaluate detected patterns against them now.
        for (i = 0; i < gMultiConfigCount; i++)
        {
            if (gRobustFlag)
                gMultiErrs[i] = arGetTransMatMultiSquareRobust(gAR3DHandle, arGetMarker(gARHandle), arGetMarkerNum(gARHandle), gMultiConfigs[i]);
            else
                gMultiErrs[i] = arGetTransMatMultiSquare(gAR3DHandle, arGetMarker(gARHandle), arGetMarkerNum(gARHandle), gMultiConfigs[i]);
            // if (gMultiConfigs[i]->prevF != 0) ARLOGe("Found multimarker set %d, err=%0.3f\n", i, gMultiErrs[i]);
        }

        // Tell GLUT the display has changed.
        glutPostRedisplay();
    }
}
Developer ID: hyyh619, Project: ARToolKit_5.3.1, Lines of code: 57, Source file: check_id.c
Example 20: init
static int init( int argc, char *argv[] )
{
    ARGViewport  viewport;
    char        *filename = NULL;
    int          xmax, ymax;
    float        xzoom, yzoom;
    float        zoom;
    int          i;

    for( i = 1; i < argc; i++ ) {
        if( filename == NULL ) filename = argv[i];
        else usage(argv[0]);
    }
    if (!filename || !filename[0]) usage(argv[0]);

    ARLOG("Read ImageSet.\n");
    ar2UtilRemoveExt( filename );
    imageSet = ar2ReadImageSet( filename );
    if( imageSet == NULL ) {
        ARLOGe("file open error: %s.iset\n", filename );
        exit(0);
    }
    ARLOG(" end.\n");

    arMalloc(vp, ARGViewportHandle *, imageSet->num);

    xmax = ymax = 0;
    for( i = 0; i < imageSet->num; i++ ) {
        if( imageSet->scale[i]->xsize > xmax ) xmax = imageSet->scale[i]->xsize;
        if( imageSet->scale[i]->ysize > ymax ) ymax = imageSet->scale[i]->ysize;
    }
    xzoom = yzoom = 1.0;
    while( xmax > winXsize*xzoom ) xzoom += 1.0;
    while( ymax > winYsize*yzoom ) yzoom += 1.0;
    if( xzoom > yzoom ) zoom = 1.0/xzoom;
    else                zoom = 1.0/yzoom;
    winXsize = xmax * zoom;
    winYsize = ymax * zoom;
    ARLOG("Size = (%d,%d) Zoom = %f\n", xmax, ymax, zoom);
    argCreateWindow( winXsize, winYsize );

    for( i = 0; i < imageSet->num; i++ ) {
        viewport.sx = viewport.sy = 0;
        viewport.xsize = imageSet->scale[i]->xsize * zoom;
        viewport.ysize = imageSet->scale[i]->ysize * zoom;
        vp[i] = argCreateViewport( &viewport );
        argViewportSetImageSize( vp[i], imageSet->scale[i]->xsize, imageSet->scale[i]->ysize );
        argViewportSetDispMethod( vp[i], AR_GL_DISP_METHOD_TEXTURE_MAPPING_FRAME );
        //argViewportSetDispMethod( vp[i], AR_GL_DISP_METHOD_GL_DRAW_PIXELS );
        argViewportSetDispMode( vp[i], AR_GL_DISP_MODE_FIT_TO_VIEWPORT );
        argViewportSetDistortionMode( vp[i], AR_GL_DISTORTION_COMPENSATE_DISABLE );
    }
    reportCurrentDPI();

    return 0;
}
Developer ID: AadityaDev, Project: artoolkit5, Lines of code: 57, Source file: dispImageSet.c
Note: The ARLOGe function examples in this article were collected from source-code and documentation hosting platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers; copyright remains with the original authors. Refer to each project's license before redistributing or using the code, and do not reproduce without permission.