
C++ android::sp Class Code Examples


This article collects typical usage examples of the C++ android::sp class. If you are unsure what android::sp is for, how it is used, or what real-world usage looks like, the curated examples below may help.



The following presents 20 code examples of the sp class, sorted by popularity by default.
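Before the collected examples, a minimal sketch of how android::sp is typically used may be helpful. android::sp is Android's strong smart pointer for classes deriving from android::RefBase; the class name MyRefCounted below is hypothetical and used only for illustration, not taken from any example in this article.

#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

// Hypothetical reference-counted class, for illustration only.
class MyRefCounted : public android::RefBase {
public:
    void doWork() {}
};

void sketch() {
    // Assigning a raw pointer to an sp<> takes a strong reference.
    android::sp<MyRefCounted> obj = new MyRefCounted();

    // sp<> forwards operator-> and operator* like a raw pointer.
    obj->doWork();

    // Copying bumps the reference count; the object is destroyed
    // when the last sp<> referencing it goes away.
    android::sp<MyRefCounted> alias = obj;

    // get() returns the underlying raw pointer and is commonly used
    // for null checks (as in several examples below).
    if (obj.get() != nullptr) {
        obj->doWork();
    }

    // clear() drops this particular strong reference.
    obj.clear();
}

Many of the examples below follow the same pattern: take an android::sp<T> parameter by const reference, check it with get() or a comparison against 0/NULL, and then call through it as if it were a raw pointer.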

Example 1: postData

void CameraControl::postData(
		int32_t msg_type,
		const android::sp<android::IMemory>& data,
		camera_frame_metadata_t* metadata)
{
	REPORT_FUNCTION();

	if (!listener)
		return;

	switch (msg_type) {
	case CAMERA_MSG_RAW_IMAGE:
		if (listener->on_data_raw_image_cb)
			listener->on_data_raw_image_cb(data->pointer(), data->size(), listener->context);
		break;
	case CAMERA_MSG_COMPRESSED_IMAGE:
		if (listener->on_data_compressed_image_cb)
			listener->on_data_compressed_image_cb(data->pointer(), data->size(), listener->context);
		break;
	default:
		break;
	}

	camera->releaseRecordingFrame(data);
}
Developer: F35X70, Project: libhybris, Lines: 25, Source: camera_compatibility_layer.cpp


Example 2:

bool
TextureHostOGL::SetReleaseFence(const android::sp<android::Fence>& aReleaseFence)
{
  if (!aReleaseFence.get() || !aReleaseFence->isValid()) {
    // HWC might not provide Fence.
    // In this case, HWC implicitly handles buffer's fence.
    return false;
  }

  if (!mReleaseFence.get()) {
    mReleaseFence = aReleaseFence;
  } else {
    android::sp<android::Fence> mergedFence = android::Fence::merge(
                  android::String8::format("TextureHostOGL"),
                  mReleaseFence, aReleaseFence);
    if (!mergedFence.get()) {
      // synchronization is broken, the best we can do is hope fences
      // signal in order so the new fence will act like a union.
      // This error handling is same as android::ConsumerBase does.
      mReleaseFence = aReleaseFence;
      return false;
    }
    mReleaseFence = mergedFence;
  }
  return true;
}
Developer: Acidburn0zzz, Project: tor-browser, Lines: 26, Source: TextureHostOGL.cpp


Example 3: submitRequestFrame

MERROR
PipelineModelBase::
submitRequestFrame(
    android::sp<IPipelineFrame> pFrame
)
{
    sp<IPipelineNodeMap const> pPipelineNodeMap = pFrame->getPipelineNodeMap();
    if  ( pPipelineNodeMap == 0 || pPipelineNodeMap->isEmpty() ) {
        MY_LOGE("[frameNo:%d] Bad PipelineNodeMap:%p", pFrame->getFrameNo(), pPipelineNodeMap.get());
        return DEAD_OBJECT;
    }
    //
    IPipelineDAG::NodeObj_T const nodeObj = pFrame->getPipelineDAG().getRootNode();
    sp<IPipelineNode> pNode = pPipelineNodeMap->nodeAt(nodeObj.val);
    if  ( pNode == 0 ) {
        MY_LOGE("[frameNo:%d] Bad root node", pFrame->getFrameNo());
        return DEAD_OBJECT;
    }

    MERROR err = OK;
    RWLock::AutoRLock _l(mflushLock);
    if(mInFlush == MTRUE) {
        err = pNode->flush(pFrame);
    } else {
        err = pNode->queue(pFrame);
    }

    return err;
}
Developer: , Project: , Lines: 29, Source:


Example 4: unbind

int OpenSLMediaPlayerVisualizerJNIBinder::unbind(const android::sp<OpenSLMediaPlayerVisualizer> &visualizer) noexcept
{
    if (!visualizer.get())
        return OSLMP_RESULT_ILLEGAL_ARGUMENT;

    return visualizer->setInternalPeriodicCaptureThreadEventListener(nullptr, 0, false, false);
}
Developer: HKingz, Project: android-openslmediaplayer, Lines: 7, Source: OpenSLMediaPlayerVisualizerJNIBinder.cpp


Example 5: bind

int OpenSLMediaPlayerHQVisualizerJNIBinder::bind(const android::sp<OpenSLMediaPlayerHQVisualizer> &visualizer,
                                                 uint32_t rate, bool waveform, bool fft) noexcept
{

    if (!visualizer.get())
        return OSLMP_RESULT_ILLEGAL_ARGUMENT;

    return visualizer->setInternalPeriodicCaptureThreadEventListener(this, rate, waveform, fft);
}
Developer: HKingz, Project: android-openslmediaplayer, Lines: 9, Source: OpenSLMediaPlayerHQVisualizerJNIBinder.cpp


Example 6: android_Player_setPlayState

/**
 * pre-condition: gp != 0
 */
XAresult android_Player_setPlayState(const android::sp<android::GenericPlayer> &gp,
        SLuint32 playState,
        AndroidObjectState* pObjState)
{
    XAresult result = XA_RESULT_SUCCESS;
    AndroidObjectState objState = *pObjState;

    switch (playState) {
     case SL_PLAYSTATE_STOPPED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_STOPPED");
         gp->stop();
         }
         break;
     case SL_PLAYSTATE_PAUSED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PAUSED");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             break;
         case ANDROID_PREPARING:
             break;
         case ANDROID_READY:
             gp->pause();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     case SL_PLAYSTATE_PLAYING: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PLAYING");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             // intended fall through
         case ANDROID_PREPARING:
             // intended fall through
         case ANDROID_READY:
             gp->play();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     default:
         // checked by caller, should not happen
         break;
     }

    return result;
}
Developer: DARKPOP, Project: frameworks_wilhelm, Lines: 59, Source: MediaPlayer_to_android.cpp


Example 7: setVideoSurfaceTexture

    android::status_t setVideoSurfaceTexture(const android::sp<android::SurfaceTexture> &surfaceTexture)
    {
        REPORT_FUNCTION();

        surfaceTexture->getBufferQueue()->setBufferCount(5);
        texture = surfaceTexture;
        texture->setFrameAvailableListener(frame_listener);

        return MediaPlayer::setVideoSurfaceTexture(surfaceTexture->getBufferQueue());
    }
Developer: CoryXie, Project: ubuntu-phablet_libhybris, Lines: 10, Source: media_compatibility_layer.cpp


Example 8: SurfaceTexture_setSurfaceTexture

static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<android::SurfaceTexture>& surfaceTexture)
{
    android::SurfaceTexture* const p = (android::SurfaceTexture*)thiz->mSurfaceTexture;
    if (surfaceTexture.get()) {
        surfaceTexture->incStrong(thiz);
    }
    if (p) {
        p->decStrong(thiz);
    }
    thiz->mSurfaceTexture = (Int32)surfaceTexture.get();
}
Developer: TheTypoMaster, Project: ElastosRDK5_0, Lines: 13, Source: CSurfaceTexture.cpp


Example 9: SurfaceTexture_setSurfaceTexture

static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<GLConsumer>& surfaceTexture)
{
    GLConsumer* const p = (GLConsumer*)thiz->mSurfaceTexture;
    if (surfaceTexture.get()) {
        surfaceTexture->incStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    thiz->mSurfaceTexture = (Int64)surfaceTexture.get();
}
Developer: , Project: , Lines: 13, Source:


Example 10: doPlaybackOrRecord

bool AudioPlaybackLocal::doPlaybackOrRecord(android::sp<Buffer>& buffer)
{
    if (buffer->amountToHandle() < (size_t)mSizes) {
        mSizes = buffer->amountToHandle();
    }
    if (pcm_write(mPcmHandle, buffer->getUnhanledData(), mSizes)) {
        LOGE("AudioPlaybackLocal error %s", pcm_get_error(mPcmHandle));
        return false;
    }
    buffer->increaseHandled(mSizes);
    LOGV("AudioPlaybackLocal::doPlaybackOrRecord %d", buffer->amountHandled());
    return true;
}
Developer: Abocer, Project: android-4.2_r1, Lines: 13, Source: AudioPlaybackLocal.cpp


Example 11: SurfaceTexture_setFrameAvailableListener

static void SurfaceTexture_setFrameAvailableListener(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ android::sp<GLConsumer::FrameAvailableListener> listener)
{
    GLConsumer::FrameAvailableListener* const p = (GLConsumer::FrameAvailableListener*)thiz->mFrameAvailableListener;
    if (listener.get()) {
        listener->incStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    thiz->mFrameAvailableListener = (Int64)listener.get();
}
Developer: , Project: , Lines: 13, Source:


Example 12: SurfaceTexture_setProducer

static void SurfaceTexture_setProducer(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<IGraphicBufferProducer>& producer)
{
    IGraphicBufferProducer* const p =
        (IGraphicBufferProducer*)thiz->mProducer;
    if (producer.get()) {
        producer->incStrong((void*)SurfaceTexture_setProducer);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setProducer);
    }
    thiz->mProducer = (Int64)producer.get();
}
Developer: , Project: , Lines: 14, Source:


Example 13: android_fxSend_attach

/**
 * pre-condition:
 *    ap != NULL
 *    for media players:
 *      ap->mAPlayer != 0
 *      ap->mTrackPlayer->mAudioTrack == 0
 *    for buffer queue players:
 *      ap->mAPlayer == 0
 *      ap->mTrackPlayer->mAudioTrack != 0 is optional; if no track yet then the setting is deferred
 */
android::status_t android_fxSend_attach(CAudioPlayer* ap, bool attach,
        const android::sp<android::AudioEffect>& pFx, SLmillibel sendLevel) {

    if (pFx == 0) {
        return android::INVALID_OPERATION;
    }

    // There are 3 cases:
    //  mAPlayer != 0 && mAudioTrack == 0 means playing decoded audio
    //  mAPlayer == 0 && mAudioTrack != 0 means playing PCM audio
    //  mAPlayer == 0 && mAudioTrack == 0 means player not fully configured yet
    // The asserts document and verify this.
    if (ap->mAPlayer != 0) {
        assert(ap->mTrackPlayer->mAudioTrack == 0);
        if (attach) {
            ap->mAPlayer->attachAuxEffect(pFx->id());
            ap->mAPlayer->setAuxEffectSendLevel( sles_to_android_amplification(sendLevel) );
        } else {
            ap->mAPlayer->attachAuxEffect(0);
        }
        return android::NO_ERROR;
    }

    if (ap->mTrackPlayer->mAudioTrack == 0) {
        // the player doesn't have an AudioTrack at the moment, so store this info to use it
        // when the AudioTrack becomes available
        if (attach) {
            ap->mAuxEffect = pFx;
        } else {
            ap->mAuxEffect.clear();
        }
        // we keep track of the send level, independently of the current audio player level
        ap->mAuxSendLevel = sendLevel - ap->mVolume.mLevel;
        return android::NO_ERROR;
    }

    if (attach) {
        android::status_t status = ap->mTrackPlayer->mAudioTrack->attachAuxEffect(pFx->id());
        //SL_LOGV("attachAuxEffect(%d) returned %d", pFx->id(), status);
        if (android::NO_ERROR == status) {
            status =
                ap->mTrackPlayer->mAudioTrack->setAuxEffectSendLevel(
                        sles_to_android_amplification(sendLevel) );
        }
        return status;
    } else {
        return ap->mTrackPlayer->mAudioTrack->attachAuxEffect(0);
    }
}
Developer: MIPS, Project: frameworks-wilhelm, Lines: 59, Source: android_Effect.cpp


Example 14: onDisplayConnected

    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::ISurfaceTexture>& surfaceTexture,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags)
    {
        if (surfaceTexture == NULL) {
            return;
        }

        android::sp<android::Surface> surface(new android::Surface(surfaceTexture));
        if (surface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                surfaceTexture.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        surfaceObj->SetSurface((Handle32)surface.get());

        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
Developer: TheTypoMaster, Project: ElastosRDK5_0, Lines: 30, Source: CRemoteDisplay.cpp


Example 15: onDisplayConnected

    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::IGraphicBufferProducer>& bufferProducer,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags,
        /* [in] */ uint32_t session)
    {
        if (bufferProducer == NULL) {
            return;
        }

        android::sp<android::Surface> surface(new android::Surface(bufferProducer));
        if (surface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                bufferProducer.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags, session);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
Developer: elastos, Project: Elastos5, Lines: 29, Source: CRemoteDisplay.cpp


Example 16: tf_write

int TestFrameworkClient::tf_write(const char *buf)
{
    int ret = 0, status = 0;
    bool send = false;

    status = tf_logging_status();

    switch(status) {
    case TF_LOGCAT:
        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG, buf);
        break;
    case TF_TESTFRAMEWORK:
    case TF_ALL:
        send = true;
        break;
    case TF_DISABLE:
        break;
    }

    if (send) {
#ifdef TF_FEATURE_MSGS_THROUGH_BINDER
        mTfDispacther->DispatchMsg(buf);
#else
        ret = TfWrite(buf);
#endif
    }

    return ret;
}
Developer: jsr-d9, Project: android_vendor_qcom_opensource, Lines: 29, Source: TestFrameworkApi.cpp


Example 17: doDownload

    void doDownload() {
        android::sp<Buffer> buffer = AudioSignalFactory::generateZeroSound(AudioHardware::E2BPS, 2,
                false);
        uint32_t prepareSend[] = {
                U32_ENDIAN_SWAP(AudioProtocol::ECmdDownload),
                U32_ENDIAN_SWAP(8),
                U32_ENDIAN_SWAP(0), //id
                U32_ENDIAN_SWAP(0)
        };
        uint32_t prepareReply[] = {
                U32_ENDIAN_SWAP((AudioProtocol::ECmdDownload & 0xffff) | 0x43210000),
                0,
                0
        };
        LOGD("reply 0x%x", prepareReply[0]);

        mTestSocket.setSendExpectation((char*)prepareSend, sizeof(prepareSend));
        // this is reply, but set expectation for reply first as it is sent after send
        mTestSocket.setReadExpectation((char*)prepareReply, sizeof(prepareReply));

        int id = -1;
        android::String8 name("1");
        ASSERT_TRUE(mRemoteAudio->downloadData(name, buffer, id));
        ASSERT_TRUE(id >= 0);
    }
Developer: 10114395, Project: android-5.0.0_r5, Lines: 25, Source: RemoteAudioFakeTcpTest.cpp


Example 18: tf_logging_status

int TestFrameworkClient::tf_logging_status() {
    int status = TF_DISABLE, time_now = 0;
    bool timeout = 0;

    //probe frequency set to 1, so lets not probe
    //any params whatever set initially they will be used
    if (mProbeFreq <= 1) {
        return mLogType;
    }

    time_now = ns2ms(systemTime());
    timeout = (time_now - tfTs >= mProbeFreq);

    //eventhough binder doesn't incur much overhead, lets not use it
    //everytime, one in few milliseconds fetch data from tf service
    if (timeout) {
        if (TfIsServiceRunning()) {
            BpTestFramework::Connect(mTfDispacther);

            if (mTfDispacther != 0) {
                mTfDispacther->DispatchGetInfo(mLogType, mEventType,
                                              mOpenInterval, mClosedInterval);

                if (!mEventType) {
                    mTfDispacther->ConnectReset();
                }

                if ((mLogType != TF_DISABLE) && mTfDispacther->IsConnectedAgain()) {
                    TfGetPropertyFilters();
                }
            }
        }
        else {
            TfUpdate();
            if (mLogType != TF_DISABLE) {
                TfGetPropertyFilters();
            }
        }
        status = mLogType;
        tfTs = time_now;
    }
    else {
        status = mLogType;
    }

    return status;
}
Developer: jsr-d9, Project: android_vendor_qcom_opensource, Lines: 47, Source: TestFrameworkApi.cpp


Example 19: android_prev_setPreset

//-----------------------------------------------------------------------------
android::status_t android_prev_setPreset(const android::sp<android::AudioEffect>& pFx,
        uint16_t preset) {
    android::status_t status = android_fx_setParam(pFx, REVERB_PARAM_PRESET,
            PRESETREVERB_PARAM_SIZE_MAX, &preset, sizeof(uint16_t));
    // enable the effect if the preset is different from SL_REVERBPRESET_NONE
    pFx->setEnabled(SL_REVERBPRESET_NONE != preset);
    return status;
}
Developer: MIPS, Project: frameworks-wilhelm, Lines: 9, Source: android_Effect.cpp


Example 20: getISurface

 static android::sp<android::ISurface> getISurface (const android::sp<android::Surface>& surface) {
     if (surface != NULL) {
         return surface->getISurface ();
     } else {
         ERROR ("surface == NULL!");
         return NULL;
     }
 }
Developer: treejames, Project: testSparrow, Lines: 8, Source: android_video.cpp



Note: The android::sp class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.

