
C# VR.VRTextureBounds_t Class Code Examples


This article collects typical usage examples of the C# class Valve.VR.VRTextureBounds_t. If you have been wondering what the VRTextureBounds_t class does, how to use it in C#, or what real-world usage looks like, the curated class examples below may help.



The VRTextureBounds_t class belongs to the Valve.VR namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the system recommend better C# code examples.
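
Before the examples, it helps to know the shape of the type itself: VRTextureBounds_t simply holds a normalized UV sub-rectangle that tells the compositor or overlay system which part of a submitted texture to use. A minimal sketch of the struct, with the field layout assumed from the openvr_api.cs C# binding:

    using System.Runtime.InteropServices;

    namespace Valve.VR
    {
        [StructLayout(LayoutKind.Sequential)]
        public struct VRTextureBounds_t
        {
            public float uMin; // left edge of the sub-rectangle, in normalized [0,1] texture coordinates
            public float vMin; // one vertical edge; whether v grows up or down depends on the graphics API
            public float uMax; // right edge
            public float vMax; // opposite vertical edge
        }
    }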

Example 1: SteamVR

    private SteamVR()
    {
        hmd = OpenVR.System;
        Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);

        compositor = OpenVR.Compositor;
        overlay = OpenVR.Overlay;

        // Setup render values
        uint w = 0, h = 0;
        hmd.GetRecommendedRenderTargetSize(ref w, ref h);
        sceneWidth = (float)w;
        sceneHeight = (float)h;

        float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
        hmd.GetProjectionRaw(EVREye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);

        float r_left = 0.0f, r_right = 0.0f, r_top = 0.0f, r_bottom = 0.0f;
        hmd.GetProjectionRaw(EVREye.Eye_Right, ref r_left, ref r_right, ref r_top, ref r_bottom);

        tanHalfFov = new Vector2(
            Mathf.Max(-l_left, l_right, -r_left, r_right),
            Mathf.Max(-l_top, l_bottom, -r_top, r_bottom));

        textureBounds = new VRTextureBounds_t[2];

        textureBounds[0].uMin = 0.5f + 0.5f * l_left / tanHalfFov.x;
        textureBounds[0].uMax = 0.5f + 0.5f * l_right / tanHalfFov.x;
        textureBounds[0].vMin = 0.5f - 0.5f * l_bottom / tanHalfFov.y;
        textureBounds[0].vMax = 0.5f - 0.5f * l_top / tanHalfFov.y;

        textureBounds[1].uMin = 0.5f + 0.5f * r_left / tanHalfFov.x;
        textureBounds[1].uMax = 0.5f + 0.5f * r_right / tanHalfFov.x;
        textureBounds[1].vMin = 0.5f - 0.5f * r_bottom / tanHalfFov.y;
        textureBounds[1].vMax = 0.5f - 0.5f * r_top / tanHalfFov.y;

        // Grow the recommended size to account for the overlapping fov
        sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
        sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);

        aspect = tanHalfFov.x / tanHalfFov.y;
        fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;

        eyes = new SteamVR_Utils.RigidTransform[] {
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(EVREye.Eye_Left)),
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(EVREye.Eye_Right)) };

        if (SystemInfo.graphicsDeviceVersion.StartsWith("OpenGL"))
            graphicsAPI = EGraphicsAPIConvention.API_OpenGL;
        else
            graphicsAPI = EGraphicsAPIConvention.API_DirectX;

        SteamVR_Utils.Event.Listen("initializing", OnInitializing);
        SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
        SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
        SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
        SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
    }
Developer: RandomTiger, Project: gvr-unity-sdk, Lines: 58, Source: SteamVR.cs
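
The bounds arithmetic above normalizes each eye's raw projection extents (left and top come back negative from GetProjectionRaw, hence the negations in the Mathf.Max calls) into the shared [0,1] UV range. A minimal sketch restating that mapping as a hypothetical helper (ComputeEyeBounds is not part of the SteamVR plugin; it assumes the same Unity and Valve.VR types used in the example):

    // Hypothetical helper restating the mapping above: divide each tangent extent by the
    // largest half-FOV tangent, then shift from [-1,1] into [0,1].
    static VRTextureBounds_t ComputeEyeBounds(float left, float right, float top, float bottom, Vector2 tanHalfFov)
    {
        var bounds = new VRTextureBounds_t();
        bounds.uMin = 0.5f + 0.5f * left   / tanHalfFov.x;
        bounds.uMax = 0.5f + 0.5f * right  / tanHalfFov.x;
        bounds.vMin = 0.5f - 0.5f * bottom / tanHalfFov.y;
        bounds.vMax = 0.5f - 0.5f * top    / tanHalfFov.y;
        return bounds;
    }
    // For a symmetric eye where -left == right == tanHalfFov.x, this yields uMin = 0 and uMax = 1,
    // i.e. that eye uses the full width of its texture.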


Example 2: VR_IVRCompositor_Submit

 internal static extern VRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, Hmd_Eye eEye, GraphicsAPIConvention eTextureType, IntPtr pTexture, ref VRTextureBounds_t pBounds, VRSubmitFlags_t nSubmitFlags);
Developer: billcrosbie, Project: openvr, Lines: 1, Source: openvr_api.cs


Example 3: SteamVR

    private SteamVR(System.IntPtr pHmd, System.IntPtr pCompositor, System.IntPtr pOverlay)
    {
        hmd = new CVRSystem(pHmd);
        Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);

        compositor = new CVRCompositor(pCompositor);
        overlay = new CVROverlay(pOverlay);

        var capacity = compositor.GetLastError(null, 0);
        if (capacity > 1)
        {
            var result = new System.Text.StringBuilder((int)capacity);
            compositor.GetLastError(result, capacity);
            Debug.Log("Compositor - " + result);
        }

        // Setup render values
        uint w = 0, h = 0;
        hmd.GetRecommendedRenderTargetSize(ref w, ref h);
        sceneWidth = (float)w;
        sceneHeight = (float)h;

        float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
        hmd.GetProjectionRaw(Hmd_Eye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);

        float r_left = 0.0f, r_right = 0.0f, r_top = 0.0f, r_bottom = 0.0f;
        hmd.GetProjectionRaw(Hmd_Eye.Eye_Right, ref r_left, ref r_right, ref r_top, ref r_bottom);

        tanHalfFov = new Vector2(
            Mathf.Max(-l_left, l_right, -r_left, r_right),
            Mathf.Max(-l_top, l_bottom, -r_top, r_bottom));

        textureBounds = new VRTextureBounds_t[2];

        textureBounds[0].uMin = 0.5f + 0.5f * l_left / tanHalfFov.x;
        textureBounds[0].uMax = 0.5f + 0.5f * l_right / tanHalfFov.x;
        textureBounds[0].vMin = 0.5f - 0.5f * l_bottom / tanHalfFov.y;
        textureBounds[0].vMax = 0.5f - 0.5f * l_top / tanHalfFov.y;

        textureBounds[1].uMin = 0.5f + 0.5f * r_left / tanHalfFov.x;
        textureBounds[1].uMax = 0.5f + 0.5f * r_right / tanHalfFov.x;
        textureBounds[1].vMin = 0.5f - 0.5f * r_bottom / tanHalfFov.y;
        textureBounds[1].vMax = 0.5f - 0.5f * r_top / tanHalfFov.y;

        Unity.SetSubmitParams(textureBounds[0], textureBounds[1], VRSubmitFlags_t.Submit_Default);

        // Grow the recommended size to account for the overlapping fov
        sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
        sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);

        aspect = tanHalfFov.x / tanHalfFov.y;
        fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;

        eyes = new SteamVR_Utils.RigidTransform[] {
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Left)),
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Right)) };

        if (SystemInfo.graphicsDeviceVersion.StartsWith("OpenGL"))
            graphicsAPI = GraphicsAPIConvention.API_OpenGL;
        else
            graphicsAPI = GraphicsAPIConvention.API_DirectX;

        SteamVR_Utils.Event.Listen("initializing", OnInitializing);
        SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
        SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
        SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
        SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
    }
Developer: 8BitRain, Project: EcoWorld, Lines: 68, Source: SteamVR.cs


Example 4: SteamVR

    private SteamVR(System.IntPtr pHmd, System.IntPtr pCompositor, System.IntPtr pOverlay)
    {
        hmd = new CVRSystem(pHmd);
        Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);

        compositor = new CVRCompositor(pCompositor);
        overlay = new CVROverlay(pOverlay);

        var device = new UnityGraphicsDevice();
        GetUnityGraphicsDevice(ref device);
        switch (device.type)
        {
            case GfxDeviceRenderer.kGfxRendererD3D11:
                compositor.SetGraphicsDevice(Compositor_DeviceType.D3D11, device.ptr);
                break;
            case GfxDeviceRenderer.kGfxRendererOpenGL:
                compositor.SetGraphicsDevice(Compositor_DeviceType.OpenGL, device.ptr);
                break;
            default:
                throw new System.Exception("Unsupported device type.");
        }

        var capacity = compositor.GetLastError(null, 0);
        if (capacity > 1)
        {
            var result = new System.Text.StringBuilder((int)capacity);
            compositor.GetLastError(result, capacity);
            Debug.Log("Compositor - " + result);
        }

        // Register for a callback if our graphics device goes away, so we can properly clean up.
        var resetDelegate = new UnityResetDelegate(SteamVR.SafeDispose);
        callbackHandle = GCHandle.Alloc(resetDelegate);
        SetUnityResetCallback(Marshal.GetFunctionPointerForDelegate(resetDelegate));

        // Hook up the render thread present event just in case we wind up needing to use this.
        var error = HmdError.None;
        SetUnityRenderCallback(OpenVR.GetGenericInterface(IVRHmdDistortPresent_Version, ref error));

        // Setup render values
        uint w = 0, h = 0;
        hmd.GetRecommendedRenderTargetSize(ref w, ref h);
        sceneWidth = (float)w;
        sceneHeight = (float)h;

        float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
        hmd.GetProjectionRaw(Hmd_Eye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);

        float r_left = 0.0f, r_right = 0.0f, r_top = 0.0f, r_bottom = 0.0f;
        hmd.GetProjectionRaw(Hmd_Eye.Eye_Right, ref r_left, ref r_right, ref r_top, ref r_bottom);

        tanHalfFov = new Vector2(
            Mathf.Max(-l_left, l_right, -r_left, r_right),
            Mathf.Max(-l_top, l_bottom, -r_top, r_bottom));

        textureBounds = new VRTextureBounds_t[2];

        textureBounds[0].uMin = 0.5f + 0.5f * l_left / tanHalfFov.x;
        textureBounds[0].uMax = 0.5f + 0.5f * l_right / tanHalfFov.x;
        textureBounds[0].vMin = 0.5f - 0.5f * l_bottom / tanHalfFov.y;
        textureBounds[0].vMax = 0.5f - 0.5f * l_top / tanHalfFov.y;

        textureBounds[1].uMin = 0.5f + 0.5f * r_left / tanHalfFov.x;
        textureBounds[1].uMax = 0.5f + 0.5f * r_right / tanHalfFov.x;
        textureBounds[1].vMin = 0.5f - 0.5f * r_bottom / tanHalfFov.y;
        textureBounds[1].vMax = 0.5f - 0.5f * r_top / tanHalfFov.y;

        // Grow the recommended size to account for the overlapping fov
        sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
        sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);

        aspect = tanHalfFov.x / tanHalfFov.y;
        fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;

        eyes = new SteamVR_Utils.RigidTransform[] {
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Left)),
            new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Right)) };

        SteamVR_Utils.Event.Listen("initializing", OnInitializing);
        SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
        SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
        SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
        SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
    }
Developer: scottstephan, Project: Vive2Prototypes, Lines: 84, Source: SteamVR.cs


Example 5: GetOverlayTextureBounds

	public abstract EVROverlayError GetOverlayTextureBounds(ulong ulOverlayHandle,ref VRTextureBounds_t pOverlayTextureBounds);
Developer: Black4Blade, Project: NewtonVR, Lines: 1, Source: openvr_api.cs


Example 6: SetSubmitParams

	public static extern void SetSubmitParams(VRTextureBounds_t boundsL, VRTextureBounds_t boundsR, EVRSubmitFlags nSubmitFlags);
Developer: Black4Blade, Project: NewtonVR, Lines: 1, Source: openvr_api.cs


Example 7: VR_IVRCompositor_Submit

	internal static extern EVRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags);
Developer: Black4Blade, Project: NewtonVR, Lines: 1, Source: openvr_api.cs


Example 8: Submit

	public override EVRCompositorError Submit(EVREye eEye,ref Texture_t pTexture,ref VRTextureBounds_t pBounds,EVRSubmitFlags nSubmitFlags)
	{
		CheckIfUsable();
		EVRCompositorError result = VRNativeEntrypoints.VR_IVRCompositor_Submit(m_pVRCompositor,eEye,ref pTexture,ref pBounds,nSubmitFlags);
		return result;
	}
Developer: Black4Blade, Project: NewtonVR, Lines: 6, Source: openvr_api.cs


Example 9: Submit

 public EVRCompositorError Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags)
 {
     return this.FnTable.Submit(eEye, ref pTexture, ref pBounds, nSubmitFlags);
 }
Developer: GameDiffs, Project: TheForest, Lines: 4, Source: CVRCompositor.cs


Example 10: GetVideoStreamTextureSize

	public EVRTrackedCameraError GetVideoStreamTextureSize(uint nDeviceIndex,EVRTrackedCameraFrameType eFrameType,ref VRTextureBounds_t pTextureBounds,ref uint pnWidth,ref uint pnHeight)
	{
		pnWidth = 0;
		pnHeight = 0;
		EVRTrackedCameraError result = FnTable.GetVideoStreamTextureSize(nDeviceIndex,eFrameType,ref pTextureBounds,ref pnWidth,ref pnHeight);
		return result;
	}
Developer: Chazman11, Project: SGJ, Lines: 7, Source: openvr_api.cs


Example 11: Submit

	public abstract VRCompositorError Submit(Hmd_Eye eEye,IntPtr pTexture,ref VRTextureBounds_t pBounds);
Developer: zekiguven, Project: TERRA-Engine, Lines: 1, Source: openvr_api.cs


Example 12: VR_IVRCompositor_Submit

	internal static extern VRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, Hmd_Eye eEye, IntPtr pTexture, ref VRTextureBounds_t pBounds);
Developer: zekiguven, Project: TERRA-Engine, Lines: 1, Source: openvr_api.cs


Example 13: UpdateOverlay

    public void UpdateOverlay(SteamVR vr)
    {
        if (texture != null)
        {
            var error = vr.overlay.ShowOverlay(handle);
            if (error == VROverlayError.InvalidHandle || error == VROverlayError.UnknownOverlay)
            {
                if (vr.overlay.FindOverlay(key, ref handle) != VROverlayError.None)
                    return;
            }

            vr.overlay.SetOverlayTexture(handle, texture.GetNativeTexturePtr());
            vr.overlay.SetOverlayAlpha(handle, alpha);
            vr.overlay.SetOverlayGamma(handle, gamma);
            vr.overlay.SetOverlayWidthInMeters(handle, scale);

            var textureBounds = new VRTextureBounds_t();
            textureBounds.uMin = (0 + uvOffset.x) * uvOffset.z;
            textureBounds.vMin = (1 + uvOffset.y) * uvOffset.w;
            textureBounds.uMax = (1 + uvOffset.x) * uvOffset.z;
            textureBounds.vMax = (0 + uvOffset.y) * uvOffset.w;
            vr.overlay.SetOverlayTextureBounds(handle, ref textureBounds);

            var vecMouseScale = new HmdVector2_t();
            vecMouseScale.v = new float[] { mouseScale.x, mouseScale.y };
            vr.overlay.SetOverlayMouseScale(handle, ref vecMouseScale);

            var vrcam = SteamVR_Render.Top();
            if (vrcam != null && vrcam.origin != null)
            {
                var offset = new SteamVR_Utils.RigidTransform(vrcam.origin, transform);
                offset.pos.x /= vrcam.origin.localScale.x;
                offset.pos.y /= vrcam.origin.localScale.y;
                offset.pos.z /= vrcam.origin.localScale.z;

                offset.pos.z += distance;

                var t = offset.ToHmdMatrix34();
                vr.overlay.SetOverlayTransformAbsolute(handle, SteamVR_Render.instance.trackingSpace, ref t);
            }

            vr.overlay.SetOverlayVisibility(handle, visibility);
            vr.overlay.SetOverlayInputMethod(handle, inputMethod);

            if (curved || antialias)
                highquality = true;

            if (highquality)
            {
                vr.overlay.SetHighQualityOverlay(handle);
                vr.overlay.SetOverlayFlag(handle, VROverlayFlags.Curved, curved);
                vr.overlay.SetOverlayFlag(handle, VROverlayFlags.RGSS4X, antialias);
            }
            else if (vr.overlay.GetHighQualityOverlay() == handle)
            {
                vr.overlay.SetHighQualityOverlay(OpenVR.k_ulOverlayHandleInvalid);
            }
        }
        else
        {
            vr.overlay.HideOverlay(handle);
        }
    }
Developer: scottstephan, Project: Vive2Prototypes, Lines: 63, Source: SteamVR_Overlay.cs


Example 14: CreateHiddenAreaMesh

    public static Mesh CreateHiddenAreaMesh(HiddenAreaMesh_t src, VRTextureBounds_t bounds)
    {
        if (src.unTriangleCount == 0)
            return null;

        var data = new float[src.unTriangleCount * 3 * 2]; //HmdVector2_t
        Marshal.Copy(src.pVertexData, data, 0, data.Length);

        var vertices = new Vector3[src.unTriangleCount * 3 + 12];
        var indices = new int[src.unTriangleCount * 3 + 24];

        var x0 = 2.0f * bounds.uMin - 1.0f;
        var x1 = 2.0f * bounds.uMax - 1.0f;
        var y0 = 2.0f * bounds.vMin - 1.0f;
        var y1 = 2.0f * bounds.vMax - 1.0f;

        for (int i = 0, j = 0; i < src.unTriangleCount * 3; i++)
        {
            var x = Lerp(x0, x1, data[j++]);
            var y = Lerp(y0, y1, data[j++]);
            vertices[i] = new Vector3(x, y, 0.0f);
            indices[i] = i;
        }

        // Add border
        var offset = (int)src.unTriangleCount * 3;
        var iVert = offset;
        vertices[iVert++] = new Vector3(-1, -1, 0);
        vertices[iVert++] = new Vector3(x0, -1, 0);
        vertices[iVert++] = new Vector3(-1, 1, 0);
        vertices[iVert++] = new Vector3(x0, 1, 0);
        vertices[iVert++] = new Vector3(x1, -1, 0);
        vertices[iVert++] = new Vector3(1, -1, 0);
        vertices[iVert++] = new Vector3(x1, 1, 0);
        vertices[iVert++] = new Vector3(1, 1, 0);
        vertices[iVert++] = new Vector3(x0, y0, 0);
        vertices[iVert++] = new Vector3(x1, y0, 0);
        vertices[iVert++] = new Vector3(x0, y1, 0);
        vertices[iVert++] = new Vector3(x1, y1, 0);

        var iTri = offset;
        indices[iTri++] = offset + 0;
        indices[iTri++] = offset + 1;
        indices[iTri++] = offset + 2;
        indices[iTri++] = offset + 2;
        indices[iTri++] = offset + 1;
        indices[iTri++] = offset + 3;
        indices[iTri++] = offset + 4;
        indices[iTri++] = offset + 5;
        indices[iTri++] = offset + 6;
        indices[iTri++] = offset + 6;
        indices[iTri++] = offset + 5;
        indices[iTri++] = offset + 7;
        indices[iTri++] = offset + 1;
        indices[iTri++] = offset + 4;
        indices[iTri++] = offset + 8;
        indices[iTri++] = offset + 8;
        indices[iTri++] = offset + 4;
        indices[iTri++] = offset + 9;
        indices[iTri++] = offset + 10;
        indices[iTri++] = offset + 11;
        indices[iTri++] = offset + 3;
        indices[iTri++] = offset + 3;
        indices[iTri++] = offset + 11;
        indices[iTri++] = offset + 6;

        var mesh = new Mesh();
        mesh.vertices = vertices;
        mesh.triangles = indices;
        mesh.bounds = new Bounds(Vector3.zero, new Vector3(float.MaxValue, float.MaxValue, float.MaxValue)); // Prevent frustum culling from culling this mesh
        return mesh;
    }
Developer: ConstLiu, Project: openvr, Lines: 72, Source: SteamVR_Utils.cs
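
The border construction above first remaps the texture bounds from UV space into normalized device coordinates, then builds quads that mask out the unused margin of the render target. A small worked check of that remapping (illustrative values only, not additional plugin code):

    // The remap used above: u in [0,1] maps to x = 2u - 1 in [-1,1] (and likewise for v).
    float uMin = 0.0f, uMax = 1.0f;      // example full-bounds values (assumed), not from the plugin
    float x0 = 2.0f * uMin - 1.0f;       // -1
    float x1 = 2.0f * uMax - 1.0f;       // +1
    // With full bounds the border quads built from x0/x1 collapse to zero width; with cropped
    // bounds they cover the strip between the eye's sub-rectangle and the edge of the target.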


Example 15: Update

		void Update()
		{
			if (Time.frameCount == prevFrameCount)
				return;

			prevFrameCount = Time.frameCount;

			if (videostream.handle == 0)
				return;

			var vr = SteamVR.instance;
			if (vr == null)
				return;

			var trackedCamera = OpenVR.TrackedCamera;
			if (trackedCamera == null)
				return;

			var nativeTex = System.IntPtr.Zero;
			var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
			var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());

			if (vr.graphicsAPI == EGraphicsAPIConvention.API_OpenGL)
			{
				if (glTextureId != 0)
					trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);

				if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
					return;

				nativeTex = (System.IntPtr)glTextureId;
			}
			else
			{
				if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
					return;
			}

			if (_texture == null)
			{
				_texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);

				uint width = 0, height = 0;
				var frameBounds = new VRTextureBounds_t();
				if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
				{
					// Account for textures being upside-down in Unity.
					frameBounds.vMin = 1.0f - frameBounds.vMin;
					frameBounds.vMax = 1.0f - frameBounds.vMax;
					this.frameBounds = frameBounds;
				}
			}
			else
			{
				_texture.UpdateExternalTexture(nativeTex);
			}
		}
Developer: ConstLiu, Project: openvr, Lines: 57, Source: SteamVR_TrackedCamera.cs


Example 16: RenderFrame

        public void RenderFrame(params IVRDrawable[] drawables)
        {
            if (graphics == null) throw new InvalidOperationException("Cannot render a frame until graphics are initialized");

            EyeTextures eyeTextures = graphics.RenderToTextures(
                CurrentViewProjMatrix(EVREye.Eye_Left),
                CurrentViewProjMatrix(EVREye.Eye_Right),
                LeftEyePos,
                RightEyePos,
                drawables);

            Texture_t leftEye = eyeTextures.LeftEye;
            Texture_t rightEye = eyeTextures.RightEye;

            // send to the headset
            VRTextureBounds_t bounds = new VRTextureBounds_t() { uMin = 0, vMin = 0, uMax = 1f, vMax = 1f }; // is this right?
            EVRCompositorError compErr = OpenVR.Compositor.Submit(EVREye.Eye_Left, ref leftEye, ref bounds, EVRSubmitFlags.Submit_Default);
            if (compErr != EVRCompositorError.None) throw new InvalidOperationException($"Failed to submit image to compositor: {compErr}");
            compErr = OpenVR.Compositor.Submit(EVREye.Eye_Right, ref rightEye, ref bounds, EVRSubmitFlags.Submit_Default);
            if (compErr != EVRCompositorError.None) throw new InvalidOperationException($"Failed to submit image to compositor: {compErr}");
        }
Developer: domisterwoozy, Project: LabBox, Lines: 21, Source: OpenVRScene.cs
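
The full-range bounds here (uMin = 0, vMin = 0, uMax = 1, vMax = 1) hand the compositor the entire texture for each eye, which matches this example because each eye is rendered into its own texture. When both eyes share one side-by-side texture, each Submit call instead selects its half; a hedged sketch of that alternative (SubmitSideBySide is hypothetical and not from the LabBox project):

    // Assumed side-by-side layout: left eye in the left half of the shared texture, right eye in the right half.
    static void SubmitSideBySide(ref Texture_t sharedTexture)
    {
        var leftBounds  = new VRTextureBounds_t() { uMin = 0.0f, vMin = 0.0f, uMax = 0.5f, vMax = 1.0f };
        var rightBounds = new VRTextureBounds_t() { uMin = 0.5f, vMin = 0.0f, uMax = 1.0f, vMax = 1.0f };
        OpenVR.Compositor.Submit(EVREye.Eye_Left,  ref sharedTexture, ref leftBounds,  EVRSubmitFlags.Submit_Default);
        OpenVR.Compositor.Submit(EVREye.Eye_Right, ref sharedTexture, ref rightBounds, EVRSubmitFlags.Submit_Default);
    }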


Example 17: DisplayEye

        public void DisplayEye(IntPtr nativePointer)
        {
            VRTextureBounds_t tboud = new VRTextureBounds_t();
            Texture_t tex;
            tex.eType = EGraphicsAPIConvention.API_DirectX;
            tex.eColorSpace = EColorSpace.Auto;
            tex.handle = nativePointer;// MyRender11.Backbuffer.m_resource.NativePointer;
            tboud.vMin = 0;
            tboud.vMax = 1;

            tboud.uMin = m_outsideLimit / 2;
            tboud.uMax = (1-m_insideLimit) / 2;
            var error = m_vrCompositor.Submit(EVREye.Eye_Left, ref tex, ref tboud, EVRSubmitFlags.Submit_Default);

            tboud.uMin = 0.5f + m_insideLimit / 2;
            tboud.uMax = 0.5f + (1-m_outsideLimit) / 2;
            var error2 = m_vrCompositor.Submit(EVREye.Eye_Right, ref tex, ref tboud, EVRSubmitFlags.Submit_Default);
            //FrameDone();

            if (debug_var)
                m_vrCompositor.CompositorDumpImages();
        }
Developer: ChristianHeinz71, Project: SpaceEngineers, Lines: 22, Source: MyOpenVR.cs


Example 18: VR_IVROverlay_GetOverlayTextureBounds

	internal static extern EVROverlayError VR_IVROverlay_GetOverlayTextureBounds(IntPtr instancePtr, ulong ulOverlayHandle, ref VRTextureBounds_t pOverlayTextureBounds);
Developer: Black4Blade, Project: NewtonVR, Lines: 1, Source: openvr_api.cs


Example 19: UpdateOverlay

	public void UpdateOverlay()
	{
		var overlay = OpenVR.Overlay;
		if (overlay == null)
			return;

		if (texture != null)
		{
			var error = overlay.ShowOverlay(handle);
			if (error == EVROverlayError.InvalidHandle || error == EVROverlayError.UnknownOverlay)
			{
				if (overlay.FindOverlay(key, ref handle) != EVROverlayError.None)
					return;
			}

			var tex = new Texture_t();
			tex.handle = texture.GetNativeTexturePtr();
			tex.eType = SteamVR.instance.graphicsAPI;
			tex.eColorSpace = EColorSpace.Auto;
			overlay.SetOverlayTexture(handle, ref tex);

			overlay.SetOverlayAlpha(handle, alpha);
			overlay.SetOverlayWidthInMeters(handle, scale);
			overlay.SetOverlayAutoCurveDistanceRangeInMeters(handle, curvedRange.x, curvedRange.y);

			var textureBounds = new VRTextureBounds_t();
			textureBounds.uMin = (0 + uvOffset.x) * uvOffset.z;
			textureBounds.vMin = (1 + uvOffset.y) * uvOffset.w;
			textureBounds.uMax = (1 + uvOffset.x) * uvOffset.z;
			textureBounds.vMax = (0 + uvOffset.y) * uvOffset.w;
			overlay.SetOverlayTextureBounds(handle, ref textureBounds);

			var vecMouseScale = new HmdVector2_t();
			vecMouseScale.v0 = mouseScale.x;
			vecMouseScale.v1 = mouseScale.y;
			overlay.SetOverlayMouseScale(handle, ref vecMouseScale);

			var vrcam = SteamVR_Render.Top();
			if (vrcam != null && vrcam.origin != null)
			{
				var offset = new SteamVR_Utils.RigidTransform(vrcam.origin, transform);
				offset.pos.x /= vrcam.origin.localScale.x;
				offset.pos.y /= vrcam.origin.localScale.y;
				offset.pos.z /= vrcam.origin.localScale.z;

				offset.pos.z += distance;

				var t = offset.ToHmdMatrix34();
				overlay.SetOverlayTransformAbsolute(handle, SteamVR_Render.instance.trackingSpace, ref t);
			}

			overlay.SetOverlayInputMethod(handle, inputMethod);

			if (curved || antialias)
				highquality = true;

			if (highquality)
			{
				overlay.SetHighQualityOverlay(handle);
				overlay.SetOverlayFlag(handle, VROverlayFlags.Curved, curved);
				overlay.SetOverlayFlag(handle, VROverlayFlags.RGSS4X, antialias);
			}
			else if (overlay.GetHighQualityOverlay() == handle)
			{
				overlay.SetHighQualityOverlay(OpenVR.k_ulOverlayHandleInvalid);
			}
		}
		else
		{
			overlay.HideOverlay(handle);
		}
	}
Developer: Chazman11, Project: SGJ, Lines: 72, Source: SteamVR_Overlay.cs


Example 20: Submit

	public EVRCompositorError Submit(EVREye eEye,ref Texture_t pTexture,ref VRTextureBounds_t pBounds,EVRSubmitFlags nSubmitFlags)
	{
		EVRCompositorError result = FnTable.Submit(eEye,ref pTexture,ref pBounds,nSubmitFlags);
		return result;
	}
Developer: apekshadarbari, Project: PokerFace, Lines: 5, Source: openvr_api.cs



Note: The Valve.VR.VRTextureBounds_t class examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not republish without permission.

