Commit 708d3dee authored by BlackAngle233
Browse files

update final design

parent 1444629e
fileFormatVersion: 2
guid: c22de44a513565847a2098668ad0010e
timeCreated: 1560758054
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
fileFormatVersion: 2
guid: d92eb92f60f735b418971311cc0837fa
folderAsset: yes
timeCreated: 1594183630
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
fileFormatVersion: 2
guid: 8cc254012dc7e3541bf4862040fc369c
folderAsset: yes
timeCreated: 1594184120
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en">Material to render camera image.</para>
/// <para xml:lang="zh">用于渲染camera图像的材质。</para>
/// </summary>
public class CameraImageMaterial : IDisposable
{
    // Shader set shared by every instance; loaded once from the Resources folder.
    private static CameraImageShaders shaders;
    // Material currently bound to the plane textures below; recreated when size or format changes.
    private Material mat;
    // One texture per image plane (1 for gray/RGB formats, 2 for NV12/NV21, 3 for I420/YV12).
    private Texture2D[] textures = new Texture2D[0];
    // Size and format of the last image received, used to detect when resources must be rebuilt.
    private PixelFormat format;
    private int imageWidth;
    private int imageHeight;

    public CameraImageMaterial()
    {
        if (!shaders)
        {
            shaders = Resources.Load<CameraImageShaders>("EasyAR/Shaders");
        }
    }

    ~CameraImageMaterial()
    {
        // NOTE(review): UnityEngine.Object.Destroy may only be called on the main thread,
        // while finalizers run on the GC thread — rely on explicit Dispose(); confirm this
        // finalizer is only a last-resort safety net.
        DisposeResources();
    }

    /// <summary>
    /// <para xml:lang="en">Dispose resources.</para>
    /// <para xml:lang="zh">销毁资源。</para>
    /// </summary>
    public void Dispose()
    {
        DisposeResources();
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// <para xml:lang="en">Update material using <paramref name="image"/>. Returns the material rendering the image; returns null before the first supported image arrives.</para>
    /// <para xml:lang="zh">使用<paramref name="image"/>更新材质。</para>
    /// </summary>
    public Material UpdateByImage(Image image)
    {
        var recreateMaterial = false;
        // Rebuild textures and material whenever the incoming image size or pixel format changes.
        if (image.width() != imageWidth || image.height() != imageHeight || image.format() != format)
        {
            DisposeResources();
            imageWidth = image.width();
            imageHeight = image.height();
            format = image.format();
            recreateMaterial = true;
        }
        using (var buffer = image.buffer())
        {
            var ptr = buffer.data();
            // Byte size of one full-resolution single-byte plane (e.g. the Y plane).
            var resolution = imageWidth * imageHeight;
            switch (format)
            {
                case PixelFormat.Gray:
                    if (recreateMaterial)
                    {
                        CreateSinglePlane(shaders.GRAY, TextureFormat.Alpha8, "_grayTexture");
                    }
                    LoadPlane(0, ptr, 0, resolution);
                    break;
                case PixelFormat.YUV_NV21:
                    if (recreateMaterial)
                    {
                        CreateSemiPlanarYUV(shaders.YUV_NV21);
                    }
                    LoadPlane(0, ptr, 0, resolution);
                    // Interleaved VU plane: (w/2 * h/2) pixels at 2 bytes each = resolution / 2 bytes.
                    // (Was passed as `resolution`, overstating the plane's data size.)
                    LoadPlane(1, ptr, resolution, resolution / 2);
                    break;
                case PixelFormat.YUV_NV12:
                    if (recreateMaterial)
                    {
                        CreateSemiPlanarYUV(shaders.YUV_NV12);
                    }
                    LoadPlane(0, ptr, 0, resolution);
                    LoadPlane(1, ptr, resolution, resolution / 2);
                    break;
                case PixelFormat.YUV_I420:
                    if (recreateMaterial)
                    {
                        CreatePlanarYUV();
                    }
                    // I420 plane order in the buffer: Y, U, V.
                    LoadPlane(0, ptr, 0, resolution);
                    LoadPlane(1, ptr, resolution, resolution / 4);
                    LoadPlane(2, ptr, resolution + resolution / 4, resolution / 4);
                    break;
                case PixelFormat.YUV_YV12:
                    if (recreateMaterial)
                    {
                        CreatePlanarYUV();
                    }
                    // YV12 plane order in the buffer: Y, V, U — same shader as I420, chroma
                    // planes swapped at upload time.
                    LoadPlane(0, ptr, 0, resolution);
                    LoadPlane(1, ptr, resolution + resolution / 4, resolution / 4);
                    LoadPlane(2, ptr, resolution, resolution / 4);
                    break;
                case PixelFormat.RGB888:
                    if (recreateMaterial)
                    {
                        CreateSinglePlane(shaders.RGB, TextureFormat.RGB24, "_MainTex");
                    }
                    LoadPlane(0, ptr, 0, buffer.size());
                    break;
                case PixelFormat.BGR888:
                    if (recreateMaterial)
                    {
                        CreateSinglePlane(shaders.BGR, TextureFormat.RGB24, "_MainTex");
                    }
                    LoadPlane(0, ptr, 0, buffer.size());
                    break;
                case PixelFormat.RGBA8888:
                    if (recreateMaterial)
                    {
                        CreateSinglePlane(shaders.RGB, TextureFormat.RGBA32, "_MainTex");
                    }
                    LoadPlane(0, ptr, 0, buffer.size());
                    break;
                case PixelFormat.BGRA8888:
                    if (recreateMaterial)
                    {
                        CreateSinglePlane(shaders.BGR, TextureFormat.RGBA32, "_MainTex");
                    }
                    LoadPlane(0, ptr, 0, buffer.size());
                    break;
                default:
                    // Unsupported format: leave the previous material (possibly null) untouched.
                    break;
            }
        }
        return mat;
    }

    // Creates a one-texture material (gray or packed RGB/BGR/RGBA/BGRA formats).
    private void CreateSinglePlane(Shader shader, TextureFormat textureFormat, string textureName)
    {
        textures = new Texture2D[] { new Texture2D(imageWidth, imageHeight, textureFormat, false) };
        mat = new Material(shader);
        mat.SetTexture(textureName, textures[0]);
    }

    // Creates textures for semi-planar YUV (NV12/NV21): full-size Y plane plus a
    // half-size interleaved chroma plane (two bytes per chroma sample pair).
    private void CreateSemiPlanarYUV(Shader shader)
    {
        textures = new Texture2D[]
        {
            new Texture2D(imageWidth, imageHeight, TextureFormat.Alpha8, false),
            new Texture2D(imageWidth / 2, imageHeight / 2, TextureFormat.RGBA4444, false),
        };
        mat = new Material(shader);
        mat.SetTexture("_yTexture", textures[0]);
        mat.SetTexture("_uvTexture", textures[1]);
    }

    // Creates textures for planar YUV (I420/YV12): full-size Y plane plus two half-size chroma planes.
    private void CreatePlanarYUV()
    {
        textures = new Texture2D[]
        {
            new Texture2D(imageWidth, imageHeight, TextureFormat.Alpha8, false),
            new Texture2D(imageWidth / 2, imageHeight / 2, TextureFormat.Alpha8, false),
            new Texture2D(imageWidth / 2, imageHeight / 2, TextureFormat.Alpha8, false),
        };
        mat = new Material(shaders.YUV_I420_YV12);
        mat.SetTexture("_yTexture", textures[0]);
        mat.SetTexture("_uTexture", textures[1]);
        mat.SetTexture("_vTexture", textures[2]);
    }

    // Uploads `size` bytes at basePtr + offset into the texture at `index` and applies the change.
    private void LoadPlane(int index, IntPtr basePtr, long offset, int size)
    {
        textures[index].LoadRawTextureData(new IntPtr(basePtr.ToInt64() + offset), size);
        textures[index].Apply();
    }

    private void DisposeResources()
    {
        if (mat)
        {
            UnityEngine.Object.Destroy(mat);
            // Clear the reference so a destroyed material is never handed back to callers.
            mat = null;
        }
        foreach (var texture in textures)
        {
            UnityEngine.Object.Destroy(texture);
        }
        // Drop references to the destroyed textures.
        textures = new Texture2D[0];
    }
}
}
fileFormatVersion: 2
guid: 69a3654d76e05254e8291232693c8da3
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
using UnityEngine.Rendering;
namespace easyar
{
/// <summary>
/// <para xml:lang="en"><see cref="MonoBehaviour"/> which controls camera image rendering in the scene. Unity universal render pipeline (URP) is not supported yet, you can extend this class for URP support.</para>
/// <para xml:lang="zh">在场景中控制camera图像渲染的<see cref="MonoBehaviour"/>,这个类目前不支持Unity universal render pipeline (URP) ,但你可以自行扩展这个类的实现来支持URP。</para>
/// </summary>
[RequireComponent(typeof(RenderCameraController))]
public class CameraImageRenderer : MonoBehaviour
{
// Controller on the same GameObject (enforced by RequireComponent); provides TargetCamera.
private RenderCameraController controller;
// Draws the camera image as background, attached at CameraEvent.BeforeForwardOpaque.
private CommandBuffer commandBuffer;
// Full-screen quad in clip-space coordinates rendered by commandBuffer.
private Mesh screenMesh;
// Converts EasyAR image data into the Unity material used for rendering.
private CameraImageMaterial arMaterial;
// Material for the current camera image; null while there is no output frame.
private Material material;
// Parameters of the latest input frame; the previous instance is disposed on every frame change.
private CameraParameters cameraParameters;
// Horizontal flip flag applied when computing the image projection (see SetHFilp).
private bool renderImageHFlip;
// Active RequestTargetTexture request; null when no caller asked for a texture.
private UserRequest request;
private ARSession arSession;
/// <summary>
/// <para xml:lang="en">Camera image rendering update event. This event will pass out the Material and texture size of current camera image rendering. This event only indicates a new render happens, while the camera image itself may not change.</para>
/// <para xml:lang="zh">camera图像渲染更新的事件。这个事件会传出当前用于camera图像渲染的材质和贴图大小。当这个事件发生时,camera图像本身不一定有改变,它只表示一次渲染的发生。</para>
/// </summary>
public event Action<Material, Vector2> OnFrameRenderUpdate;
// Raised when the RenderTexture passed out by RequestTargetTexture is created, replaced or cleared.
private event Action<Camera, RenderTexture> TargetTextureChange;
/// <summary>
/// MonoBehaviour Awake
/// </summary>
protected virtual void Awake()
{
controller = GetComponent<RenderCameraController>();
arMaterial = new CameraImageMaterial();
}
/// <summary>
/// MonoBehaviour OnEnable
/// </summary>
protected virtual void OnEnable()
{
// (Re)attach the background drawing command buffer to the target camera.
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
}
/// <summary>
/// MonoBehaviour OnDisable
/// </summary>
protected virtual void OnDisable()
{
RemoveCommandBuffer(controller ? controller.TargetCamera : null);
}
/// <summary>
/// MonoBehaviour OnDestroy
/// </summary>
protected virtual void OnDestroy()
{
arMaterial.Dispose();
if (request != null) { request.Dispose(); }
if (cameraParameters != null) { cameraParameters.Dispose(); }
}
/// <summary>
/// <para xml:lang="en">Get the <see cref="RenderTexture"/> of camera image.</para>
/// <para xml:lang="en">The texture is a full sized image from <see cref="OutputFrame"/>, not cropped by the screen. The action <paramref name="targetTextureEventHandler"/> will pass out the <see cref="RenderTexture"/> and the <see cref="Camera"/> drawing the texture when the texture created or changed, will not call every frame or when the camera image data change. Calling this method will create external resources, and will trigger render when necessary, so make sure to release the resource using <see cref="DropTargetTexture"/> when not use.</para>
/// <para xml:lang="zh">获取camera图像的<see cref="RenderTexture"/>。</para>
/// <para xml:lang="zh">通过这个接口获取的texture是从<see cref="OutputFrame"/>获取的完整大小的图像,未经屏幕裁剪。<paramref name="targetTextureEventHandler"/> action会传出<see cref="RenderTexture"/>以及用于绘制texture的<see cref="Camera"/>。这个action不会每帧调用,也不会在camera图像数据发生变化的时候调用,它只会发生在texture本身创建或改变的时候。调用这个方法会创建额外的资源且会在必要时触发渲染,因此在不使用的时候需要调用<see cref="DropTargetTexture"/>释放资源。</para>
/// </summary>
public void RequestTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
{
// A single UserRequest is shared by all subscribers; created lazily on first request.
if (request == null)
{
request = new UserRequest();
}
TargetTextureChange += targetTextureEventHandler;
RenderTexture texture;
request.UpdateTexture(controller ? controller.TargetCamera : null, material, out texture);
// Notify immediately if a texture already exists, so late subscribers also receive it.
if (TargetTextureChange != null && texture)
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
/// <summary>
/// <para xml:lang="en">Release the <see cref="RenderTexture"/> of camera image. Internal resources will be released when all holders release.</para>
/// <para xml:lang="zh">释放绘制camera图像的<see cref="RenderTexture"/>。内部资源将在所有持有者都释放后释放。</para>
/// </summary>
public void DropTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
{
// Tell this subscriber the texture is gone before unsubscribing it.
if (controller)
{
targetTextureEventHandler(controller.TargetCamera, null);
}
TargetTextureChange -= targetTextureEventHandler;
// Dispose the shared request only once the last subscriber has released it.
if (TargetTextureChange == null && request != null)
{
request.RemoveCommandBuffer(controller ? controller.TargetCamera : null);
request.Dispose();
request = null;
}
}
/// <summary>
/// <para xml:lang="en">Usually only for internal assemble use. Assemble response.</para>
/// <para xml:lang="zh">通常只在内部组装时使用。组装响应方法。</para>
/// </summary>
public void OnAssemble(ARSession session)
{
arSession = session;
session.FrameChange += OnFrameChange;
session.FrameUpdate += OnFrameUpdate;
}
/// <summary>
/// <para xml:lang="en">Set render image horizontal flip.</para>
/// <para xml:lang="zh">设置渲染的图像的镜像翻转。</para>
/// </summary>
// NOTE(review): "HFilp" is a typo for "HFlip", kept as-is because it is public API.
public void SetHFilp(bool hFlip)
{
renderImageHFlip = hFlip;
}
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
// A null frame means tracking output stopped: clear the material and detach all rendering.
if (outputFrame == null)
{
material = null;
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
if (request != null)
{
request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
RenderTexture texture;
// UpdateTexture with a null material destroys the texture and reports the change.
if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
return;
}
// Skip all work when disabled and nobody consumes the material or texture.
if (!enabled && request == null && OnFrameRenderUpdate == null)
{
return;
}
using (var frame = outputFrame.inputFrame())
{
using (var image = frame.image())
{
var materialUpdated = arMaterial.UpdateByImage(image);
// Re-attach command buffers only when the material instance actually changed.
if (material != materialUpdated)
{
material = materialUpdated;
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
if (request != null) { request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material); }
}
}
// Replace (and dispose) the previous frame's camera parameters.
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
}
}
private void OnFrameUpdate(OutputFrame outputFrame)
{
if (!controller || (!enabled && request == null && OnFrameRenderUpdate == null))
{
return;
}
// Keep the user-requested texture in sync with screen/camera-rect size changes.
if (request != null)
{
RenderTexture texture;
if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
if (!material)
{
return;
}
// Front cameras are mirrored by the device, so the user flip flag is inverted for them.
bool cameraFront = cameraParameters.cameraDeviceType() == CameraDeviceType.Front;
var imageProjection = cameraParameters.imageProjection(controller.TargetCamera.aspect, arSession.Assembly.Display.Rotation, true, cameraFront? !renderImageHFlip : renderImageHFlip).ToUnityMatrix();
var gpuProjection = GL.GetGPUProjectionMatrix(imageProjection, false);
material.SetMatrix("_projection", gpuProjection);
if (OnFrameRenderUpdate != null)
{
// Pass out the on-screen pixel size covered by the target camera's viewport rect.
OnFrameRenderUpdate(material, new Vector2(Screen.width * controller.TargetCamera.rect.width, Screen.height * controller.TargetCamera.rect.height));
}
}
// Rebuilds the background-drawing command buffer on the given camera for the given material.
private void UpdateCommandBuffer(Camera cam, Material material)
{
RemoveCommandBuffer(cam);
if (!cam || !material)
{
return;
}
if (enabled)
{
commandBuffer = new CommandBuffer();
// Full-screen quad in clip space; the material's shader places the image.
screenMesh = new Mesh();
screenMesh.vertices = new Vector3[] { new Vector3(-1, -1), new Vector3(1, -1), new Vector3(1, 1), new Vector3(-1, 1) };
screenMesh.uv = new Vector2[] { new Vector2(0, 0), new Vector2(1, 0), new Vector2(1, 1), new Vector2(0, 1) };
screenMesh.triangles = new int[] { 0, 1, 2, 2, 3, 0 };
commandBuffer.DrawMesh(screenMesh, Matrix4x4.identity, material);
cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
}
// Detaches and disposes the command buffer and its mesh; safe to call when nothing is attached.
private void RemoveCommandBuffer(Camera cam)
{
if (screenMesh != null)
{
Destroy(screenMesh);
screenMesh = null;
}
if (commandBuffer != null)
{
if (cam)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
commandBuffer.Dispose();
commandBuffer = null;
}
}
// Off-screen rendering of the full camera image into a RenderTexture for external consumers.
private class UserRequest : IDisposable
{
// Texture handed out to subscribers; recreated when the target size changes.
private RenderTexture texture;
private CommandBuffer commandBuffer;
private Mesh screenMesh;
~UserRequest()
{
// NOTE(review): Destroy is main-thread-only in Unity, but finalizers run on the GC
// thread — explicit Dispose() is the supported path; confirm this is only a safety net.
if (screenMesh != null) { Destroy(screenMesh); }
if (commandBuffer != null) { commandBuffer.Dispose(); }
if (texture) { Destroy(texture); }
}
public void Dispose()
{
if (screenMesh != null) { Destroy(screenMesh); }
if (commandBuffer != null) { commandBuffer.Dispose(); }
if (texture) { Destroy(texture); }
GC.SuppressFinalize(this);
}
// Ensures the texture matches the camera's on-screen size.
// Returns true when the texture instance changed (created, resized or destroyed).
public bool UpdateTexture(Camera cam, Material material, out RenderTexture tex)
{
tex = texture;
// No camera or material: destroy any existing texture and report the change.
if (!cam || !material)
{
if (texture)
{
Destroy(texture);
tex = texture = null;
return true;
}
return false;
}
int w = (int)(Screen.width * cam.rect.width);
int h = (int)(Screen.height * cam.rect.height);
// Size changed: drop the old texture so a new one is created below.
if (texture && (texture.width != w || texture.height != h))
{
Destroy(texture);
}
if (texture)
{
return false;
}
else
{
texture = new RenderTexture(w, h, 0);
UpdateCommandBuffer(cam, material);
tex = texture;
return true;
}
}
// Rebuilds the command buffer that renders the camera image into this request's texture.
public void UpdateCommandBuffer(Camera cam, Material material)
{
RemoveCommandBuffer(cam);
if (!cam || !material)
{
return;
}
if (texture)
{
commandBuffer = new CommandBuffer();
commandBuffer.SetRenderTarget(texture);
screenMesh = new Mesh();
screenMesh.vertices = new Vector3[] { new Vector3(-1, -1), new Vector3(1, -1), new Vector3(1, 1), new Vector3(-1, 1) };
screenMesh.uv = new Vector2[] { new Vector2(0, 0), new Vector2(1, 0), new Vector2(1, 1), new Vector2(0, 1) };
screenMesh.triangles = new int[] { 0, 1, 2, 2, 3, 0 };
commandBuffer.DrawMesh(screenMesh, Matrix4x4.identity, material);
cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
}
public void RemoveCommandBuffer(Camera cam)
{
if (screenMesh != null)
{
Destroy(screenMesh);
screenMesh = null;
}
if (commandBuffer != null)
{
if (cam)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
commandBuffer.Dispose();
commandBuffer = null;
}
}
}
}
}
fileFormatVersion: 2
guid: 827d788c59357834d9adc4815a9a065c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en">Shaders to draw camera image.</para>
/// <para xml:lang="zh">绘制camera图像的shader。</para>
/// </summary>
[CreateAssetMenu(menuName = "EasyAR/Shaders")]
public class CameraImageShaders : ScriptableObject
{
// An instance of this asset is loaded by CameraImageMaterial via
// Resources.Load<CameraImageShaders>("EasyAR/Shaders"), so the asset must live at that Resources path.
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.RGB888"/> or <see cref="PixelFormat.RGBA8888"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.RGB888"/>或<see cref="PixelFormat.RGBA8888"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader RGB;
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.BGR888"/> or <see cref="PixelFormat.BGRA8888"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.BGR888"/>或<see cref="PixelFormat.BGRA8888"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader BGR;
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.Gray"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.Gray"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader GRAY;
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.YUV_YV12"/> or <see cref="PixelFormat.YUV_I420"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.YUV_YV12"/>或<see cref="PixelFormat.YUV_I420"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader YUV_I420_YV12;
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.YUV_NV12"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.YUV_NV12"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader YUV_NV12;
/// <summary>
/// <para xml:lang="en"><see cref="Shader"/> to draw image of <see cref="PixelFormat.YUV_NV21"/> format.</para>
/// <para xml:lang="zh">处理图片数据格式为<see cref="PixelFormat.YUV_NV21"/>的<see cref="Shader"/>。</para>
/// </summary>
public Shader YUV_NV21;
}
}
fileFormatVersion: 2
guid: ad83e83f13274fd4a862ea9284a6bb29
timeCreated: 1574937003
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
namespace easyar
{
/// <summary>
/// <para xml:lang="en">Abstracts camera device as frame source, used when assemble, to provide input frame data to the algorithms.</para>
/// <para xml:lang="zh">抽象作为frame源的Camera设备,在组装时使用,提供算法所需的frame输入数据。</para>
/// </summary>
public abstract class CameraSource : FrameSource
{
    // Backing store for BufferCapacity.
    protected int bufferCapacity;

    /// <summary>
    /// <para xml:lang="en">Device buffer capacity.</para>
    /// <para xml:lang="zh">设备缓冲容量。</para>
    /// </summary>
    public virtual int BufferCapacity
    {
        get { return bufferCapacity; }
        set { bufferCapacity = value; }
    }

    /// <summary>
    /// MonoBehaviour Start
    /// </summary>
    protected virtual void Start()
    {
        // Open the device only once the EasyAR engine has been initialized.
        if (EasyARController.Initialized)
        {
            Open();
        }
    }

    /// <summary>
    /// MonoBehaviour OnDestroy
    /// </summary>
    protected virtual void OnDestroy()
    {
        Close();
    }

    /// <summary>
    /// <para xml:lang="en">Open camera</para>
    /// <para xml:lang="zh">开启Camera。</para>
    /// </summary>
    public abstract void Open();

    /// <summary>
    /// <para xml:lang="en">Close camera</para>
    /// <para xml:lang="zh">关闭Camera。</para>
    /// </summary>
    public abstract void Close();
}
}
fileFormatVersion: 2
guid: 93e73b821711ac74ea7e4f46960b655d
timeCreated: 1562756779
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using System.Collections.Generic;
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en">Display device interface.</para>
/// <para xml:lang="zh">显示设备接口。</para>
/// </summary>
public interface IDisplay
{
/// <summary>
/// <para xml:lang="en">Device rotation.</para>
/// <para xml:lang="zh">设备旋转信息。</para>
/// </summary>
// Display in this file reports degrees (0/90/180/270); DisplayEmulator returns whatever
// value was set via EmulateRotation.
int Rotation { get; }
}
/// <summary>
/// <para xml:lang="en">Display device.</para>
/// <para xml:lang="zh">显示设备。</para>
/// </summary>
internal class Display : IDisplay, IDisposable
{
// Maps platform-specific orientation constants to rotation in degrees (0/90/180/270).
private Dictionary<int, int> rotations = new Dictionary<int, int>();
#if UNITY_ANDROID && !UNITY_EDITOR
// android.view.Display obtained from the WindowManager via JNI.
// NOTE(review): static, but released from an instance Dispose/finalizer — a second Display
// instance would share and then lose this object; confirm the class is used as a single instance.
private static AndroidJavaObject defaultDisplay;
#endif
public Display()
{
if (Application.platform == RuntimePlatform.Android)
{
InitializeAndroid();
}
else if (Application.platform == RuntimePlatform.IPhonePlayer)
{
InitializeIOS();
}
}
~Display()
{
// NOTE(review): finalizers run off the main thread; releasing an AndroidJavaObject here
// may be unsafe — explicit Dispose() is the supported path.
DeleteAndroidJavaObjects();
}
public int Rotation
{
get
{
if (Application.platform == RuntimePlatform.Android)
{
#if UNITY_ANDROID && !UNITY_EDITOR
var rotation = defaultDisplay.Call<int>("getRotation");
return rotations[rotation];
#endif
// In the editor the block above compiles out and control falls through to return 0 below.
}
else if (Application.platform == RuntimePlatform.IPhonePlayer)
{
return rotations[(int)Screen.orientation];
}
return 0;
}
}
/// <summary>
/// <para xml:lang="en">Dispose resources.</para>
/// <para xml:lang="zh">销毁资源。</para>
/// </summary>
public void Dispose()
{
DeleteAndroidJavaObjects();
GC.SuppressFinalize(this);
}
// Maps Unity ScreenOrientation values to rotation degrees for iOS.
private void InitializeIOS()
{
rotations[(int)ScreenOrientation.Portrait] = 0;
rotations[(int)ScreenOrientation.LandscapeLeft] = 90;
rotations[(int)ScreenOrientation.PortraitUpsideDown] = 180;
rotations[(int)ScreenOrientation.LandscapeRight] = 270;
}
// Fetches the default android.view.Display through JNI and maps the
// android.view.Surface.ROTATION_* constants to rotation degrees.
private void InitializeAndroid()
{
#if UNITY_ANDROID && !UNITY_EDITOR
using (var surfaceClass = new AndroidJavaClass("android.view.Surface"))
using (var contextClass = new AndroidJavaClass("android.content.Context"))
using (var windowService = contextClass.GetStatic<AndroidJavaObject>("WINDOW_SERVICE"))
using (var unityPlayerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer"))
using (var currentActivity = unityPlayerClass.GetStatic<AndroidJavaObject>("currentActivity"))
using (var systemService = currentActivity.Call<AndroidJavaObject>("getSystemService", windowService))
{
defaultDisplay = systemService.Call<AndroidJavaObject>("getDefaultDisplay");
rotations[surfaceClass.GetStatic<int>("ROTATION_0")] = 0;
rotations[surfaceClass.GetStatic<int>("ROTATION_90")] = 90;
rotations[surfaceClass.GetStatic<int>("ROTATION_180")] = 180;
rotations[surfaceClass.GetStatic<int>("ROTATION_270")] = 270;
}
#endif
}
// Releases the JNI display reference; no-op outside Android players.
private void DeleteAndroidJavaObjects()
{
#if UNITY_ANDROID && !UNITY_EDITOR
if (defaultDisplay != null) { defaultDisplay.Dispose(); }
#endif
}
}
/// <summary>
/// <para xml:lang="en">Display emulator.</para>
/// <para xml:lang="zh">Display模拟。</para>
/// </summary>
internal class DisplayEmulator : IDisplay
{
    // Emulated rotation value, set through EmulateRotation.
    private int rotation;

    // Reports the last emulated rotation.
    public int Rotation
    {
        get { return rotation; }
    }

    // Sets the rotation value reported by this emulated display.
    internal void EmulateRotation(int value)
    {
        rotation = value;
    }
}
}
fileFormatVersion: 2
guid: 26cef0dbfc2f7ab479e748ca5dda7cb3
timeCreated: 1575790864
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
fileFormatVersion: 2
guid: 842c5f6f83fb04a4692bb36a97ac2c37
folderAsset: yes
timeCreated: 1594184409
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en"><see cref="MonoBehaviour"/> which controls <see cref="Camera"/> in the scene. The <see cref="Camera"/> projection is set to fit real world <see cref="CameraDevice"/> or other optical device.</para>
/// <para xml:lang="zh">在场景中控制<see cref="Camera"/>的<see cref="MonoBehaviour"/>,<see cref="Camera"/> 投影矩阵会反映现实世界中的<see cref="CameraDevice"/>或其它光学设备。</para>
/// </summary>
public class RenderCameraController : MonoBehaviour
{
/// <summary>
/// <para xml:lang="en">The <see cref="Camera"/> representing real world <see cref="CameraDevice"/> or "eye" when using eyewears. It will be automatically set to the camera from <see cref="ARSession.Assembly"/> when assemble if not manually assigned.</para>
/// <para xml:lang="zh">代表现实世界中<see cref="CameraDevice"/>或使用眼镜时的“眼睛”的<see cref="Camera"/>。如果未手动指定,它将在组装时被自动设为<see cref="ARSession.Assembly"/>中的camera。</para>
/// </summary>
public Camera TargetCamera;
/// <summary>
/// <para xml:lang="en">The external <see cref="CameraParameters"/> used to set <see cref="Camera"/> projection. It is used when the <see cref="Camera"/> is not representing the <see cref="CameraDevice"/> but other optical device, like "eye" from eyewears.</para>
/// <para xml:lang="zh">用于设置<see cref="Camera"/>投影矩阵的外部<see cref="CameraParameters"/>。它通常在<see cref="Camera"/>不代表<see cref="CameraDevice"/>而是类似眼镜的“眼睛”的光学设备时使用。</para>
/// </summary>
public RenderCameraParameters ExternalParameters;
// Optional renderer on the same GameObject that draws the camera image background.
private CameraImageRenderer cameraRenderer;
// Inverse of the display compensation delivered with the latest frame; folded into the projection.
private Matrix4x4 currentDisplayCompensation = Matrix4x4.identity;
// Parameters of the latest input frame; the previous instance is disposed on every frame change.
private CameraParameters cameraParameters;
// Whether the projection is horizontally mirrored (World flip mode).
// NOTE(review): "HFilp" is a typo for "HFlip", kept because the internal setters use this spelling.
private bool projectHFilp;
private ARSession arSession;
// Lazily attached component raising pre/post render callbacks on TargetCamera.
private RenderCameraEventHandler renderEvent;
/// <summary>
/// MonoBehaviour OnEnable
/// </summary>
protected virtual void OnEnable()
{
// Resubscribe only after OnAssemble assigned the session.
if (arSession)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
}
/// <summary>
/// MonoBehaviour OnDisable
/// </summary>
protected virtual void OnDisable()
{
if (arSession)
{
arSession.FrameChange -= OnFrameChange;
arSession.FrameUpdate -= OnFrameUpdate;
}
}
/// <summary>
/// MonoBehaviour OnDestroy
/// </summary>
protected virtual void OnDestroy()
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
if (ExternalParameters)
{
ExternalParameters.Dispose();
}
}
/// <summary>
/// <para xml:lang="en">Usually only for internal assemble use. Assemble response.</para>
/// <para xml:lang="zh">通常只在内部组装时使用。组装响应方法。</para>
/// </summary>
internal void OnAssemble(ARSession session)
{
arSession = session;
// Fall back to the session's camera when no camera was assigned in the inspector.
if (!TargetCamera)
{
TargetCamera = session.Assembly.Camera;
}
// Subscribe here as well: OnEnable ran before arSession was set.
if (enabled)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
cameraRenderer = GetComponent<CameraImageRenderer>();
if (cameraRenderer)
{
cameraRenderer.OnAssemble(session);
}
}
/// <summary>
/// <para xml:lang="en">Set projection horizontal flip when using <see cref="ARSession.ARHorizontalFlipMode.World"/> mode.</para>
/// <para xml:lang="zh">在<see cref="ARSession.ARHorizontalFlipMode.World"/>模式下设置投影矩阵镜像翻转。</para>
/// </summary>
internal void SetProjectHFlip(bool hFlip)
{
projectHFilp = hFlip;
}
/// <summary>
/// <para xml:lang="en">Set render image horizontal flip.</para>
/// <para xml:lang="zh">设置渲染的图像的镜像翻转。</para>
/// </summary>
internal void SetRenderImageHFilp(bool hFlip)
{
// Forwarded to the image renderer; the projection itself is unaffected.
if (cameraRenderer)
{
cameraRenderer.SetHFilp(hFlip);
}
}
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
if (outputFrame == null)
{
return;
}
// Store the inverse so it can be multiplied into the projection each frame update.
currentDisplayCompensation = displayCompensation.inverse;
using (var frame = outputFrame.inputFrame())
{
// Replace (and dispose) the previous frame's camera parameters.
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
if (ExternalParameters)
{
ExternalParameters.Build(cameraParameters);
}
}
}
private void OnFrameUpdate(OutputFrame outputFrame)
{
// Prefer externally supplied optics (e.g. eyewear "eye") over the camera device's parameters.
// NOTE(review): assumes a frame change delivered cameraParameters before the first update — confirm.
var camParameters = ExternalParameters ? ExternalParameters.Parameters : cameraParameters;
var projection = camParameters.projection(TargetCamera.nearClipPlane, TargetCamera.farClipPlane, TargetCamera.aspect, arSession.Assembly.Display.Rotation, false, false).ToUnityMatrix();
if (ExternalParameters)
{
projection *= ExternalParameters.Transform;
}
projection *= currentDisplayCompensation;
if (projectHFilp)
{
// Mirror the projection along the x axis.
var translateMatrix = Matrix4x4.identity;
translateMatrix.m00 = -1;
projection = translateMatrix * projection;
}
TargetCamera.projectionMatrix = projection;
if (renderEvent == null)
{
if (TargetCamera)
{
renderEvent = TargetCamera.gameObject.AddComponent<RenderCameraEventHandler>();
// A mirrored projection reverses winding order, so culling is inverted while this camera renders.
renderEvent.PreRender += () => { GL.invertCulling = projectHFilp; };
renderEvent.PostRender += () => { if (projectHFilp) { GL.invertCulling = false; } };
}
}
else
{
// Camera went away: remove the event hook attached to it.
if (!TargetCamera)
{
Destroy(renderEvent);
}
}
}
}
}
fileFormatVersion: 2
guid: cfaf5bea4c10a8141a92abc840879a12
timeCreated: 1575021345
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en"><see cref="MonoBehaviour"/> which provides <see cref="Camera"/> rendering events.</para>
/// <para xml:lang="zh">提供<see cref="Camera"/>渲染事件的<see cref="MonoBehaviour"/>。</para>
/// </summary>
internal class RenderCameraEventHandler : MonoBehaviour
{
    /// <summary>
    /// <para xml:lang="en">Raised just before the <see cref="Camera"/> on this GameObject starts rendering the Scene.</para>
    /// <para xml:lang="zh">在<see cref="Camera"/>渲染场景之前触发。</para>
    /// </summary>
    public event Action PreRender;

    /// <summary>
    /// <para xml:lang="en">Raised right after the <see cref="Camera"/> on this GameObject finishes rendering the Scene.</para>
    /// <para xml:lang="zh">在<see cref="Camera"/>结束场景渲染之后触发。</para>
    /// </summary>
    public event Action PostRender;

    // Unity messages forwarded to the events; null-conditional invoke is the
    // standard thread-safe-read pattern for possibly-unsubscribed events.
    private void OnPreRender() => PreRender?.Invoke();

    private void OnPostRender() => PostRender?.Invoke();
}
}
fileFormatVersion: 2
guid: 0551f9a44a33f2740be510b9a5b774ee
timeCreated: 1592808747
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
//================================================================================================================================
//
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
/// <summary>
/// <para xml:lang="en">The render camera parameters. It is usually used when setting parameters of optical device, like "eye" from eyewears.</para>
/// <para xml:lang="zh">相机渲染参数与配置。通常在设置类似眼镜的“眼睛”的光学设备参数时使用。</para>
/// </summary>
[CreateAssetMenu(menuName = "EasyAR/Render Camera Parameters")]
public class RenderCameraParameters : ScriptableObject, IDisposable
{
    /// <summary>
    /// <para xml:lang="en">Device model.</para>
    /// <para xml:lang="zh">设备型号。</para>
    /// </summary>
    public string DeviceModel;
    /// <summary>
    /// <para xml:lang="en">Position offset.</para>
    /// <para xml:lang="zh">位置偏移。</para>
    /// </summary>
    public Vector3 PositionOffset;
    /// <summary>
    /// <para xml:lang="en">Rotation offset.</para>
    /// <para xml:lang="zh">角度偏移。</para>
    /// </summary>
    public Vector3 RotationOffset;
    /// <summary>
    /// <para xml:lang="en">(Image) size.</para>
    /// <para xml:lang="zh">(图像)大小。</para>
    /// </summary>
    public Vector2 Size;
    /// <summary>
    /// <para xml:lang="en">Focal length.</para>
    /// <para xml:lang="zh">焦距。</para>
    /// </summary>
    public Vector2 FocalLength;
    /// <summary>
    /// <para xml:lang="en">Principal point.</para>
    /// <para xml:lang="zh">主点。</para>
    /// </summary>
    public Vector2 PrincipalPoint;
    // Flips y/z of the position offset when converting into the render convention.
    private static Vector3 positionScale = new Vector3(1, -1, -1);
    ~RenderCameraParameters()
    {
        // Finalizer releases the native handle only if Dispose was never called
        // (Dispose suppresses finalization and nulls Parameters).
        Parameters?.Dispose();
    }
    /// <summary>
    /// <para xml:lang="en">Transform matrix.</para>
    /// <para xml:lang="zh">变换矩阵。</para>
    /// </summary>
    public Matrix4x4 Transform { get; private set; }
    /// <summary>
    /// <para xml:lang="en">The equivalent parameter of camera device. Null until <see cref="Build"/> is called or after <see cref="Dispose"/>.</para>
    /// <para xml:lang="zh">相机设备的等效参数。</para>
    /// </summary>
    public CameraParameters Parameters { get; private set; }
    /// <summary>
    /// <para xml:lang="en">Build <see cref="Transform"/> and <see cref="Parameters"/>.</para>
    /// <para xml:lang="zh">生成<see cref="Transform"/>和<see cref="Parameters"/>。</para>
    /// </summary>
    public void Build(CameraParameters cameraParameters)
    {
        Transform = Matrix4x4.TRS(Vector3.Scale(PositionOffset, positionScale), Quaternion.Euler(RotationOffset), Vector3.one);
        // Release any previously built parameters before replacing them.
        Parameters?.Dispose();
        Parameters = new CameraParameters(new Vec2I((int)Size.x, (int)Size.y), new Vec2F(FocalLength.x, FocalLength.y), new Vec2F(PrincipalPoint.x, PrincipalPoint.y),
            cameraParameters.cameraDeviceType(), cameraParameters.cameraOrientation());
    }
    /// <summary>
    /// <para xml:lang="en">Dispose resources.</para>
    /// <para xml:lang="zh">销毁资源。</para>
    /// </summary>
    public void Dispose()
    {
        Parameters?.Dispose();
        // Null the handle so a repeated Dispose (or a later Build) cannot
        // double-dispose the already-released native object.
        Parameters = null;
        GC.SuppressFinalize(this);
    }
}
}
fileFormatVersion: 2
guid: 9d30dd7b7fde8d142b79363b96897c5d
timeCreated: 1575030294
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment