Commit 708d3dee authored by BlackAngle233

update final design

parent 1444629e
fileFormatVersion: 2
guid: 4f0d9b25d70e3054686ccff2e35bce31
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CALLBACKSCHEDULER_H__
#define __EASYAR_CALLBACKSCHEDULER_H__
#include "easyar/types.h"
#ifdef __cplusplus
extern "C" {
#endif
void easyar_CallbackScheduler__dtor(easyar_CallbackScheduler * This);
void easyar_CallbackScheduler__retain(const easyar_CallbackScheduler * This, /* OUT */ easyar_CallbackScheduler * * Return);
const char * easyar_CallbackScheduler__typeName(const easyar_CallbackScheduler * This);
void easyar_DelayedCallbackScheduler__ctor(/* OUT */ easyar_DelayedCallbackScheduler * * Return);
/// <summary>
/// Executes a callback. If there is no callback to execute, false is returned.
/// </summary>
bool easyar_DelayedCallbackScheduler_runOne(easyar_DelayedCallbackScheduler * This);
void easyar_DelayedCallbackScheduler__dtor(easyar_DelayedCallbackScheduler * This);
void easyar_DelayedCallbackScheduler__retain(const easyar_DelayedCallbackScheduler * This, /* OUT */ easyar_DelayedCallbackScheduler * * Return);
const char * easyar_DelayedCallbackScheduler__typeName(const easyar_DelayedCallbackScheduler * This);
void easyar_castDelayedCallbackSchedulerToCallbackScheduler(const easyar_DelayedCallbackScheduler * This, /* OUT */ easyar_CallbackScheduler * * Return);
void easyar_tryCastCallbackSchedulerToDelayedCallbackScheduler(const easyar_CallbackScheduler * This, /* OUT */ easyar_DelayedCallbackScheduler * * Return);
/// <summary>
/// Gets a default immediate callback scheduler.
/// </summary>
void easyar_ImmediateCallbackScheduler_getDefault(/* OUT */ easyar_ImmediateCallbackScheduler * * Return);
void easyar_ImmediateCallbackScheduler__dtor(easyar_ImmediateCallbackScheduler * This);
void easyar_ImmediateCallbackScheduler__retain(const easyar_ImmediateCallbackScheduler * This, /* OUT */ easyar_ImmediateCallbackScheduler * * Return);
const char * easyar_ImmediateCallbackScheduler__typeName(const easyar_ImmediateCallbackScheduler * This);
void easyar_castImmediateCallbackSchedulerToCallbackScheduler(const easyar_ImmediateCallbackScheduler * This, /* OUT */ easyar_CallbackScheduler * * Return);
void easyar_tryCastCallbackSchedulerToImmediateCallbackScheduler(const easyar_CallbackScheduler * This, /* OUT */ easyar_ImmediateCallbackScheduler * * Return);
#ifdef __cplusplus
}
#endif
#endif
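A minimal usage sketch of the C interface above (not part of the SDK header). It assumes the ownership pattern implied by the accompanying C++ wrappers: objects returned through OUT parameters are released with the matching __dtor function, and runOne returns false once the queue is empty.

#include <stddef.h>
#include "easyar/callbackscheduler.h"

/* Sketch: create a delayed scheduler, drain any queued callbacks, then release it. */
static void pump_delayed_scheduler(void)
{
    easyar_DelayedCallbackScheduler * scheduler = NULL;
    easyar_DelayedCallbackScheduler__ctor(&scheduler);        /* caller owns the returned object */
    while (easyar_DelayedCallbackScheduler_runOne(scheduler)) {
        /* each iteration executes one queued callback */
    }
    easyar_DelayedCallbackScheduler__dtor(scheduler);         /* release, following the pattern used by the C++ wrappers */
}

/* Sketch: the default immediate scheduler is obtained from getDefault and released the same way. */
static void touch_default_immediate_scheduler(void)
{
    easyar_ImmediateCallbackScheduler * scheduler = NULL;
    easyar_ImmediateCallbackScheduler_getDefault(&scheduler);
    easyar_ImmediateCallbackScheduler__dtor(scheduler);
}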
fileFormatVersion: 2
guid: 73f33b6ead933bc498faf01fe8740eb8
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CALLBACKSCHEDULER_HXX__
#define __EASYAR_CALLBACKSCHEDULER_HXX__
#include "easyar/types.hxx"
namespace easyar {
/// <summary>
/// Callback scheduler.
/// There are two subclasses: `DelayedCallbackScheduler`_ and `ImmediateCallbackScheduler`_ .
/// `DelayedCallbackScheduler`_ queues callbacks so that they can be invoked manually later; it can be used in single-threaded environments (such as various UI environments).
/// `ImmediateCallbackScheduler`_ invokes callbacks immediately when the event is dispatched; it can be used in multi-threaded environments (such as a server or service daemon).
/// </summary>
class CallbackScheduler
{
protected:
easyar_CallbackScheduler * cdata_ ;
void init_cdata(easyar_CallbackScheduler * cdata);
virtual CallbackScheduler & operator=(const CallbackScheduler & data) { return *this; } //deleted
public:
CallbackScheduler(easyar_CallbackScheduler * cdata);
virtual ~CallbackScheduler();
CallbackScheduler(const CallbackScheduler & data);
const easyar_CallbackScheduler * get_cdata() const;
easyar_CallbackScheduler * get_cdata();
};
/// <summary>
/// Delayed callback scheduler.
/// It queues callbacks so that they can be invoked manually later; it can be used in single-threaded environments (such as various UI environments).
/// All members of this class are thread-safe.
/// </summary>
class DelayedCallbackScheduler : public CallbackScheduler
{
protected:
easyar_DelayedCallbackScheduler * cdata_ ;
void init_cdata(easyar_DelayedCallbackScheduler * cdata);
virtual DelayedCallbackScheduler & operator=(const DelayedCallbackScheduler & data) { return *this; } //deleted
public:
DelayedCallbackScheduler(easyar_DelayedCallbackScheduler * cdata);
virtual ~DelayedCallbackScheduler();
DelayedCallbackScheduler(const DelayedCallbackScheduler & data);
const easyar_DelayedCallbackScheduler * get_cdata() const;
easyar_DelayedCallbackScheduler * get_cdata();
DelayedCallbackScheduler();
/// <summary>
/// Executes a callback. If there is no callback to execute, false is returned.
/// </summary>
bool runOne();
static void tryCastFromCallbackScheduler(CallbackScheduler * v, /* OUT */ DelayedCallbackScheduler * * Return);
};
/// <summary>
/// Immediate callback scheduler.
/// It invokes callbacks immediately when the event is dispatched; it can be used in multi-threaded environments (such as a server or service daemon).
/// All members of this class are thread-safe.
/// </summary>
class ImmediateCallbackScheduler : public CallbackScheduler
{
protected:
easyar_ImmediateCallbackScheduler * cdata_ ;
void init_cdata(easyar_ImmediateCallbackScheduler * cdata);
virtual ImmediateCallbackScheduler & operator=(const ImmediateCallbackScheduler & data) { return *this; } //deleted
public:
ImmediateCallbackScheduler(easyar_ImmediateCallbackScheduler * cdata);
virtual ~ImmediateCallbackScheduler();
ImmediateCallbackScheduler(const ImmediateCallbackScheduler & data);
const easyar_ImmediateCallbackScheduler * get_cdata() const;
easyar_ImmediateCallbackScheduler * get_cdata();
/// <summary>
/// Gets a default immediate callback scheduler.
/// </summary>
static void getDefault(/* OUT */ ImmediateCallbackScheduler * * Return);
static void tryCastFromCallbackScheduler(CallbackScheduler * v, /* OUT */ ImmediateCallbackScheduler * * Return);
};
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_CALLBACKSCHEDULER_HXX__
#define __IMPLEMENTATION_EASYAR_CALLBACKSCHEDULER_HXX__
#include "easyar/callbackscheduler.h"
namespace easyar {
inline CallbackScheduler::CallbackScheduler(easyar_CallbackScheduler * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline CallbackScheduler::~CallbackScheduler()
{
if (cdata_) {
easyar_CallbackScheduler__dtor(cdata_);
cdata_ = NULL;
}
}
inline CallbackScheduler::CallbackScheduler(const CallbackScheduler & data)
:
cdata_(NULL)
{
easyar_CallbackScheduler * cdata = NULL;
easyar_CallbackScheduler__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_CallbackScheduler * CallbackScheduler::get_cdata() const
{
return cdata_;
}
inline easyar_CallbackScheduler * CallbackScheduler::get_cdata()
{
return cdata_;
}
inline void CallbackScheduler::init_cdata(easyar_CallbackScheduler * cdata)
{
cdata_ = cdata;
}
inline DelayedCallbackScheduler::DelayedCallbackScheduler(easyar_DelayedCallbackScheduler * cdata)
:
CallbackScheduler(static_cast<easyar_CallbackScheduler *>(NULL)),
cdata_(NULL)
{
init_cdata(cdata);
}
inline DelayedCallbackScheduler::~DelayedCallbackScheduler()
{
if (cdata_) {
easyar_DelayedCallbackScheduler__dtor(cdata_);
cdata_ = NULL;
}
}
inline DelayedCallbackScheduler::DelayedCallbackScheduler(const DelayedCallbackScheduler & data)
:
CallbackScheduler(static_cast<easyar_CallbackScheduler *>(NULL)),
cdata_(NULL)
{
easyar_DelayedCallbackScheduler * cdata = NULL;
easyar_DelayedCallbackScheduler__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_DelayedCallbackScheduler * DelayedCallbackScheduler::get_cdata() const
{
return cdata_;
}
inline easyar_DelayedCallbackScheduler * DelayedCallbackScheduler::get_cdata()
{
return cdata_;
}
inline void DelayedCallbackScheduler::init_cdata(easyar_DelayedCallbackScheduler * cdata)
{
cdata_ = cdata;
{
easyar_CallbackScheduler * cdata_inner = NULL;
easyar_castDelayedCallbackSchedulerToCallbackScheduler(cdata, &cdata_inner);
CallbackScheduler::init_cdata(cdata_inner);
}
}
inline DelayedCallbackScheduler::DelayedCallbackScheduler()
:
CallbackScheduler(static_cast<easyar_CallbackScheduler *>(NULL)),
cdata_(NULL)
{
easyar_DelayedCallbackScheduler * _return_value_ = NULL;
easyar_DelayedCallbackScheduler__ctor(&_return_value_);
init_cdata(_return_value_);
}
inline bool DelayedCallbackScheduler::runOne()
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_DelayedCallbackScheduler_runOne(cdata_);
return _return_value_;
}
inline void DelayedCallbackScheduler::tryCastFromCallbackScheduler(CallbackScheduler * v, /* OUT */ DelayedCallbackScheduler * * Return)
{
if (v == NULL) {
*Return = NULL;
return;
}
easyar_DelayedCallbackScheduler * cdata = NULL;
easyar_tryCastCallbackSchedulerToDelayedCallbackScheduler(v->get_cdata(), &cdata);
if (cdata == NULL) {
*Return = NULL;
return;
}
*Return = new DelayedCallbackScheduler(cdata);
}
inline ImmediateCallbackScheduler::ImmediateCallbackScheduler(easyar_ImmediateCallbackScheduler * cdata)
:
CallbackScheduler(static_cast<easyar_CallbackScheduler *>(NULL)),
cdata_(NULL)
{
init_cdata(cdata);
}
inline ImmediateCallbackScheduler::~ImmediateCallbackScheduler()
{
if (cdata_) {
easyar_ImmediateCallbackScheduler__dtor(cdata_);
cdata_ = NULL;
}
}
inline ImmediateCallbackScheduler::ImmediateCallbackScheduler(const ImmediateCallbackScheduler & data)
:
CallbackScheduler(static_cast<easyar_CallbackScheduler *>(NULL)),
cdata_(NULL)
{
easyar_ImmediateCallbackScheduler * cdata = NULL;
easyar_ImmediateCallbackScheduler__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_ImmediateCallbackScheduler * ImmediateCallbackScheduler::get_cdata() const
{
return cdata_;
}
inline easyar_ImmediateCallbackScheduler * ImmediateCallbackScheduler::get_cdata()
{
return cdata_;
}
inline void ImmediateCallbackScheduler::init_cdata(easyar_ImmediateCallbackScheduler * cdata)
{
cdata_ = cdata;
{
easyar_CallbackScheduler * cdata_inner = NULL;
easyar_castImmediateCallbackSchedulerToCallbackScheduler(cdata, &cdata_inner);
CallbackScheduler::init_cdata(cdata_inner);
}
}
inline void ImmediateCallbackScheduler::getDefault(/* OUT */ ImmediateCallbackScheduler * * Return)
{
easyar_ImmediateCallbackScheduler * _return_value_ = NULL;
easyar_ImmediateCallbackScheduler_getDefault(&_return_value_);
*Return = new ImmediateCallbackScheduler(_return_value_);
}
inline void ImmediateCallbackScheduler::tryCastFromCallbackScheduler(CallbackScheduler * v, /* OUT */ ImmediateCallbackScheduler * * Return)
{
if (v == NULL) {
*Return = NULL;
return;
}
easyar_ImmediateCallbackScheduler * cdata = NULL;
easyar_tryCastCallbackSchedulerToImmediateCallbackScheduler(v->get_cdata(), &cdata);
if (cdata == NULL) {
*Return = NULL;
return;
}
*Return = new ImmediateCallbackScheduler(cdata);
}
}
#endif
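A hedged usage sketch of the C++ wrappers above (not part of the SDK sources). It shows the manual callback pump of DelayedCallbackScheduler, the default ImmediateCallbackScheduler, and the tryCastFromCallbackScheduler helper; the function names below are illustrative only.

#include "easyar/callbackscheduler.hxx"

// In a single-threaded UI environment, callbacks scheduled on a DelayedCallbackScheduler
// are executed manually, e.g. once per frame from the UI/main loop.
inline void pumpCallbacks(easyar::DelayedCallbackScheduler & scheduler)
{
    while (scheduler.runOne()) {
        // runOne returns false once there is nothing left to execute
    }
}

inline void schedulerSketch()
{
    easyar::DelayedCallbackScheduler delayed;       // wraps easyar_DelayedCallbackScheduler__ctor
    pumpCallbacks(delayed);

    // The default immediate scheduler invokes callbacks on the dispatching thread.
    easyar::ImmediateCallbackScheduler * immediate = NULL;
    easyar::ImmediateCallbackScheduler::getDefault(&immediate);

    // Downcasting from the base type uses tryCastFromCallbackScheduler; the OUT
    // pointer is set to NULL when the cast fails, as it does here.
    easyar::DelayedCallbackScheduler * casted = NULL;
    easyar::DelayedCallbackScheduler::tryCastFromCallbackScheduler(immediate, &casted);
    delete casted;      // no-op when NULL
    delete immediate;   // OUT objects are allocated with new by these wrappers, so the caller deletes them
}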
fileFormatVersion: 2
guid: 7ec032dcbec3f374f83a204895550469
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import "easyar/types.oc.h"
/// <summary>
/// Callback scheduler.
/// There are two subclasses: `DelayedCallbackScheduler`_ and `ImmediateCallbackScheduler`_ .
/// `DelayedCallbackScheduler`_ queues callbacks so that they can be invoked manually later; it can be used in single-threaded environments (such as various UI environments).
/// `ImmediateCallbackScheduler`_ invokes callbacks immediately when the event is dispatched; it can be used in multi-threaded environments (such as a server or service daemon).
/// </summary>
@interface easyar_CallbackScheduler : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
@end
/// <summary>
/// Delayed callback scheduler.
/// It queues callbacks so that they can be invoked manually later; it can be used in single-threaded environments (such as various UI environments).
/// All members of this class are thread-safe.
/// </summary>
@interface easyar_DelayedCallbackScheduler : easyar_CallbackScheduler
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
+ (easyar_DelayedCallbackScheduler *) create;
/// <summary>
/// Executes a callback. If there is no callback to execute, false is returned.
/// </summary>
- (bool)runOne;
@end
/// <summary>
/// Immediate callback scheduler.
/// It invokes callbacks immediately when the event is dispatched; it can be used in multi-threaded environments (such as a server or service daemon).
/// All members of this class are thread-safe.
/// </summary>
@interface easyar_ImmediateCallbackScheduler : easyar_CallbackScheduler
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
/// <summary>
/// Gets a default immediate callback scheduler.
/// </summary>
+ (easyar_ImmediateCallbackScheduler *)getDefault;
@end
fileFormatVersion: 2
guid: 376683e7b9b1efd448b0b3e633cc8a5b
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CAMERA_H__
#define __EASYAR_CAMERA_H__
#include "easyar/types.h"
#ifdef __cplusplus
extern "C" {
#endif
void easyar_CameraDevice__ctor(/* OUT */ easyar_CameraDevice * * Return);
/// <summary>
/// Checks if the component is available. It returns true only on Windows, Mac, Android or iOS.
/// </summary>
bool easyar_CameraDevice_isAvailable(void);
/// <summary>
/// Gets current camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// </summary>
easyar_AndroidCameraApiType easyar_CameraDevice_androidCameraApiType(easyar_CameraDevice * This);
/// <summary>
/// Sets the current camera API (camera1 or camera2) on Android. It must be called before openWithIndex, openWithSpecificType or openWithPreferredType, or it will not take effect.
/// It is recommended to use `CameraDeviceSelector`_ to create the camera, so that the camera API is set to the recommended value for the primary algorithm to be run.
/// </summary>
void easyar_CameraDevice_setAndroidCameraApiType(easyar_CameraDevice * This, easyar_AndroidCameraApiType type);
/// <summary>
/// `InputFrame`_ buffer capacity. The default is 8.
/// </summary>
int easyar_CameraDevice_bufferCapacity(const easyar_CameraDevice * This);
/// <summary>
/// Sets `InputFrame`_ buffer capacity.
/// </summary>
void easyar_CameraDevice_setBufferCapacity(easyar_CameraDevice * This, int capacity);
/// <summary>
/// `InputFrame`_ output port.
/// </summary>
void easyar_CameraDevice_inputFrameSource(easyar_CameraDevice * This, /* OUT */ easyar_InputFrameSource * * Return);
/// <summary>
/// Sets callback on state change to notify state of camera disconnection or preemption. It is only available on Windows.
/// </summary>
void easyar_CameraDevice_setStateChangedCallback(easyar_CameraDevice * This, easyar_CallbackScheduler * callbackScheduler, easyar_OptionalOfFunctorOfVoidFromCameraState stateChangedCallback);
/// <summary>
/// Requests camera permission from the operating system. You can call this function or request permission directly from the operating system. It is only available on Android and iOS. On other platforms, it will call the callback directly with the status being granted. This function needs to be called from the UI thread.
/// </summary>
void easyar_CameraDevice_requestPermissions(easyar_CallbackScheduler * callbackScheduler, easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString permissionCallback);
/// <summary>
/// Gets count of cameras recognized by the operating system.
/// </summary>
int easyar_CameraDevice_cameraCount(void);
/// <summary>
/// Opens a camera by index.
/// </summary>
bool easyar_CameraDevice_openWithIndex(easyar_CameraDevice * This, int cameraIndex);
/// <summary>
/// Opens a camera by specific camera device type. If no camera is matched, false will be returned. On Mac, camera device types can not be distinguished.
/// </summary>
bool easyar_CameraDevice_openWithSpecificType(easyar_CameraDevice * This, easyar_CameraDeviceType type);
/// <summary>
/// Opens a camera by camera device type. If no camera is matched, the first camera will be used.
/// </summary>
bool easyar_CameraDevice_openWithPreferredType(easyar_CameraDevice * This, easyar_CameraDeviceType type);
/// <summary>
/// Starts video stream capture.
/// </summary>
bool easyar_CameraDevice_start(easyar_CameraDevice * This);
/// <summary>
/// Stops video stream capture. It only stops capture and does not change previously set camera parameters or the connection.
/// </summary>
void easyar_CameraDevice_stop(easyar_CameraDevice * This);
/// <summary>
/// Close. The component shall not be used after calling close.
/// </summary>
void easyar_CameraDevice_close(easyar_CameraDevice * This);
/// <summary>
/// Camera index.
/// </summary>
int easyar_CameraDevice_index(const easyar_CameraDevice * This);
/// <summary>
/// Camera type.
/// </summary>
easyar_CameraDeviceType easyar_CameraDevice_type(const easyar_CameraDevice * This);
/// <summary>
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation. Call after a successful open.
/// </summary>
void easyar_CameraDevice_cameraParameters(easyar_CameraDevice * This, /* OUT */ easyar_CameraParameters * * Return);
/// <summary>
/// Sets camera parameters. Call after a successful open.
/// </summary>
void easyar_CameraDevice_setCameraParameters(easyar_CameraDevice * This, easyar_CameraParameters * cameraParameters);
/// <summary>
/// Gets the current preview size. Call after a successful open.
/// </summary>
easyar_Vec2I easyar_CameraDevice_size(const easyar_CameraDevice * This);
/// <summary>
/// Gets the number of supported preview sizes. Call after a successful open.
/// </summary>
int easyar_CameraDevice_supportedSizeCount(const easyar_CameraDevice * This);
/// <summary>
/// Gets the index-th supported preview size. It returns {0, 0} if index is out of range. Call after a successful open.
/// </summary>
easyar_Vec2I easyar_CameraDevice_supportedSize(const easyar_CameraDevice * This, int index);
/// <summary>
/// Sets the preview size. The nearest available value will be selected. Call size to get the actual size. Call after a successful open. frameRateRange may change after calling setSize.
/// </summary>
bool easyar_CameraDevice_setSize(easyar_CameraDevice * This, easyar_Vec2I size);
/// <summary>
/// Gets the number of supported frame rate ranges. Call after a successful open.
/// </summary>
int easyar_CameraDevice_supportedFrameRateRangeCount(const easyar_CameraDevice * This);
/// <summary>
/// Gets range lower bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
float easyar_CameraDevice_supportedFrameRateRangeLower(const easyar_CameraDevice * This, int index);
/// <summary>
/// Gets range upper bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
float easyar_CameraDevice_supportedFrameRateRangeUpper(const easyar_CameraDevice * This, int index);
/// <summary>
/// Gets current index of frame rate range. Call after a successful open.
/// </summary>
int easyar_CameraDevice_frameRateRange(const easyar_CameraDevice * This);
/// <summary>
/// Sets current index of frame rate range. Call after a successful open.
/// </summary>
bool easyar_CameraDevice_setFrameRateRange(easyar_CameraDevice * This, int index);
/// <summary>
/// Sets flash torch mode to on. Call after a successful open.
/// </summary>
bool easyar_CameraDevice_setFlashTorchMode(easyar_CameraDevice * This, bool on);
/// <summary>
/// Sets focus mode to focusMode. Call after a successful open.
/// </summary>
bool easyar_CameraDevice_setFocusMode(easyar_CameraDevice * This, easyar_CameraDeviceFocusMode focusMode);
/// <summary>
/// Does auto focus once. Call after start. It is only available when FocusMode is Normal or Macro.
/// </summary>
bool easyar_CameraDevice_autoFocus(easyar_CameraDevice * This);
void easyar_CameraDevice__dtor(easyar_CameraDevice * This);
void easyar_CameraDevice__retain(const easyar_CameraDevice * This, /* OUT */ easyar_CameraDevice * * Return);
const char * easyar_CameraDevice__typeName(const easyar_CameraDevice * This);
/// <summary>
/// Gets recommended Android Camera API type by a specified preference.
/// </summary>
easyar_AndroidCameraApiType easyar_CameraDeviceSelector_getAndroidCameraApiType(easyar_CameraDevicePreference preference);
/// <summary>
/// Creates `CameraDevice`_ by a specified preference.
/// </summary>
void easyar_CameraDeviceSelector_createCameraDevice(easyar_CameraDevicePreference preference, /* OUT */ easyar_CameraDevice * * Return);
/// <summary>
/// Gets recommended Camera FocusMode type by a specified preference.
/// </summary>
easyar_CameraDeviceFocusMode easyar_CameraDeviceSelector_getFocusMode(easyar_CameraDevicePreference preference);
#ifdef __cplusplus
}
#endif
#endif
fileFormatVersion: 2
guid: b753708368c3ee54fbe9d53f510583b4
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CAMERA_HXX__
#define __EASYAR_CAMERA_HXX__
#include "easyar/types.hxx"
namespace easyar {
/// <summary>
/// CameraDevice implements a camera device, which outputs `InputFrame`_ (including image, camera parameters, and timestamp). It is available on Windows, Mac, Android and iOS.
/// After open, start/stop can be invoked to start or stop data collection. start/stop does not change previously set camera parameters.
/// When the component is no longer needed, call close to close it. It shall not be used after calling close.
/// CameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to `InputFrameSink`_ for use. Refer to `Overview <Overview.html>`__ .
/// bufferCapacity is the capacity of the `InputFrame`_ buffer. If the number of `InputFrame`_ objects that have been output from the device and not yet released exceeds this value, the device will not output new `InputFrame`_ objects until previous ones have been released. This may cause the screen to get stuck. Refer to `Overview <Overview.html>`__ .
/// On Android, it is required to add android.permission.CAMERA to AndroidManifest.xml for use.
/// On iOS, it is required to add NSCameraUsageDescription to Info.plist for use.
/// </summary>
class CameraDevice
{
protected:
easyar_CameraDevice * cdata_ ;
void init_cdata(easyar_CameraDevice * cdata);
virtual CameraDevice & operator=(const CameraDevice & data) { return *this; } //deleted
public:
CameraDevice(easyar_CameraDevice * cdata);
virtual ~CameraDevice();
CameraDevice(const CameraDevice & data);
const easyar_CameraDevice * get_cdata() const;
easyar_CameraDevice * get_cdata();
CameraDevice();
/// <summary>
/// Checks if the component is available. It returns true only on Windows, Mac, Android or iOS.
/// </summary>
static bool isAvailable();
/// <summary>
/// Gets current camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// </summary>
AndroidCameraApiType androidCameraApiType();
/// <summary>
/// Sets the current camera API (camera1 or camera2) on Android. It must be called before openWithIndex, openWithSpecificType or openWithPreferredType, or it will not take effect.
/// It is recommended to use `CameraDeviceSelector`_ to create the camera, so that the camera API is set to the recommended value for the primary algorithm to be run.
/// </summary>
void setAndroidCameraApiType(AndroidCameraApiType type);
/// <summary>
/// `InputFrame`_ buffer capacity. The default is 8.
/// </summary>
int bufferCapacity();
/// <summary>
/// Sets `InputFrame`_ buffer capacity.
/// </summary>
void setBufferCapacity(int capacity);
/// <summary>
/// `InputFrame`_ output port.
/// </summary>
void inputFrameSource(/* OUT */ InputFrameSource * * Return);
/// <summary>
/// Sets callback on state change to notify state of camera disconnection or preemption. It is only available on Windows.
/// </summary>
void setStateChangedCallback(CallbackScheduler * callbackScheduler, OptionalOfFunctorOfVoidFromCameraState stateChangedCallback);
/// <summary>
/// Requests camera permission from the operating system. You can call this function or request permission directly from the operating system. It is only available on Android and iOS. On other platforms, it will call the callback directly with the status being granted. This function needs to be called from the UI thread.
/// </summary>
static void requestPermissions(CallbackScheduler * callbackScheduler, OptionalOfFunctorOfVoidFromPermissionStatusAndString permissionCallback);
/// <summary>
/// Gets count of cameras recognized by the operating system.
/// </summary>
static int cameraCount();
/// <summary>
/// Opens a camera by index.
/// </summary>
bool openWithIndex(int cameraIndex);
/// <summary>
/// Opens a camera by specific camera device type. If no camera is matched, false will be returned. On Mac, camera device types can not be distinguished.
/// </summary>
bool openWithSpecificType(CameraDeviceType type);
/// <summary>
/// Opens a camera by camera device type. If no camera is matched, the first camera will be used.
/// </summary>
bool openWithPreferredType(CameraDeviceType type);
/// <summary>
/// Starts video stream capture.
/// </summary>
bool start();
/// <summary>
/// Stops video stream capture. It only stops capture and does not change previously set camera parameters or the connection.
/// </summary>
void stop();
/// <summary>
/// Close. The component shall not be used after calling close.
/// </summary>
void close();
/// <summary>
/// Camera index.
/// </summary>
int index();
/// <summary>
/// Camera type.
/// </summary>
CameraDeviceType type();
/// <summary>
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation. Call after a successful open.
/// </summary>
void cameraParameters(/* OUT */ CameraParameters * * Return);
/// <summary>
/// Sets camera parameters. Call after a successful open.
/// </summary>
void setCameraParameters(CameraParameters * cameraParameters);
/// <summary>
/// Gets the current preview size. Call after a successful open.
/// </summary>
Vec2I size();
/// <summary>
/// Gets the number of supported preview sizes. Call after a successful open.
/// </summary>
int supportedSizeCount();
/// <summary>
/// Gets the index-th supported preview size. It returns {0, 0} if index is out of range. Call after a successful open.
/// </summary>
Vec2I supportedSize(int index);
/// <summary>
/// Sets the preview size. The nearest available value will be selected. Call size to get the actual size. Call after a successful open. frameRateRange may change after calling setSize.
/// </summary>
bool setSize(Vec2I size);
/// <summary>
/// Gets the number of supported frame rate ranges. Call after a successful open.
/// </summary>
int supportedFrameRateRangeCount();
/// <summary>
/// Gets range lower bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
float supportedFrameRateRangeLower(int index);
/// <summary>
/// Gets range upper bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
float supportedFrameRateRangeUpper(int index);
/// <summary>
/// Gets current index of frame rate range. Call after a successful open.
/// </summary>
int frameRateRange();
/// <summary>
/// Sets current index of frame rate range. Call after a successful open.
/// </summary>
bool setFrameRateRange(int index);
/// <summary>
/// Sets flash torch mode to on. Call after a successful open.
/// </summary>
bool setFlashTorchMode(bool on);
/// <summary>
/// Sets focus mode to focusMode. Call after a successful open.
/// </summary>
bool setFocusMode(CameraDeviceFocusMode focusMode);
/// <summary>
/// Does auto focus once. Call after start. It is only available when FocusMode is Normal or Macro.
/// </summary>
bool autoFocus();
};
/// <summary>
/// It is used for selecting camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// Different preferences will choose camera1 or camera2 based on usage.
/// </summary>
class CameraDeviceSelector
{
public:
/// <summary>
/// Gets recommended Android Camera API type by a specified preference.
/// </summary>
static AndroidCameraApiType getAndroidCameraApiType(CameraDevicePreference preference);
/// <summary>
/// Creates `CameraDevice`_ by a specified preference.
/// </summary>
static void createCameraDevice(CameraDevicePreference preference, /* OUT */ CameraDevice * * Return);
/// <summary>
/// Gets recommended Camera FocusMode type by a specified preference.
/// </summary>
static CameraDeviceFocusMode getFocusMode(CameraDevicePreference preference);
};
#ifndef __EASYAR_FUNCTOROFVOIDFROMCAMERASTATE__
#define __EASYAR_FUNCTOROFVOIDFROMCAMERASTATE__
struct FunctorOfVoidFromCameraState
{
void * _state;
void (* func)(void * _state, CameraState);
void (* destroy)(void * _state);
FunctorOfVoidFromCameraState(void * _state, void (* func)(void * _state, CameraState), void (* destroy)(void * _state));
};
static void FunctorOfVoidFromCameraState_func(void * _state, easyar_CameraState, /* OUT */ easyar_String * * _exception);
static void FunctorOfVoidFromCameraState_destroy(void * _state);
static inline easyar_FunctorOfVoidFromCameraState FunctorOfVoidFromCameraState_to_c(FunctorOfVoidFromCameraState f);
#endif
#ifndef __EASYAR_OPTIONALOFFUNCTOROFVOIDFROMCAMERASTATE__
#define __EASYAR_OPTIONALOFFUNCTOROFVOIDFROMCAMERASTATE__
struct OptionalOfFunctorOfVoidFromCameraState
{
bool has_value;
FunctorOfVoidFromCameraState value;
};
static inline easyar_OptionalOfFunctorOfVoidFromCameraState OptionalOfFunctorOfVoidFromCameraState_to_c(OptionalOfFunctorOfVoidFromCameraState o);
#endif
#ifndef __EASYAR_FUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
#define __EASYAR_FUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
struct FunctorOfVoidFromPermissionStatusAndString
{
void * _state;
void (* func)(void * _state, PermissionStatus, String *);
void (* destroy)(void * _state);
FunctorOfVoidFromPermissionStatusAndString(void * _state, void (* func)(void * _state, PermissionStatus, String *), void (* destroy)(void * _state));
};
static void FunctorOfVoidFromPermissionStatusAndString_func(void * _state, easyar_PermissionStatus, easyar_String *, /* OUT */ easyar_String * * _exception);
static void FunctorOfVoidFromPermissionStatusAndString_destroy(void * _state);
static inline easyar_FunctorOfVoidFromPermissionStatusAndString FunctorOfVoidFromPermissionStatusAndString_to_c(FunctorOfVoidFromPermissionStatusAndString f);
#endif
#ifndef __EASYAR_OPTIONALOFFUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
#define __EASYAR_OPTIONALOFFUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
struct OptionalOfFunctorOfVoidFromPermissionStatusAndString
{
bool has_value;
FunctorOfVoidFromPermissionStatusAndString value;
};
static inline easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString OptionalOfFunctorOfVoidFromPermissionStatusAndString_to_c(OptionalOfFunctorOfVoidFromPermissionStatusAndString o);
#endif
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_CAMERA_HXX__
#define __IMPLEMENTATION_EASYAR_CAMERA_HXX__
#include "easyar/camera.h"
#include "easyar/dataflow.hxx"
#include "easyar/frame.hxx"
#include "easyar/image.hxx"
#include "easyar/buffer.hxx"
#include "easyar/cameraparameters.hxx"
#include "easyar/vector.hxx"
#include "easyar/matrix.hxx"
#include "easyar/callbackscheduler.hxx"
namespace easyar {
inline CameraDevice::CameraDevice(easyar_CameraDevice * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline CameraDevice::~CameraDevice()
{
if (cdata_) {
easyar_CameraDevice__dtor(cdata_);
cdata_ = NULL;
}
}
inline CameraDevice::CameraDevice(const CameraDevice & data)
:
cdata_(NULL)
{
easyar_CameraDevice * cdata = NULL;
easyar_CameraDevice__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_CameraDevice * CameraDevice::get_cdata() const
{
return cdata_;
}
inline easyar_CameraDevice * CameraDevice::get_cdata()
{
return cdata_;
}
inline void CameraDevice::init_cdata(easyar_CameraDevice * cdata)
{
cdata_ = cdata;
}
inline CameraDevice::CameraDevice()
:
cdata_(NULL)
{
easyar_CameraDevice * _return_value_ = NULL;
easyar_CameraDevice__ctor(&_return_value_);
init_cdata(_return_value_);
}
inline bool CameraDevice::isAvailable()
{
bool _return_value_ = easyar_CameraDevice_isAvailable();
return _return_value_;
}
inline AndroidCameraApiType CameraDevice::androidCameraApiType()
{
if (cdata_ == NULL) {
return AndroidCameraApiType();
}
easyar_AndroidCameraApiType _return_value_ = easyar_CameraDevice_androidCameraApiType(cdata_);
return static_cast<AndroidCameraApiType>(_return_value_);
}
inline void CameraDevice::setAndroidCameraApiType(AndroidCameraApiType arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_setAndroidCameraApiType(cdata_, static_cast<easyar_AndroidCameraApiType>(arg0));
}
inline int CameraDevice::bufferCapacity()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraDevice_bufferCapacity(cdata_);
return _return_value_;
}
inline void CameraDevice::setBufferCapacity(int arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_setBufferCapacity(cdata_, arg0);
}
inline void CameraDevice::inputFrameSource(/* OUT */ InputFrameSource * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_InputFrameSource * _return_value_ = NULL;
easyar_CameraDevice_inputFrameSource(cdata_, &_return_value_);
*Return = new InputFrameSource(_return_value_);
}
inline void CameraDevice::setStateChangedCallback(CallbackScheduler * arg0, OptionalOfFunctorOfVoidFromCameraState arg1)
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_setStateChangedCallback(cdata_, arg0->get_cdata(), OptionalOfFunctorOfVoidFromCameraState_to_c(arg1));
}
inline void CameraDevice::requestPermissions(CallbackScheduler * arg0, OptionalOfFunctorOfVoidFromPermissionStatusAndString arg1)
{
easyar_CameraDevice_requestPermissions(arg0->get_cdata(), OptionalOfFunctorOfVoidFromPermissionStatusAndString_to_c(arg1));
}
inline int CameraDevice::cameraCount()
{
int _return_value_ = easyar_CameraDevice_cameraCount();
return _return_value_;
}
inline bool CameraDevice::openWithIndex(int arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_openWithIndex(cdata_, arg0);
return _return_value_;
}
inline bool CameraDevice::openWithSpecificType(CameraDeviceType arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_openWithSpecificType(cdata_, static_cast<easyar_CameraDeviceType>(arg0));
return _return_value_;
}
inline bool CameraDevice::openWithPreferredType(CameraDeviceType arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_openWithPreferredType(cdata_, static_cast<easyar_CameraDeviceType>(arg0));
return _return_value_;
}
inline bool CameraDevice::start()
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_start(cdata_);
return _return_value_;
}
inline void CameraDevice::stop()
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_stop(cdata_);
}
inline void CameraDevice::close()
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_close(cdata_);
}
inline int CameraDevice::index()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraDevice_index(cdata_);
return _return_value_;
}
inline CameraDeviceType CameraDevice::type()
{
if (cdata_ == NULL) {
return CameraDeviceType();
}
easyar_CameraDeviceType _return_value_ = easyar_CameraDevice_type(cdata_);
return static_cast<CameraDeviceType>(_return_value_);
}
inline void CameraDevice::cameraParameters(/* OUT */ CameraParameters * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_CameraParameters * _return_value_ = NULL;
easyar_CameraDevice_cameraParameters(cdata_, &_return_value_);
*Return = new CameraParameters(_return_value_);
}
inline void CameraDevice::setCameraParameters(CameraParameters * arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_CameraDevice_setCameraParameters(cdata_, arg0->get_cdata());
}
inline Vec2I CameraDevice::size()
{
if (cdata_ == NULL) {
return Vec2I();
}
easyar_Vec2I _return_value_ = easyar_CameraDevice_size(cdata_);
return Vec2I(_return_value_.data[0], _return_value_.data[1]);
}
inline int CameraDevice::supportedSizeCount()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraDevice_supportedSizeCount(cdata_);
return _return_value_;
}
inline Vec2I CameraDevice::supportedSize(int arg0)
{
if (cdata_ == NULL) {
return Vec2I();
}
easyar_Vec2I _return_value_ = easyar_CameraDevice_supportedSize(cdata_, arg0);
return Vec2I(_return_value_.data[0], _return_value_.data[1]);
}
inline bool CameraDevice::setSize(Vec2I arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_setSize(cdata_, arg0.get_cdata());
return _return_value_;
}
inline int CameraDevice::supportedFrameRateRangeCount()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraDevice_supportedFrameRateRangeCount(cdata_);
return _return_value_;
}
inline float CameraDevice::supportedFrameRateRangeLower(int arg0)
{
if (cdata_ == NULL) {
return float();
}
float _return_value_ = easyar_CameraDevice_supportedFrameRateRangeLower(cdata_, arg0);
return _return_value_;
}
inline float CameraDevice::supportedFrameRateRangeUpper(int arg0)
{
if (cdata_ == NULL) {
return float();
}
float _return_value_ = easyar_CameraDevice_supportedFrameRateRangeUpper(cdata_, arg0);
return _return_value_;
}
inline int CameraDevice::frameRateRange()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraDevice_frameRateRange(cdata_);
return _return_value_;
}
inline bool CameraDevice::setFrameRateRange(int arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_setFrameRateRange(cdata_, arg0);
return _return_value_;
}
inline bool CameraDevice::setFlashTorchMode(bool arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_setFlashTorchMode(cdata_, arg0);
return _return_value_;
}
inline bool CameraDevice::setFocusMode(CameraDeviceFocusMode arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_setFocusMode(cdata_, static_cast<easyar_CameraDeviceFocusMode>(arg0));
return _return_value_;
}
inline bool CameraDevice::autoFocus()
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraDevice_autoFocus(cdata_);
return _return_value_;
}
inline AndroidCameraApiType CameraDeviceSelector::getAndroidCameraApiType(CameraDevicePreference arg0)
{
easyar_AndroidCameraApiType _return_value_ = easyar_CameraDeviceSelector_getAndroidCameraApiType(static_cast<easyar_CameraDevicePreference>(arg0));
return static_cast<AndroidCameraApiType>(_return_value_);
}
inline void CameraDeviceSelector::createCameraDevice(CameraDevicePreference arg0, /* OUT */ CameraDevice * * Return)
{
easyar_CameraDevice * _return_value_ = NULL;
easyar_CameraDeviceSelector_createCameraDevice(static_cast<easyar_CameraDevicePreference>(arg0), &_return_value_);
*Return = new CameraDevice(_return_value_);
}
inline CameraDeviceFocusMode CameraDeviceSelector::getFocusMode(CameraDevicePreference arg0)
{
easyar_CameraDeviceFocusMode _return_value_ = easyar_CameraDeviceSelector_getFocusMode(static_cast<easyar_CameraDevicePreference>(arg0));
return static_cast<CameraDeviceFocusMode>(_return_value_);
}
#ifndef __IMPLEMENTATION_EASYAR_OPTIONALOFFUNCTOROFVOIDFROMCAMERASTATE__
#define __IMPLEMENTATION_EASYAR_OPTIONALOFFUNCTOROFVOIDFROMCAMERASTATE__
static inline easyar_OptionalOfFunctorOfVoidFromCameraState OptionalOfFunctorOfVoidFromCameraState_to_c(OptionalOfFunctorOfVoidFromCameraState o)
{
if (o.has_value) {
easyar_OptionalOfFunctorOfVoidFromCameraState _return_value_ = {true, FunctorOfVoidFromCameraState_to_c(o.value)};
return _return_value_;
} else {
easyar_OptionalOfFunctorOfVoidFromCameraState _return_value_ = {false, {NULL, NULL, NULL}};
return _return_value_;
}
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_FUNCTOROFVOIDFROMCAMERASTATE__
#define __IMPLEMENTATION_EASYAR_FUNCTOROFVOIDFROMCAMERASTATE__
inline FunctorOfVoidFromCameraState::FunctorOfVoidFromCameraState(void * _state, void (* func)(void * _state, CameraState), void (* destroy)(void * _state))
{
this->_state = _state;
this->func = func;
this->destroy = destroy;
}
static void FunctorOfVoidFromCameraState_func(void * _state, easyar_CameraState arg0, /* OUT */ easyar_String * * _exception)
{
*_exception = NULL;
try {
CameraState cpparg0 = static_cast<CameraState>(arg0);
FunctorOfVoidFromCameraState * f = reinterpret_cast<FunctorOfVoidFromCameraState *>(_state);
f->func(f->_state, cpparg0);
} catch (std::exception & ex) {
easyar_String_from_utf8_begin(ex.what(), _exception);
}
}
static void FunctorOfVoidFromCameraState_destroy(void * _state)
{
FunctorOfVoidFromCameraState * f = reinterpret_cast<FunctorOfVoidFromCameraState *>(_state);
if (f->destroy) {
f->destroy(f->_state);
}
delete f;
}
static inline easyar_FunctorOfVoidFromCameraState FunctorOfVoidFromCameraState_to_c(FunctorOfVoidFromCameraState f)
{
easyar_FunctorOfVoidFromCameraState _return_value_ = {NULL, NULL, NULL};
_return_value_._state = new FunctorOfVoidFromCameraState(f._state, f.func, f.destroy);
_return_value_.func = FunctorOfVoidFromCameraState_func;
_return_value_.destroy = FunctorOfVoidFromCameraState_destroy;
return _return_value_;
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_OPTIONALOFFUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
#define __IMPLEMENTATION_EASYAR_OPTIONALOFFUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
static inline easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString OptionalOfFunctorOfVoidFromPermissionStatusAndString_to_c(OptionalOfFunctorOfVoidFromPermissionStatusAndString o)
{
if (o.has_value) {
easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString _return_value_ = {true, FunctorOfVoidFromPermissionStatusAndString_to_c(o.value)};
return _return_value_;
} else {
easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString _return_value_ = {false, {NULL, NULL, NULL}};
return _return_value_;
}
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_FUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
#define __IMPLEMENTATION_EASYAR_FUNCTOROFVOIDFROMPERMISSIONSTATUSANDSTRING__
inline FunctorOfVoidFromPermissionStatusAndString::FunctorOfVoidFromPermissionStatusAndString(void * _state, void (* func)(void * _state, PermissionStatus, String *), void (* destroy)(void * _state))
{
this->_state = _state;
this->func = func;
this->destroy = destroy;
}
static void FunctorOfVoidFromPermissionStatusAndString_func(void * _state, easyar_PermissionStatus arg0, easyar_String * arg1, /* OUT */ easyar_String * * _exception)
{
*_exception = NULL;
try {
PermissionStatus cpparg0 = static_cast<PermissionStatus>(arg0);
easyar_String_copy(arg1, &arg1);
String * cpparg1 = new String(arg1);
FunctorOfVoidFromPermissionStatusAndString * f = reinterpret_cast<FunctorOfVoidFromPermissionStatusAndString *>(_state);
f->func(f->_state, cpparg0, cpparg1);
delete cpparg1;
} catch (std::exception & ex) {
easyar_String_from_utf8_begin(ex.what(), _exception);
}
}
static void FunctorOfVoidFromPermissionStatusAndString_destroy(void * _state)
{
FunctorOfVoidFromPermissionStatusAndString * f = reinterpret_cast<FunctorOfVoidFromPermissionStatusAndString *>(_state);
if (f->destroy) {
f->destroy(f->_state);
}
delete f;
}
static inline easyar_FunctorOfVoidFromPermissionStatusAndString FunctorOfVoidFromPermissionStatusAndString_to_c(FunctorOfVoidFromPermissionStatusAndString f)
{
easyar_FunctorOfVoidFromPermissionStatusAndString _return_value_ = {NULL, NULL, NULL};
_return_value_._state = new FunctorOfVoidFromPermissionStatusAndString(f._state, f.func, f.destroy);
_return_value_.func = FunctorOfVoidFromPermissionStatusAndString_func;
_return_value_.destroy = FunctorOfVoidFromPermissionStatusAndString_destroy;
return _return_value_;
}
#endif
}
#endif
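A hedged end-to-end sketch of the C++ wrappers above (not part of the SDK sources). easyar::PermissionStatus, easyar::String, easyar::CameraState, easyar::Vec2I and easyar::InputFrameSource come from other SDK headers; the callback functions and openCameraSketch are illustrative names only, and wiring inputFrameSource to an InputFrameSink is omitted.

#include "easyar/camera.hxx"
#include "easyar/callbackscheduler.hxx"

// Illustrative callbacks. The String * passed to the permission callback is owned and
// deleted by the bridging code above (FunctorOfVoidFromPermissionStatusAndString_func).
static void onPermissionResult(void * /*state*/, easyar::PermissionStatus /*status*/, easyar::String * /*message*/)
{
}
static void onCameraStateChanged(void * /*state*/, easyar::CameraState /*cameraState*/)
{
}

inline void openCameraSketch()
{
    using namespace easyar;
    if (!CameraDevice::isAvailable()) {
        return;                                             // only Windows, Mac, Android and iOS are supported
    }

    ImmediateCallbackScheduler * scheduler = NULL;
    ImmediateCallbackScheduler::getDefault(&scheduler);     // callbacks run on the dispatching thread

    // Request camera permission; on desktop platforms the callback is invoked directly with a granted status.
    OptionalOfFunctorOfVoidFromPermissionStatusAndString permissionCallback =
        { true, FunctorOfVoidFromPermissionStatusAndString(NULL, &onPermissionResult, NULL) };
    CameraDevice::requestPermissions(scheduler, permissionCallback);

    CameraDevice camera;                                    // or use CameraDeviceSelector::createCameraDevice
    OptionalOfFunctorOfVoidFromCameraState stateCallback =
        { true, FunctorOfVoidFromCameraState(NULL, &onCameraStateChanged, NULL) };
    camera.setStateChangedCallback(scheduler, stateCallback);   // only effective on Windows

    if (camera.openWithIndex(0)) {
        camera.setSize(Vec2I(1280, 720));                   // the nearest supported size is selected
        InputFrameSource * source = NULL;
        camera.inputFrameSource(&source);                   // connect this to an InputFrameSink (omitted here)
        camera.start();
        // ... run the session ...
        camera.stop();
        camera.close();                                     // the device shall not be used after close
        delete source;
    }
    delete scheduler;                                       // OUT objects are allocated with new by these wrappers
}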
fileFormatVersion: 2
guid: 2e0a1ea22a2365b4ab48e3ace362844a
timeCreated: 1611716671
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import "easyar/types.oc.h"
/// <summary>
/// CameraDevice implements a camera device, which outputs `InputFrame`_ (including image, camera parameters, and timestamp). It is available on Windows, Mac, Android and iOS.
/// After open, start/stop can be invoked to start or stop data collection. start/stop does not change previously set camera parameters.
/// When the component is no longer needed, call close to close it. It shall not be used after calling close.
/// CameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to `InputFrameSink`_ for use. Refer to `Overview <Overview.html>`__ .
/// bufferCapacity is the capacity of the `InputFrame`_ buffer. If the number of `InputFrame`_ objects that have been output from the device and not yet released exceeds this value, the device will not output new `InputFrame`_ objects until previous ones have been released. This may cause the screen to get stuck. Refer to `Overview <Overview.html>`__ .
/// On Android, it is required to add android.permission.CAMERA to AndroidManifest.xml for use.
/// On iOS, it is required to add NSCameraUsageDescription to Info.plist for use.
/// </summary>
@interface easyar_CameraDevice : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
+ (easyar_CameraDevice *) create;
/// <summary>
/// Checks if the component is available. It returns true only on Windows, Mac, Android or iOS.
/// </summary>
+ (bool)isAvailable;
/// <summary>
/// Gets current camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// </summary>
- (easyar_AndroidCameraApiType)androidCameraApiType;
/// <summary>
/// Sets the current camera API (camera1 or camera2) on Android. It must be called before openWithIndex, openWithSpecificType or openWithPreferredType, or it will not take effect.
/// It is recommended to use `CameraDeviceSelector`_ to create the camera, so that the camera API is set to the recommended value for the primary algorithm to be run.
/// </summary>
- (void)setAndroidCameraApiType:(easyar_AndroidCameraApiType)type;
/// <summary>
/// `InputFrame`_ buffer capacity. The default is 8.
/// </summary>
- (int)bufferCapacity;
/// <summary>
/// Sets `InputFrame`_ buffer capacity.
/// </summary>
- (void)setBufferCapacity:(int)capacity;
/// <summary>
/// `InputFrame`_ output port.
/// </summary>
- (easyar_InputFrameSource *)inputFrameSource;
/// <summary>
/// Sets callback on state change to notify state of camera disconnection or preemption. It is only available on Windows.
/// </summary>
- (void)setStateChangedCallback:(easyar_CallbackScheduler *)callbackScheduler stateChangedCallback:(void (^)(easyar_CameraState))stateChangedCallback;
/// <summary>
/// Requests camera permission from the operating system. You can call this function or request permission directly from the operating system. It is only available on Android and iOS. On other platforms, it will call the callback directly with the status being granted. This function needs to be called from the UI thread.
/// </summary>
+ (void)requestPermissions:(easyar_CallbackScheduler *)callbackScheduler permissionCallback:(void (^)(easyar_PermissionStatus status, NSString * value))permissionCallback;
/// <summary>
/// Gets count of cameras recognized by the operating system.
/// </summary>
+ (int)cameraCount;
/// <summary>
/// Opens a camera by index.
/// </summary>
- (bool)openWithIndex:(int)cameraIndex;
/// <summary>
/// Opens a camera by specific camera device type. If no camera is matched, false will be returned. On Mac, camera device types can not be distinguished.
/// </summary>
- (bool)openWithSpecificType:(easyar_CameraDeviceType)type;
/// <summary>
/// Opens a camera by camera device type. If no camera is matched, the first camera will be used.
/// </summary>
- (bool)openWithPreferredType:(easyar_CameraDeviceType)type;
/// <summary>
/// Starts video stream capture.
/// </summary>
- (bool)start;
/// <summary>
/// Stops video stream capture. It only stops capture and does not change previously set camera parameters or the connection.
/// </summary>
- (void)stop;
/// <summary>
/// Close. The component shall not be used after calling close.
/// </summary>
- (void)close;
/// <summary>
/// Camera index.
/// </summary>
- (int)index;
/// <summary>
/// Camera type.
/// </summary>
- (easyar_CameraDeviceType)type;
/// <summary>
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation. Call after a successful open.
/// </summary>
- (easyar_CameraParameters *)cameraParameters;
/// <summary>
/// Sets camera parameters. Call after a successful open.
/// </summary>
- (void)setCameraParameters:(easyar_CameraParameters *)cameraParameters;
/// <summary>
/// Gets the current preview size. Call after a successful open.
/// </summary>
- (easyar_Vec2I *)size;
/// <summary>
/// Gets the number of supported preview sizes. Call after a successful open.
/// </summary>
- (int)supportedSizeCount;
/// <summary>
/// Gets the index-th supported preview size. It returns {0, 0} if index is out of range. Call after a successful open.
/// </summary>
- (easyar_Vec2I *)supportedSize:(int)index;
/// <summary>
/// Sets the preview size. The nearest available value will be selected. Call size to get the actual size. Call after a successful open. frameRateRange may change after calling setSize.
/// </summary>
- (bool)setSize:(easyar_Vec2I *)size;
/// <summary>
/// Gets the number of supported frame rate ranges. Call after a successful open.
/// </summary>
- (int)supportedFrameRateRangeCount;
/// <summary>
/// Gets range lower bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
- (float)supportedFrameRateRangeLower:(int)index;
/// <summary>
/// Gets range upper bound of the index-th supported frame rate range. Call after a successful open.
/// </summary>
- (float)supportedFrameRateRangeUpper:(int)index;
/// <summary>
/// Gets current index of frame rate range. Call after a successful open.
/// </summary>
- (int)frameRateRange;
/// <summary>
/// Sets current index of frame rate range. Call after a successful open.
/// </summary>
- (bool)setFrameRateRange:(int)index;
/// <summary>
/// Sets flash torch mode to on. Call after a successful open.
/// </summary>
- (bool)setFlashTorchMode:(bool)on;
/// <summary>
/// Sets focus mode to focusMode. Call after a successful open.
/// </summary>
- (bool)setFocusMode:(easyar_CameraDeviceFocusMode)focusMode;
/// <summary>
/// Does auto focus once. Call after start. It is only available when FocusMode is Normal or Macro.
/// </summary>
- (bool)autoFocus;
@end
/// <summary>
/// It is used for selecting camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// Different preferences will choose camera1 or camera2 based on usage.
/// </summary>
@interface easyar_CameraDeviceSelector : NSObject
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
/// <summary>
/// Gets recommended Android Camera API type by a specified preference.
/// </summary>
+ (easyar_AndroidCameraApiType)getAndroidCameraApiType:(easyar_CameraDevicePreference)preference;
/// <summary>
/// Creates `CameraDevice`_ by a specified preference.
/// </summary>
+ (easyar_CameraDevice *)createCameraDevice:(easyar_CameraDevicePreference)preference;
/// <summary>
/// Gets recommended Camera FocusMode type by a specified preference.
/// </summary>
+ (easyar_CameraDeviceFocusMode)getFocusMode:(easyar_CameraDevicePreference)preference;
@end
fileFormatVersion: 2
guid: af86ebfeae1eed646a4a3fe6f725effe
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CAMERAPARAMETERS_H__
#define __EASYAR_CAMERAPARAMETERS_H__
#include "easyar/types.h"
#ifdef __cplusplus
extern "C" {
#endif
void easyar_CameraParameters__ctor(easyar_Vec2I imageSize, easyar_Vec2F focalLength, easyar_Vec2F principalPoint, easyar_CameraDeviceType cameraDeviceType, int cameraOrientation, /* OUT */ easyar_CameraParameters * * Return);
/// <summary>
/// Image size.
/// </summary>
easyar_Vec2I easyar_CameraParameters_size(const easyar_CameraParameters * This);
/// <summary>
/// Focal length: the distance from the effective optical center to the CCD plane, divided by the unit pixel density in the width and height directions. The unit is pixels.
/// </summary>
easyar_Vec2F easyar_CameraParameters_focalLength(const easyar_CameraParameters * This);
/// <summary>
/// Principal point: the coordinates of the intersection of the principal axis with the CCD plane, measured from the left-top corner of the image. The unit is pixels.
/// </summary>
easyar_Vec2F easyar_CameraParameters_principalPoint(const easyar_CameraParameters * This);
/// <summary>
/// Camera device type. Default, back or front camera. On desktop devices, there are only default cameras. On mobile devices, there is a differentiation between back and front cameras.
/// </summary>
easyar_CameraDeviceType easyar_CameraParameters_cameraDeviceType(const easyar_CameraParameters * This);
/// <summary>
/// Camera rotation against device natural orientation.
/// For Android phones and some Android tablets, this value is 90 degrees.
/// For Android eye-wear and some Android tablets, this value is 0 degrees.
/// For all current iOS devices, this value is 90 degrees.
/// </summary>
int easyar_CameraParameters_cameraOrientation(const easyar_CameraParameters * This);
/// <summary>
/// Creates CameraParameters with default camera intrinsics. Default intrinsics are estimated from the image size only, so they are not very precise.
/// </summary>
void easyar_CameraParameters_createWithDefaultIntrinsics(easyar_Vec2I imageSize, easyar_CameraDeviceType cameraDeviceType, int cameraOrientation, /* OUT */ easyar_CameraParameters * * Return);
/// <summary>
/// Gets the equivalent CameraParameters for a different camera image size.
/// </summary>
void easyar_CameraParameters_getResized(easyar_CameraParameters * This, easyar_Vec2I imageSize, /* OUT */ easyar_CameraParameters * * Return);
/// <summary>
/// Calculates the angle required to rotate the camera image clockwise to align it with the screen.
/// screenRotation is the clockwise rotation angle, in degrees, of the displayed screen image against the device's natural orientation.
/// For iOS (UIInterfaceOrientationPortrait as the natural orientation):
/// * UIInterfaceOrientationPortrait: rotation = 0
/// * UIInterfaceOrientationLandscapeRight: rotation = 90
/// * UIInterfaceOrientationPortraitUpsideDown: rotation = 180
/// * UIInterfaceOrientationLandscapeLeft: rotation = 270
/// For Android:
/// * Surface.ROTATION_0 = 0
/// * Surface.ROTATION_90 = 90
/// * Surface.ROTATION_180 = 180
/// * Surface.ROTATION_270 = 270
/// </summary>
int easyar_CameraParameters_imageOrientation(const easyar_CameraParameters * This, int screenRotation);
/// <summary>
/// Calculates whether the image needs to be flipped horizontally. The image is rotated first and then flipped during rendering. When cameraDeviceType is front, a flip is applied automatically. Pass true for manualHorizontalFlip to add a manual flip.
/// </summary>
bool easyar_CameraParameters_imageHorizontalFlip(const easyar_CameraParameters * This, bool manualHorizontalFlip);
/// <summary>
/// Calculates the perspective projection matrix needed by virtual object rendering. The projection transforms points from the camera coordinate system to the clip coordinate system ([-1, 1]^4). The form of the perspective projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
easyar_Matrix44F easyar_CameraParameters_projection(const easyar_CameraParameters * This, float nearPlane, float farPlane, float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip);
/// <summary>
/// Calculates the orthogonal projection matrix needed by camera background rendering. The projection transforms points from the image quad coordinate system ([-1, 1]^2) to the clip coordinate system ([-1, 1]^4), leaving the two undefined dimensions unchanged. The form of the orthogonal projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
easyar_Matrix44F easyar_CameraParameters_imageProjection(const easyar_CameraParameters * This, float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip);
/// <summary>
/// Transforms points from the image coordinate system ([0, 1]^2) to the screen coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
easyar_Vec2F easyar_CameraParameters_screenCoordinatesFromImageCoordinates(const easyar_CameraParameters * This, float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip, easyar_Vec2F imageCoordinates);
/// <summary>
/// Transforms points from the screen coordinate system ([0, 1]^2) to the image coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
easyar_Vec2F easyar_CameraParameters_imageCoordinatesFromScreenCoordinates(const easyar_CameraParameters * This, float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip, easyar_Vec2F screenCoordinates);
/// <summary>
/// Checks if two groups of parameters are equal.
/// </summary>
bool easyar_CameraParameters_equalsTo(const easyar_CameraParameters * This, easyar_CameraParameters * other);
void easyar_CameraParameters__dtor(easyar_CameraParameters * This);
void easyar_CameraParameters__retain(const easyar_CameraParameters * This, /* OUT */ easyar_CameraParameters * * Return);
const char * easyar_CameraParameters__typeName(const easyar_CameraParameters * This);
#ifdef __cplusplus
}
#endif
#endif
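// Usage sketch (not part of the SDK headers), calling the C API above from C++. The
// easyar_Vec2I initializer assumes the { int data[2]; } layout from easyar/types.h, and
// easyar_CameraDeviceType_Back is an assumed enum value; neither definition is part of
// this excerpt.
#include "easyar/cameraparameters.h"
easyar_Matrix44F makeDefaultProjection(float viewportAspectRatio)
{
    easyar_Vec2I imageSize = {{1280, 720}};
    easyar_CameraParameters * params = NULL;
    easyar_CameraParameters_createWithDefaultIntrinsics(imageSize, easyar_CameraDeviceType_Back, 90, &params);
    // Arbitrary near/far planes; screenRotation 0 means the device's natural orientation.
    easyar_Matrix44F projection = easyar_CameraParameters_projection(params, 0.01f, 1000.0f, viewportAspectRatio, 0, true, false);
    easyar_CameraParameters__dtor(params); // release the reference created above
    return projection;
}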
fileFormatVersion: 2
guid: 7ede4df1af2a2e340af74eec72892047
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CAMERAPARAMETERS_HXX__
#define __EASYAR_CAMERAPARAMETERS_HXX__
#include "easyar/types.hxx"
namespace easyar {
/// <summary>
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation.
/// </summary>
class CameraParameters
{
protected:
easyar_CameraParameters * cdata_ ;
void init_cdata(easyar_CameraParameters * cdata);
virtual CameraParameters & operator=(const CameraParameters & data) { return *this; } //deleted
public:
CameraParameters(easyar_CameraParameters * cdata);
virtual ~CameraParameters();
CameraParameters(const CameraParameters & data);
const easyar_CameraParameters * get_cdata() const;
easyar_CameraParameters * get_cdata();
CameraParameters(Vec2I imageSize, Vec2F focalLength, Vec2F principalPoint, CameraDeviceType cameraDeviceType, int cameraOrientation);
/// <summary>
/// Image size.
/// </summary>
Vec2I size();
/// <summary>
/// Focal length: the distance from the effective optical center to the CCD plane, divided by the unit pixel density in the width and height directions. The unit is pixels.
/// </summary>
Vec2F focalLength();
/// <summary>
/// Principal point: the coordinates of the intersection of the principal axis with the CCD plane, measured from the left-top corner of the image. The unit is pixels.
/// </summary>
Vec2F principalPoint();
/// <summary>
/// Camera device type. Default, back or front camera. On desktop devices, there are only default cameras. On mobile devices, there is a differentiation between back and front cameras.
/// </summary>
CameraDeviceType cameraDeviceType();
/// <summary>
/// Camera rotation against device natural orientation.
/// For Android phones and some Android tablets, this value is 90 degrees.
/// For Android eye-wear and some Android tablets, this value is 0 degrees.
/// For all current iOS devices, this value is 90 degrees.
/// </summary>
int cameraOrientation();
/// <summary>
/// Creates CameraParameters with default camera intrinsics. Default intrinsics are estimated from the image size only, so they are not very precise.
/// </summary>
static void createWithDefaultIntrinsics(Vec2I imageSize, CameraDeviceType cameraDeviceType, int cameraOrientation, /* OUT */ CameraParameters * * Return);
/// <summary>
/// Gets the equivalent CameraParameters for a different camera image size.
/// </summary>
void getResized(Vec2I imageSize, /* OUT */ CameraParameters * * Return);
/// <summary>
/// Calculates the angle required to rotate the camera image clockwise to align it with the screen.
/// screenRotation is the clockwise rotation angle, in degrees, of the displayed screen image against the device's natural orientation.
/// For iOS (UIInterfaceOrientationPortrait as the natural orientation):
/// * UIInterfaceOrientationPortrait: rotation = 0
/// * UIInterfaceOrientationLandscapeRight: rotation = 90
/// * UIInterfaceOrientationPortraitUpsideDown: rotation = 180
/// * UIInterfaceOrientationLandscapeLeft: rotation = 270
/// For Android:
/// * Surface.ROTATION_0 = 0
/// * Surface.ROTATION_90 = 90
/// * Surface.ROTATION_180 = 180
/// * Surface.ROTATION_270 = 270
/// </summary>
int imageOrientation(int screenRotation);
/// <summary>
/// Calculates whether the image needs to be flipped horizontally. The image is rotated first and then flipped during rendering. When cameraDeviceType is front, a flip is applied automatically. Pass true for manualHorizontalFlip to add a manual flip.
/// </summary>
bool imageHorizontalFlip(bool manualHorizontalFlip);
/// <summary>
/// Calculates the perspective projection matrix needed by virtual object rendering. The projection transforms points from the camera coordinate system to the clip coordinate system ([-1, 1]^4). The form of the perspective projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
Matrix44F projection(float nearPlane, float farPlane, float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip);
/// <summary>
/// Calculates the orthogonal projection matrix needed by camera background rendering. The projection transforms points from the image quad coordinate system ([-1, 1]^2) to the clip coordinate system ([-1, 1]^4), leaving the two undefined dimensions unchanged. The form of the orthogonal projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
Matrix44F imageProjection(float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip);
/// <summary>
/// Transforms points from the image coordinate system ([0, 1]^2) to the screen coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
Vec2F screenCoordinatesFromImageCoordinates(float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip, Vec2F imageCoordinates);
/// <summary>
/// Transforms points from the screen coordinate system ([0, 1]^2) to the image coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
Vec2F imageCoordinatesFromScreenCoordinates(float viewportAspectRatio, int screenRotation, bool combiningFlip, bool manualHorizontalFlip, Vec2F screenCoordinates);
/// <summary>
/// Checks if two groups of parameters are equal.
/// </summary>
bool equalsTo(CameraParameters * other);
};
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_CAMERAPARAMETERS_HXX__
#define __IMPLEMENTATION_EASYAR_CAMERAPARAMETERS_HXX__
#include "easyar/cameraparameters.h"
#include "easyar/vector.hxx"
#include "easyar/matrix.hxx"
namespace easyar {
inline CameraParameters::CameraParameters(easyar_CameraParameters * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline CameraParameters::~CameraParameters()
{
if (cdata_) {
easyar_CameraParameters__dtor(cdata_);
cdata_ = NULL;
}
}
inline CameraParameters::CameraParameters(const CameraParameters & data)
:
cdata_(NULL)
{
easyar_CameraParameters * cdata = NULL;
easyar_CameraParameters__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_CameraParameters * CameraParameters::get_cdata() const
{
return cdata_;
}
inline easyar_CameraParameters * CameraParameters::get_cdata()
{
return cdata_;
}
inline void CameraParameters::init_cdata(easyar_CameraParameters * cdata)
{
cdata_ = cdata;
}
inline CameraParameters::CameraParameters(Vec2I arg0, Vec2F arg1, Vec2F arg2, CameraDeviceType arg3, int arg4)
:
cdata_(NULL)
{
easyar_CameraParameters * _return_value_ = NULL;
easyar_CameraParameters__ctor(arg0.get_cdata(), arg1.get_cdata(), arg2.get_cdata(), static_cast<easyar_CameraDeviceType>(arg3), arg4, &_return_value_);
init_cdata(_return_value_);
}
inline Vec2I CameraParameters::size()
{
if (cdata_ == NULL) {
return Vec2I();
}
easyar_Vec2I _return_value_ = easyar_CameraParameters_size(cdata_);
return Vec2I(_return_value_.data[0], _return_value_.data[1]);
}
inline Vec2F CameraParameters::focalLength()
{
if (cdata_ == NULL) {
return Vec2F();
}
easyar_Vec2F _return_value_ = easyar_CameraParameters_focalLength(cdata_);
return Vec2F(_return_value_.data[0], _return_value_.data[1]);
}
inline Vec2F CameraParameters::principalPoint()
{
if (cdata_ == NULL) {
return Vec2F();
}
easyar_Vec2F _return_value_ = easyar_CameraParameters_principalPoint(cdata_);
return Vec2F(_return_value_.data[0], _return_value_.data[1]);
}
inline CameraDeviceType CameraParameters::cameraDeviceType()
{
if (cdata_ == NULL) {
return CameraDeviceType();
}
easyar_CameraDeviceType _return_value_ = easyar_CameraParameters_cameraDeviceType(cdata_);
return static_cast<CameraDeviceType>(_return_value_);
}
inline int CameraParameters::cameraOrientation()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraParameters_cameraOrientation(cdata_);
return _return_value_;
}
inline void CameraParameters::createWithDefaultIntrinsics(Vec2I arg0, CameraDeviceType arg1, int arg2, /* OUT */ CameraParameters * * Return)
{
easyar_CameraParameters * _return_value_ = NULL;
easyar_CameraParameters_createWithDefaultIntrinsics(arg0.get_cdata(), static_cast<easyar_CameraDeviceType>(arg1), arg2, &_return_value_);
*Return = new CameraParameters(_return_value_);
}
inline void CameraParameters::getResized(Vec2I arg0, /* OUT */ CameraParameters * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_CameraParameters * _return_value_ = NULL;
easyar_CameraParameters_getResized(cdata_, arg0.get_cdata(), &_return_value_);
*Return = new CameraParameters(_return_value_);
}
inline int CameraParameters::imageOrientation(int arg0)
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_CameraParameters_imageOrientation(cdata_, arg0);
return _return_value_;
}
inline bool CameraParameters::imageHorizontalFlip(bool arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraParameters_imageHorizontalFlip(cdata_, arg0);
return _return_value_;
}
inline Matrix44F CameraParameters::projection(float arg0, float arg1, float arg2, int arg3, bool arg4, bool arg5)
{
if (cdata_ == NULL) {
return Matrix44F();
}
easyar_Matrix44F _return_value_ = easyar_CameraParameters_projection(cdata_, arg0, arg1, arg2, arg3, arg4, arg5);
return Matrix44F(_return_value_.data[0], _return_value_.data[1], _return_value_.data[2], _return_value_.data[3], _return_value_.data[4], _return_value_.data[5], _return_value_.data[6], _return_value_.data[7], _return_value_.data[8], _return_value_.data[9], _return_value_.data[10], _return_value_.data[11], _return_value_.data[12], _return_value_.data[13], _return_value_.data[14], _return_value_.data[15]);
}
inline Matrix44F CameraParameters::imageProjection(float arg0, int arg1, bool arg2, bool arg3)
{
if (cdata_ == NULL) {
return Matrix44F();
}
easyar_Matrix44F _return_value_ = easyar_CameraParameters_imageProjection(cdata_, arg0, arg1, arg2, arg3);
return Matrix44F(_return_value_.data[0], _return_value_.data[1], _return_value_.data[2], _return_value_.data[3], _return_value_.data[4], _return_value_.data[5], _return_value_.data[6], _return_value_.data[7], _return_value_.data[8], _return_value_.data[9], _return_value_.data[10], _return_value_.data[11], _return_value_.data[12], _return_value_.data[13], _return_value_.data[14], _return_value_.data[15]);
}
inline Vec2F CameraParameters::screenCoordinatesFromImageCoordinates(float arg0, int arg1, bool arg2, bool arg3, Vec2F arg4)
{
if (cdata_ == NULL) {
return Vec2F();
}
easyar_Vec2F _return_value_ = easyar_CameraParameters_screenCoordinatesFromImageCoordinates(cdata_, arg0, arg1, arg2, arg3, arg4.get_cdata());
return Vec2F(_return_value_.data[0], _return_value_.data[1]);
}
inline Vec2F CameraParameters::imageCoordinatesFromScreenCoordinates(float arg0, int arg1, bool arg2, bool arg3, Vec2F arg4)
{
if (cdata_ == NULL) {
return Vec2F();
}
easyar_Vec2F _return_value_ = easyar_CameraParameters_imageCoordinatesFromScreenCoordinates(cdata_, arg0, arg1, arg2, arg3, arg4.get_cdata());
return Vec2F(_return_value_.data[0], _return_value_.data[1]);
}
inline bool CameraParameters::equalsTo(CameraParameters * arg0)
{
if (cdata_ == NULL) {
return bool();
}
bool _return_value_ = easyar_CameraParameters_equalsTo(cdata_, arg0->get_cdata());
return _return_value_;
}
}
#endif
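// Usage sketch (not part of the SDK headers) for the C++ wrapper declared above. The
// CameraDeviceType enum value is an assumption (defined in easyar/types.hxx, not shown).
void describeCamera(float viewportAspectRatio)
{
    easyar::CameraParameters * params = NULL;
    easyar::CameraParameters::createWithDefaultIntrinsics(easyar::Vec2I(1280, 720), easyar::CameraDeviceType_Back, 90, &params);
    int rotation = params->imageOrientation(0);      // clockwise angle needed to align the image with the screen
    bool flip = params->imageHorizontalFlip(false);   // true when rendering must mirror the image
    easyar::Matrix44F projection = params->projection(0.01f, 1000.0f, viewportAspectRatio, 0, true, false);
    (void)rotation; (void)flip; (void)projection;     // feed these into your renderer
    delete params; // createWithDefaultIntrinsics allocates the wrapper with new (see above)
}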
fileFormatVersion: 2
guid: d2ff96353ddb4a84e88a4e2ffe63e475
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import "easyar/types.oc.h"
/// <summary>
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation.
/// </summary>
@interface easyar_CameraParameters : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
+ (easyar_CameraParameters *) create:(easyar_Vec2I *)imageSize focalLength:(easyar_Vec2F *)focalLength principalPoint:(easyar_Vec2F *)principalPoint cameraDeviceType:(easyar_CameraDeviceType)cameraDeviceType cameraOrientation:(int)cameraOrientation;
/// <summary>
/// Image size.
/// </summary>
- (easyar_Vec2I *)size;
/// <summary>
/// Focal length: the distance from the effective optical center to the CCD plane, divided by the unit pixel density in the width and height directions. The unit is pixels.
/// </summary>
- (easyar_Vec2F *)focalLength;
/// <summary>
/// Principal point: the coordinates of the intersection of the principal axis with the CCD plane, measured from the left-top corner of the image. The unit is pixels.
/// </summary>
- (easyar_Vec2F *)principalPoint;
/// <summary>
/// Camera device type. Default, back or front camera. On desktop devices, there are only default cameras. On mobile devices, there is a differentiation between back and front cameras.
/// </summary>
- (easyar_CameraDeviceType)cameraDeviceType;
/// <summary>
/// Camera rotation against device natural orientation.
/// For Android phones and some Android tablets, this value is 90 degrees.
/// For Android eye-wear and some Android tablets, this value is 0 degrees.
/// For all current iOS devices, this value is 90 degrees.
/// </summary>
- (int)cameraOrientation;
/// <summary>
/// Creates CameraParameters with default camera intrinsics. Default intrinsics are estimated from the image size only, so they are not very precise.
/// </summary>
+ (easyar_CameraParameters *)createWithDefaultIntrinsics:(easyar_Vec2I *)imageSize cameraDeviceType:(easyar_CameraDeviceType)cameraDeviceType cameraOrientation:(int)cameraOrientation;
/// <summary>
/// Gets the equivalent CameraParameters for a different camera image size.
/// </summary>
- (easyar_CameraParameters *)getResized:(easyar_Vec2I *)imageSize;
/// <summary>
/// Calculates the angle required to rotate the camera image clockwise to align it with the screen.
/// screenRotation is the clockwise rotation angle, in degrees, of the displayed screen image against the device's natural orientation.
/// For iOS (UIInterfaceOrientationPortrait as the natural orientation):
/// * UIInterfaceOrientationPortrait: rotation = 0
/// * UIInterfaceOrientationLandscapeRight: rotation = 90
/// * UIInterfaceOrientationPortraitUpsideDown: rotation = 180
/// * UIInterfaceOrientationLandscapeLeft: rotation = 270
/// For Android:
/// * Surface.ROTATION_0 = 0
/// * Surface.ROTATION_90 = 90
/// * Surface.ROTATION_180 = 180
/// * Surface.ROTATION_270 = 270
/// </summary>
- (int)imageOrientation:(int)screenRotation;
/// <summary>
/// Calculates whether the image needs to be flipped horizontally. The image is rotated first and then flipped during rendering. When cameraDeviceType is front, a flip is applied automatically. Pass true for manualHorizontalFlip to add a manual flip.
/// </summary>
- (bool)imageHorizontalFlip:(bool)manualHorizontalFlip;
/// <summary>
/// Calculates the perspective projection matrix needed by virtual object rendering. The projection transforms points from the camera coordinate system to the clip coordinate system ([-1, 1]^4). The form of the perspective projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
- (easyar_Matrix44F *)projection:(float)nearPlane farPlane:(float)farPlane viewportAspectRatio:(float)viewportAspectRatio screenRotation:(int)screenRotation combiningFlip:(bool)combiningFlip manualHorizontalFlip:(bool)manualHorizontalFlip;
/// <summary>
/// Calculates the orthogonal projection matrix needed by camera background rendering. The projection transforms points from the image quad coordinate system ([-1, 1]^2) to the clip coordinate system ([-1, 1]^4), leaving the two undefined dimensions unchanged. The form of the orthogonal projection matrix is the same as OpenGL's: the matrix multiplies a column vector of homogeneous point coordinates on the right, unlike Direct3D, where the matrix multiplies a row vector of homogeneous point coordinates on the left. The data arrangement, however, is row-major, not OpenGL's column-major. The clip coordinate system and the normalized device coordinate system are defined the same as OpenGL's defaults.
/// </summary>
- (easyar_Matrix44F *)imageProjection:(float)viewportAspectRatio screenRotation:(int)screenRotation combiningFlip:(bool)combiningFlip manualHorizontalFlip:(bool)manualHorizontalFlip;
/// <summary>
/// Transforms points from the image coordinate system ([0, 1]^2) to the screen coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
- (easyar_Vec2F *)screenCoordinatesFromImageCoordinates:(float)viewportAspectRatio screenRotation:(int)screenRotation combiningFlip:(bool)combiningFlip manualHorizontalFlip:(bool)manualHorizontalFlip imageCoordinates:(easyar_Vec2F *)imageCoordinates;
/// <summary>
/// Transforms points from the screen coordinate system ([0, 1]^2) to the image coordinate system ([0, 1]^2). Both coordinate systems are x-left, y-down, with the origin at the left-top corner.
/// </summary>
- (easyar_Vec2F *)imageCoordinatesFromScreenCoordinates:(float)viewportAspectRatio screenRotation:(int)screenRotation combiningFlip:(bool)combiningFlip manualHorizontalFlip:(bool)manualHorizontalFlip screenCoordinates:(easyar_Vec2F *)screenCoordinates;
/// <summary>
/// Checks if two groups of parameters are equal.
/// </summary>
- (bool)equalsTo:(easyar_CameraParameters *)other;
@end
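// Usage sketch (not part of the SDK headers): mapping a point from image space to screen
// space. The equivalent Objective-C selector is declared above; for brevity this sketch
// uses the C++ wrapper from easyar/cameraparameters.hxx shown earlier. Both coordinate
// spaces are normalized to [0, 1]^2.
easyar::Vec2F imageCenterOnScreen(easyar::CameraParameters * params, float viewportAspectRatio, int screenRotation)
{
    easyar::Vec2F imageCenter(0.5f, 0.5f);
    return params->screenCoordinatesFromImageCoordinates(viewportAspectRatio, screenRotation, true, false, imageCenter);
}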
fileFormatVersion: 2
guid: b265e9f17791938489c886a592b177aa
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_CLOUDRECOGNIZER_H__
#define __EASYAR_CLOUDRECOGNIZER_H__
#include "easyar/types.h"
#ifdef __cplusplus
extern "C" {
#endif
/// <summary>
/// Returns recognition status.
/// </summary>
easyar_CloudRecognizationStatus easyar_CloudRecognizationResult_getStatus(const easyar_CloudRecognizationResult * This);
/// <summary>
/// Returns the recognized target when status is FoundTarget.
/// </summary>
void easyar_CloudRecognizationResult_getTarget(const easyar_CloudRecognizationResult * This, /* OUT */ easyar_OptionalOfImageTarget * Return);
/// <summary>
/// Returns the error message when status is UnknownError.
/// </summary>
void easyar_CloudRecognizationResult_getUnknownErrorMessage(const easyar_CloudRecognizationResult * This, /* OUT */ easyar_OptionalOfString * Return);
void easyar_CloudRecognizationResult__dtor(easyar_CloudRecognizationResult * This);
void easyar_CloudRecognizationResult__retain(const easyar_CloudRecognizationResult * This, /* OUT */ easyar_CloudRecognizationResult * * Return);
const char * easyar_CloudRecognizationResult__typeName(const easyar_CloudRecognizationResult * This);
/// <summary>
/// Returns true.
/// </summary>
bool easyar_CloudRecognizer_isAvailable(void);
/// <summary>
/// Creates an instance and connects to the server.
/// </summary>
void easyar_CloudRecognizer_create(easyar_String * cloudRecognitionServiceServerAddress, easyar_String * apiKey, easyar_String * apiSecret, easyar_String * cloudRecognitionServiceAppId, /* OUT */ easyar_CloudRecognizer * * Return);
/// <summary>
/// Creates an instance and connects to the server with Cloud Secret.
/// </summary>
void easyar_CloudRecognizer_createByCloudSecret(easyar_String * cloudRecognitionServiceServerAddress, easyar_String * cloudRecognitionServiceSecret, easyar_String * cloudRecognitionServiceAppId, /* OUT */ easyar_CloudRecognizer * * Return);
/// <summary>
/// Sends a recognition request. The lowest available request interval is 300 ms.
/// </summary>
void easyar_CloudRecognizer_resolve(easyar_CloudRecognizer * This, easyar_InputFrame * inputFrame, easyar_CallbackScheduler * callbackScheduler, easyar_FunctorOfVoidFromCloudRecognizationResult callback);
/// <summary>
/// Stops recognition and closes the connection. The component shall not be used after calling close.
/// </summary>
void easyar_CloudRecognizer_close(easyar_CloudRecognizer * This);
void easyar_CloudRecognizer__dtor(easyar_CloudRecognizer * This);
void easyar_CloudRecognizer__retain(const easyar_CloudRecognizer * This, /* OUT */ easyar_CloudRecognizer * * Return);
const char * easyar_CloudRecognizer__typeName(const easyar_CloudRecognizer * This);
#ifdef __cplusplus
}
#endif
#endif
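// Usage sketch (not part of the SDK headers): opening and closing a cloud recognition
// session with the C API above. makeEasyARString is a hypothetical helper wrapping the
// easyar_String constructor from easyar/string.h (not part of this excerpt); the server
// address, key, secret and app id are placeholders. Between create and close, call
// easyar_CloudRecognizer_resolve at most once every 300 ms with a callback scheduler and
// a FunctorOfVoidFromCloudRecognizationResult (its layout is defined in easyar/types.h).
easyar_String * makeEasyARString(const char * text); /* hypothetical helper, see note above */
void runCloudSession(void)
{
    if (!easyar_CloudRecognizer_isAvailable()) { return; }
    easyar_CloudRecognizer * recognizer = NULL;
    easyar_CloudRecognizer_create(makeEasyARString("server-address"), makeEasyARString("api-key"), makeEasyARString("api-secret"), makeEasyARString("app-id"), &recognizer);
    /* ... feed input frames through easyar_CloudRecognizer_resolve here ... */
    easyar_CloudRecognizer_close(recognizer); /* the component must not be used after close */
    easyar_CloudRecognizer__dtor(recognizer); /* then drop the reference */
}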