Commit 708d3dee authored by BlackAngle233

update final design

parent 1444629e
fileFormatVersion: 2
guid: 538c3e88689706c409987aabe755379d
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_TARGET_HXX__
#define __EASYAR_TARGET_HXX__
#include "easyar/types.hxx"
#include "easyar/frame.hxx"
namespace easyar {
/// <summary>
/// Target is the base class for all targets that can be tracked by `ImageTracker`_ or other algorithms inside EasyAR.
/// </summary>
class Target
{
protected:
easyar_Target * cdata_ ;
void init_cdata(easyar_Target * cdata);
virtual Target & operator=(const Target & data) { return *this; } //deleted
public:
Target(easyar_Target * cdata);
virtual ~Target();
Target(const Target & data);
const easyar_Target * get_cdata() const;
easyar_Target * get_cdata();
/// <summary>
/// Returns the target id. A target id is an integer number generated at runtime. This id is non-zero and increases globally.
/// </summary>
int runtimeID();
/// <summary>
/// Returns the target uid. A target uid is useful in cloud-based algorithms. If no cloud is used, you can set this uid in the JSON config as an alternative way to distinguish targets.
/// </summary>
void uid(/* OUT */ String * * Return);
/// <summary>
/// Returns the target name. The name is used to distinguish targets in a JSON file.
/// </summary>
void name(/* OUT */ String * * Return);
/// <summary>
/// Sets the name. It will erase previously set data or data from the cloud.
/// </summary>
void setName(String * name);
/// <summary>
/// Returns the meta data set by setMeta, or, for a target returned from the cloud, the meta data set in the cloud server.
/// </summary>
void meta(/* OUT */ String * * Return);
/// <summary>
/// Sets the meta data. It will erase previously set data or data from the cloud.
/// </summary>
void setMeta(String * data);
};
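// Usage sketch (editorial addition, not part of the SDK header): reading a Target's
// identity and renaming it. This assumes a valid Target obtained from a tracker, and
// that the String wrapper exposes the easyar_String constructor and get_cdata() used
// elsewhere in this header; easyar_String_from_utf8_begin and easyar_String_begin are
// declared in easyar/types.h. OUT values are heap-allocated and owned by the caller.
inline int inspectAndRenameTarget(Target & target, const char * newName)
{
    String * uid = NULL;
    target.uid(&uid);                                  // OUT parameter; NULL when cdata is missing
    if (uid != NULL) {
        const char * utf8 = easyar_String_begin(uid->get_cdata());
        (void)utf8;                                    // e.g. log or compare the uid here
        delete uid;                                    // release the caller-owned OUT value
    }
    easyar_String * raw = NULL;
    easyar_String_from_utf8_begin(newName, &raw);      // build an easyar_String from UTF-8
    String wrapped(raw);                               // wrapper takes ownership (pattern used in this file)
    target.setName(&wrapped);                          // erases any previously set name
    return target.runtimeID();                         // non-zero id generated at runtime
}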
/// <summary>
/// TargetInstance is a target tracked by a tracker.
/// A TargetInstance contains the raw `Target`_ being tracked together with the current status and pose of that `Target`_ .
/// </summary>
class TargetInstance
{
protected:
easyar_TargetInstance * cdata_ ;
void init_cdata(easyar_TargetInstance * cdata);
virtual TargetInstance & operator=(const TargetInstance & data) { return *this; } //deleted
public:
TargetInstance(easyar_TargetInstance * cdata);
virtual ~TargetInstance();
TargetInstance(const TargetInstance & data);
const easyar_TargetInstance * get_cdata() const;
easyar_TargetInstance * get_cdata();
TargetInstance();
/// <summary>
/// Returns the current status of the tracked target. Usually you check whether the status equals `TargetStatus.Tracked` to determine whether the target is currently being tracked.
/// </summary>
TargetStatus status();
/// <summary>
/// Gets the raw target. It returns the same `Target`_ object that was previously loaded into the tracker.
/// </summary>
void target(/* OUT */ Target * * Return);
/// <summary>
/// Returns the current pose of the tracked target. The camera coordinate system and the target coordinate system are both right-handed. For the camera coordinate system, the origin is the optical center, x is right, y is up, and z points in the direction of light going into the camera. (On mobile devices, right and up are defined with the device in its natural orientation.) The data arrangement is row-major, unlike OpenGL's column-major.
/// </summary>
Matrix44F pose();
};
/// <summary>
/// TargetTrackerResult is the base class of `ImageTrackerResult`_ and `ObjectTrackerResult`_ .
/// </summary>
class TargetTrackerResult : public FrameFilterResult
{
protected:
easyar_TargetTrackerResult * cdata_ ;
void init_cdata(easyar_TargetTrackerResult * cdata);
virtual TargetTrackerResult & operator=(const TargetTrackerResult & data) { return *this; } //deleted
public:
TargetTrackerResult(easyar_TargetTrackerResult * cdata);
virtual ~TargetTrackerResult();
TargetTrackerResult(const TargetTrackerResult & data);
const easyar_TargetTrackerResult * get_cdata() const;
easyar_TargetTrackerResult * get_cdata();
/// <summary>
/// Returns the list of `TargetInstance`_ contained in the result.
/// </summary>
void targetInstances(/* OUT */ ListOfTargetInstance * * Return);
/// <summary>
/// Sets the list of `TargetInstance`_ contained in the result.
/// </summary>
void setTargetInstances(ListOfTargetInstance * instances);
static void tryCastFromFrameFilterResult(FrameFilterResult * v, /* OUT */ TargetTrackerResult * * Return);
};
#ifndef __EASYAR_OPTIONALOFTARGET__
#define __EASYAR_OPTIONALOFTARGET__
struct OptionalOfTarget
{
bool has_value;
Target * value;
};
static inline easyar_OptionalOfTarget OptionalOfTarget_to_c(Target * o);
#endif
#ifndef __EASYAR_LISTOFTARGETINSTANCE__
#define __EASYAR_LISTOFTARGETINSTANCE__
class ListOfTargetInstance
{
private:
easyar_ListOfTargetInstance * cdata_;
virtual ListOfTargetInstance & operator=(const ListOfTargetInstance & data) { return *this; } //deleted
public:
ListOfTargetInstance(easyar_ListOfTargetInstance * cdata);
virtual ~ListOfTargetInstance();
ListOfTargetInstance(const ListOfTargetInstance & data);
const easyar_ListOfTargetInstance * get_cdata() const;
easyar_ListOfTargetInstance * get_cdata();
ListOfTargetInstance(easyar_TargetInstance * * begin, easyar_TargetInstance * * end);
int size() const;
TargetInstance * at(int index) const;
};
#endif
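// Usage sketch (editorial addition, not part of the SDK header): walking the instances
// carried by a TargetTrackerResult. Both the list returned by targetInstances and each
// element returned by at() are heap-allocated, caller-owned copies (see the
// implementations below), so they are deleted after use.
inline void visitTargetInstances(TargetTrackerResult & result)
{
    ListOfTargetInstance * instances = NULL;
    result.targetInstances(&instances);                // OUT parameter; NULL when cdata is missing
    if (instances == NULL) {
        return;
    }
    for (int i = 0; i < instances->size(); ++i) {
        TargetInstance * instance = instances->at(i);  // retained copy, caller-owned
        TargetStatus status = instance->status();      // compare with the Tracked member of TargetStatus
        (void)status;
        delete instance;
    }
    delete instances;
}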
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_TARGET_HXX__
#define __IMPLEMENTATION_EASYAR_TARGET_HXX__
#include "easyar/target.h"
#include "easyar/matrix.hxx"
#include "easyar/frame.hxx"
namespace easyar {
inline Target::Target(easyar_Target * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline Target::~Target()
{
if (cdata_) {
easyar_Target__dtor(cdata_);
cdata_ = NULL;
}
}
inline Target::Target(const Target & data)
:
cdata_(NULL)
{
easyar_Target * cdata = NULL;
easyar_Target__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_Target * Target::get_cdata() const
{
return cdata_;
}
inline easyar_Target * Target::get_cdata()
{
return cdata_;
}
inline void Target::init_cdata(easyar_Target * cdata)
{
cdata_ = cdata;
}
inline int Target::runtimeID()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_Target_runtimeID(cdata_);
return _return_value_;
}
inline void Target::uid(/* OUT */ String * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_String * _return_value_ = NULL;
easyar_Target_uid(cdata_, &_return_value_);
*Return = new String(_return_value_);
}
inline void Target::name(/* OUT */ String * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_String * _return_value_ = NULL;
easyar_Target_name(cdata_, &_return_value_);
*Return = new String(_return_value_);
}
inline void Target::setName(String * arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_Target_setName(cdata_, arg0->get_cdata());
}
inline void Target::meta(/* OUT */ String * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_String * _return_value_ = NULL;
easyar_Target_meta(cdata_, &_return_value_);
*Return = new String(_return_value_);
}
inline void Target::setMeta(String * arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_Target_setMeta(cdata_, arg0->get_cdata());
}
inline TargetInstance::TargetInstance(easyar_TargetInstance * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline TargetInstance::~TargetInstance()
{
if (cdata_) {
easyar_TargetInstance__dtor(cdata_);
cdata_ = NULL;
}
}
inline TargetInstance::TargetInstance(const TargetInstance & data)
:
cdata_(NULL)
{
easyar_TargetInstance * cdata = NULL;
easyar_TargetInstance__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_TargetInstance * TargetInstance::get_cdata() const
{
return cdata_;
}
inline easyar_TargetInstance * TargetInstance::get_cdata()
{
return cdata_;
}
inline void TargetInstance::init_cdata(easyar_TargetInstance * cdata)
{
cdata_ = cdata;
}
inline TargetInstance::TargetInstance()
:
cdata_(NULL)
{
easyar_TargetInstance * _return_value_ = NULL;
easyar_TargetInstance__ctor(&_return_value_);
init_cdata(_return_value_);
}
inline TargetStatus TargetInstance::status()
{
if (cdata_ == NULL) {
return TargetStatus();
}
easyar_TargetStatus _return_value_ = easyar_TargetInstance_status(cdata_);
return static_cast<TargetStatus>(_return_value_);
}
inline void TargetInstance::target(/* OUT */ Target * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_OptionalOfTarget _return_value_ = {false, NULL};
easyar_TargetInstance_target(cdata_, &_return_value_);
*Return = (_return_value_.has_value ? new Target(_return_value_.value) : NULL);
}
inline Matrix44F TargetInstance::pose()
{
if (cdata_ == NULL) {
return Matrix44F();
}
easyar_Matrix44F _return_value_ = easyar_TargetInstance_pose(cdata_);
return Matrix44F(_return_value_.data[0], _return_value_.data[1], _return_value_.data[2], _return_value_.data[3], _return_value_.data[4], _return_value_.data[5], _return_value_.data[6], _return_value_.data[7], _return_value_.data[8], _return_value_.data[9], _return_value_.data[10], _return_value_.data[11], _return_value_.data[12], _return_value_.data[13], _return_value_.data[14], _return_value_.data[15]);
}
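// Usage sketch (editorial addition, not part of the SDK): consuming a tracked
// TargetInstance. `tracked` stands in for the Tracked member of TargetStatus (see
// types.hxx for the exact enumerator); the Target returned through the OUT parameter
// is a caller-owned copy. The pose is the row-major pose of the target in camera
// coordinates, so renderers expecting column-major data (classic OpenGL) need a
// transpose first.
inline bool readTrackedPose(TargetInstance & instance, TargetStatus tracked, Matrix44F & poseOut)
{
    if (instance.status() != tracked) {
        return false;                      // not currently tracked; the pose is not meaningful
    }
    Target * raw = NULL;
    instance.target(&raw);                 // same Target that was loaded into the tracker
    if (raw != NULL) {
        int runtimeId = raw->runtimeID();  // identifies which loaded target this instance refers to
        (void)runtimeId;
        delete raw;                        // release the caller-owned OUT value
    }
    poseOut = instance.pose();             // row-major pose (see the pose() documentation above)
    return true;
}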
inline TargetTrackerResult::TargetTrackerResult(easyar_TargetTrackerResult * cdata)
:
FrameFilterResult(static_cast<easyar_FrameFilterResult *>(NULL)),
cdata_(NULL)
{
init_cdata(cdata);
}
inline TargetTrackerResult::~TargetTrackerResult()
{
if (cdata_) {
easyar_TargetTrackerResult__dtor(cdata_);
cdata_ = NULL;
}
}
inline TargetTrackerResult::TargetTrackerResult(const TargetTrackerResult & data)
:
FrameFilterResult(static_cast<easyar_FrameFilterResult *>(NULL)),
cdata_(NULL)
{
easyar_TargetTrackerResult * cdata = NULL;
easyar_TargetTrackerResult__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_TargetTrackerResult * TargetTrackerResult::get_cdata() const
{
return cdata_;
}
inline easyar_TargetTrackerResult * TargetTrackerResult::get_cdata()
{
return cdata_;
}
inline void TargetTrackerResult::init_cdata(easyar_TargetTrackerResult * cdata)
{
cdata_ = cdata;
{
easyar_FrameFilterResult * cdata_inner = NULL;
easyar_castTargetTrackerResultToFrameFilterResult(cdata, &cdata_inner);
FrameFilterResult::init_cdata(cdata_inner);
}
}
inline void TargetTrackerResult::targetInstances(/* OUT */ ListOfTargetInstance * * Return)
{
if (cdata_ == NULL) {
*Return = NULL;
return;
}
easyar_ListOfTargetInstance * _return_value_ = NULL;
easyar_TargetTrackerResult_targetInstances(cdata_, &_return_value_);
*Return = new ListOfTargetInstance(_return_value_);
}
inline void TargetTrackerResult::setTargetInstances(ListOfTargetInstance * arg0)
{
if (cdata_ == NULL) {
return;
}
easyar_TargetTrackerResult_setTargetInstances(cdata_, arg0->get_cdata());
}
inline void TargetTrackerResult::tryCastFromFrameFilterResult(FrameFilterResult * v, /* OUT */ TargetTrackerResult * * Return)
{
if (v == NULL) {
*Return = NULL;
return;
}
easyar_TargetTrackerResult * cdata = NULL;
easyar_tryCastFrameFilterResultToTargetTrackerResult(v->get_cdata(), &cdata);
if (cdata == NULL) {
*Return = NULL;
return;
}
*Return = new TargetTrackerResult(cdata);
}
#ifndef __IMPLEMENTATION_EASYAR_OPTIONALOFTARGET__
#define __IMPLEMENTATION_EASYAR_OPTIONALOFTARGET__
static inline easyar_OptionalOfTarget OptionalOfTarget_to_c(Target * o)
{
if (o != NULL) {
easyar_OptionalOfTarget _return_value_ = {true, o->get_cdata()};
return _return_value_;
} else {
easyar_OptionalOfTarget _return_value_ = {false, NULL};
return _return_value_;
}
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_LISTOFTARGETINSTANCE__
#define __IMPLEMENTATION_EASYAR_LISTOFTARGETINSTANCE__
inline ListOfTargetInstance::ListOfTargetInstance(easyar_ListOfTargetInstance * cdata)
: cdata_(cdata)
{
}
inline ListOfTargetInstance::~ListOfTargetInstance()
{
if (cdata_) {
easyar_ListOfTargetInstance__dtor(cdata_);
cdata_ = NULL;
}
}
inline ListOfTargetInstance::ListOfTargetInstance(const ListOfTargetInstance & data)
: cdata_(static_cast<easyar_ListOfTargetInstance *>(NULL))
{
easyar_ListOfTargetInstance_copy(data.cdata_, &cdata_);
}
inline const easyar_ListOfTargetInstance * ListOfTargetInstance::get_cdata() const
{
return cdata_;
}
inline easyar_ListOfTargetInstance * ListOfTargetInstance::get_cdata()
{
return cdata_;
}
inline ListOfTargetInstance::ListOfTargetInstance(easyar_TargetInstance * * begin, easyar_TargetInstance * * end)
: cdata_(static_cast<easyar_ListOfTargetInstance *>(NULL))
{
easyar_ListOfTargetInstance__ctor(begin, end, &cdata_);
}
inline int ListOfTargetInstance::size() const
{
return easyar_ListOfTargetInstance_size(cdata_);
}
inline TargetInstance * ListOfTargetInstance::at(int index) const
{
easyar_TargetInstance * _return_value_ = easyar_ListOfTargetInstance_at(cdata_, index);
easyar_TargetInstance__retain(_return_value_, &_return_value_);
return new TargetInstance(_return_value_);
}
#endif
}
#endif
fileFormatVersion: 2
guid: f162491c7b975dc40844d987d12c9dc8
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import "easyar/types.oc.h"
#import "easyar/frame.oc.h"
/// <summary>
/// Target is the base class for all targets that can be tracked by `ImageTracker`_ or other algorithms inside EasyAR.
/// </summary>
@interface easyar_Target : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
/// <summary>
/// Returns the target id. A target id is an integer number generated at runtime. This id is non-zero and increases globally.
/// </summary>
- (int)runtimeID;
/// <summary>
/// Returns the target uid. A target uid is useful in cloud-based algorithms. If no cloud is used, you can set this uid in the JSON config as an alternative way to distinguish targets.
/// </summary>
- (NSString *)uid;
/// <summary>
/// Returns the target name. The name is used to distinguish targets in a JSON file.
/// </summary>
- (NSString *)name;
/// <summary>
/// Sets the name. It will erase previously set data or data from the cloud.
/// </summary>
- (void)setName:(NSString *)name;
/// <summary>
/// Returns the meta data set by setMeta, or, for a target returned from the cloud, the meta data set in the cloud server.
/// </summary>
- (NSString *)meta;
/// <summary>
/// Sets the meta data. It will erase previously set data or data from the cloud.
/// </summary>
- (void)setMeta:(NSString *)data;
@end
/// <summary>
/// TargetInstance is a target tracked by a tracker.
/// A TargetInstance contains the raw `Target`_ being tracked together with the current status and pose of that `Target`_ .
/// </summary>
@interface easyar_TargetInstance : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
+ (easyar_TargetInstance *) create;
/// <summary>
/// Returns the current status of the tracked target. Usually you check whether the status equals `TargetStatus.Tracked` to determine whether the target is currently being tracked.
/// </summary>
- (easyar_TargetStatus)status;
/// <summary>
/// Gets the raw target. It returns the same `Target`_ object that was previously loaded into the tracker.
/// </summary>
- (easyar_Target *)target;
/// <summary>
/// Returns the current pose of the tracked target. The camera coordinate system and the target coordinate system are both right-handed. For the camera coordinate system, the origin is the optical center, x is right, y is up, and z points in the direction of light going into the camera. (On mobile devices, right and up are defined with the device in its natural orientation.) The data arrangement is row-major, unlike OpenGL's column-major.
/// </summary>
- (easyar_Matrix44F *)pose;
@end
/// <summary>
/// TargetTrackerResult is the base class of `ImageTrackerResult`_ and `ObjectTrackerResult`_ .
/// </summary>
@interface easyar_TargetTrackerResult : easyar_FrameFilterResult
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
/// <summary>
/// Returns the list of `TargetInstance`_ contained in the result.
/// </summary>
- (NSArray<easyar_TargetInstance *> *)targetInstances;
/// <summary>
/// Sets the list of `TargetInstance`_ contained in the result.
/// </summary>
- (void)setTargetInstances:(NSArray<easyar_TargetInstance *> *)instances;
@end
fileFormatVersion: 2
guid: f2d72a6616802fd46abb31d0d49b0834
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_TEXTURE_H__
#define __EASYAR_TEXTURE_H__
#include "easyar/types.h"
#ifdef __cplusplus
extern "C" {
#endif
/// <summary>
/// Gets ID of an OpenGL/OpenGLES texture object.
/// </summary>
int easyar_TextureId_getInt(easyar_TextureId * This);
/// <summary>
/// Gets pointer of a Direct3D texture object.
/// </summary>
void * easyar_TextureId_getPointer(easyar_TextureId * This);
/// <summary>
/// Creates from ID of an OpenGL/OpenGLES texture object.
/// </summary>
void easyar_TextureId_fromInt(int _value, /* OUT */ easyar_TextureId * * Return);
/// <summary>
/// Creates from pointer of a Direct3D texture object.
/// </summary>
void easyar_TextureId_fromPointer(void * ptr, /* OUT */ easyar_TextureId * * Return);
void easyar_TextureId__dtor(easyar_TextureId * This);
void easyar_TextureId__retain(const easyar_TextureId * This, /* OUT */ easyar_TextureId * * Return);
const char * easyar_TextureId__typeName(const easyar_TextureId * This);
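/* Usage sketch (editorial addition, not part of this header): wrapping an existing
   OpenGL texture name through the C API and reading it back. glTextureName is assumed
   to be a texture created by the caller's rendering code; the OUT object must be
   released with easyar_TextureId__dtor when no longer needed. */
static void easyar_example_textureIdRoundTrip(int glTextureName)
{
    easyar_TextureId * textureId = NULL;
    easyar_TextureId_fromInt(glTextureName, &textureId);  /* wrap the GL texture name */
    int value = easyar_TextureId_getInt(textureId);       /* reads back glTextureName */
    (void)value;
    easyar_TextureId__dtor(textureId);                     /* release the wrapper */
}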
#ifdef __cplusplus
}
#endif
#endif
fileFormatVersion: 2
guid: d431ed4188c3fd946a107d7639268054
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_TEXTURE_HXX__
#define __EASYAR_TEXTURE_HXX__
#include "easyar/types.hxx"
namespace easyar {
/// <summary>
/// TextureId encapsulates a texture object in rendering API.
/// For OpenGL/OpenGLES, getInt and fromInt shall be used. For Direct3D, getPointer and fromPointer shall be used.
/// </summary>
class TextureId
{
protected:
easyar_TextureId * cdata_ ;
void init_cdata(easyar_TextureId * cdata);
virtual TextureId & operator=(const TextureId & data) { return *this; } //deleted
public:
TextureId(easyar_TextureId * cdata);
virtual ~TextureId();
TextureId(const TextureId & data);
const easyar_TextureId * get_cdata() const;
easyar_TextureId * get_cdata();
/// <summary>
/// Gets ID of an OpenGL/OpenGLES texture object.
/// </summary>
int getInt();
/// <summary>
/// Gets pointer of a Direct3D texture object.
/// </summary>
void * getPointer();
/// <summary>
/// Creates from ID of an OpenGL/OpenGLES texture object.
/// </summary>
static void fromInt(int _value, /* OUT */ TextureId * * Return);
/// <summary>
/// Creates from pointer of a Direct3D texture object.
/// </summary>
static void fromPointer(void * ptr, /* OUT */ TextureId * * Return);
};
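// Usage sketch (editorial addition, not part of the SDK header): wrapping an OpenGL
// texture name with the C++ TextureId class. The OUT object from fromInt is
// heap-allocated and owned by the caller; for Direct3D the fromPointer/getPointer
// pair is used instead of fromInt/getInt.
inline int wrapGlTexture(int glTextureName)
{
    TextureId * textureId = NULL;
    TextureId::fromInt(glTextureName, &textureId);  // wrap an OpenGL/OpenGLES texture name
    int value = textureId->getInt();                // same value that was passed in
    delete textureId;                               // release the caller-owned OUT object
    return value;
}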
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_TEXTURE_HXX__
#define __IMPLEMENTATION_EASYAR_TEXTURE_HXX__
#include "easyar/texture.h"
namespace easyar {
inline TextureId::TextureId(easyar_TextureId * cdata)
:
cdata_(NULL)
{
init_cdata(cdata);
}
inline TextureId::~TextureId()
{
if (cdata_) {
easyar_TextureId__dtor(cdata_);
cdata_ = NULL;
}
}
inline TextureId::TextureId(const TextureId & data)
:
cdata_(NULL)
{
easyar_TextureId * cdata = NULL;
easyar_TextureId__retain(data.cdata_, &cdata);
init_cdata(cdata);
}
inline const easyar_TextureId * TextureId::get_cdata() const
{
return cdata_;
}
inline easyar_TextureId * TextureId::get_cdata()
{
return cdata_;
}
inline void TextureId::init_cdata(easyar_TextureId * cdata)
{
cdata_ = cdata;
}
inline int TextureId::getInt()
{
if (cdata_ == NULL) {
return int();
}
int _return_value_ = easyar_TextureId_getInt(cdata_);
return _return_value_;
}
inline void * TextureId::getPointer()
{
if (cdata_ == NULL) {
return NULL;
}
void * _return_value_ = easyar_TextureId_getPointer(cdata_);
return _return_value_;
}
inline void TextureId::fromInt(int arg0, /* OUT */ TextureId * * Return)
{
easyar_TextureId * _return_value_ = NULL;
easyar_TextureId_fromInt(arg0, &_return_value_);
*Return = new TextureId(_return_value_);
}
inline void TextureId::fromPointer(void * arg0, /* OUT */ TextureId * * Return)
{
easyar_TextureId * _return_value_ = NULL;
easyar_TextureId_fromPointer(arg0, &_return_value_);
*Return = new TextureId(_return_value_);
}
}
#endif
fileFormatVersion: 2
guid: a860f905b439280499498a29f821be3d
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import "easyar/types.oc.h"
/// <summary>
/// TextureId encapsulates a texture object in rendering API.
/// For OpenGL/OpenGLES, getInt and fromInt shall be used. For Direct3D, getPointer and fromPointer shall be used.
/// </summary>
@interface easyar_TextureId : easyar_RefBase
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
/// <summary>
/// Gets ID of an OpenGL/OpenGLES texture object.
/// </summary>
- (int)getInt;
/// <summary>
/// Gets pointer of a Direct3D texture object.
/// </summary>
- (void *)getPointer;
/// <summary>
/// Creates from ID of an OpenGL/OpenGLES texture object.
/// </summary>
+ (easyar_TextureId *)fromInt:(int)_value;
/// <summary>
/// Creates from pointer of a Direct3D texture object.
/// </summary>
+ (easyar_TextureId *)fromPointer:(void *)ptr;
@end
fileFormatVersion: 2
guid: 341926cd23b9d5c44b5523c7492058fb
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_TYPES_H__
#define __EASYAR_TYPES_H__
#ifndef __cplusplus
#include <stdbool.h>
#endif
#ifdef __cplusplus
extern "C" {
#endif
typedef struct { char _placeHolder_; } easyar_String;
void easyar_String_from_utf8(const char * begin, const char * end, /* OUT */ easyar_String * * Return);
void easyar_String_from_utf8_begin(const char * begin, /* OUT */ easyar_String * * Return);
const char * easyar_String_begin(const easyar_String * This);
const char * easyar_String_end(const easyar_String * This);
void easyar_String_copy(const easyar_String * This, /* OUT */ easyar_String * * Return);
void easyar_String__dtor(easyar_String * This);
/// <summary>
/// class
/// ObjectTargetParameters represents the parameters to create a `ObjectTarget`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ObjectTargetParameters;
/// <summary>
/// class
/// extends Target
/// ObjectTarget represents 3d object targets that can be tracked by `ObjectTracker`_ .
/// The size of ObjectTarget is determined by the `obj` file. You can change it by changing the object `scale`, which defaults to 1.
/// An ObjectTarget can be tracked by `ObjectTracker`_ after a successful load into the `ObjectTracker`_ using `ObjectTracker.loadTarget`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ObjectTarget;
/// <summary>
/// class
/// extends TargetTrackerResult
/// Result of `ObjectTracker`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ObjectTrackerResult;
/// <summary>
/// class
/// ObjectTracker implements 3D object target detection and tracking.
/// ObjectTracker occupies (1 + SimultaneousNum) buffers of the camera. Use setBufferCapacity of the camera to set a number of buffers that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// After creation, you can call start/stop to enable/disable the tracking process. start and stop are very lightweight calls.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// ObjectTracker inputs `FeedbackFrame`_ from feedbackFrameSink. A `FeedbackFrameSource`_ shall be connected to feedbackFrameSink for use. Refer to `Overview <Overview.html>`__ .
/// Before a `Target`_ can be tracked by ObjectTracker, you have to load it using loadTarget/unloadTarget. You can get load/unload results from the callbacks passed into those interfaces.
/// </summary>
typedef struct { char _placeHolder_; } easyar_ObjectTracker;
typedef enum
{
/// <summary>
/// Download successful.
/// </summary>
easyar_CalibrationDownloadStatus_Successful = 0,
/// <summary>
/// Data is already latest.
/// </summary>
easyar_CalibrationDownloadStatus_NotModified = 1,
/// <summary>
/// Connection error
/// </summary>
easyar_CalibrationDownloadStatus_ConnectionError = 2,
/// <summary>
/// Unexpected error
/// </summary>
easyar_CalibrationDownloadStatus_UnexpectedError = 3,
} easyar_CalibrationDownloadStatus;
/// <summary>
/// class
/// CalibrationDownloader is used to download and update the calibration data used by MotionTracker. The calibration data only takes effect after MotionTracker is reallocated.
/// </summary>
typedef struct { char _placeHolder_; } easyar_CalibrationDownloader;
typedef enum
{
/// <summary>
/// Unknown error
/// </summary>
easyar_CloudRecognizationStatus_UnknownError = 0,
/// <summary>
/// A target is recognized.
/// </summary>
easyar_CloudRecognizationStatus_FoundTarget = 1,
/// <summary>
/// No target is recognized.
/// </summary>
easyar_CloudRecognizationStatus_TargetNotFound = 2,
/// <summary>
/// Reached the access limit
/// </summary>
easyar_CloudRecognizationStatus_ReachedAccessLimit = 3,
/// <summary>
/// Request interval too low
/// </summary>
easyar_CloudRecognizationStatus_RequestIntervalTooLow = 4,
} easyar_CloudRecognizationStatus;
/// <summary>
/// class
/// </summary>
typedef struct { char _placeHolder_; } easyar_CloudRecognizationResult;
/// <summary>
/// class
/// CloudRecognizer implements cloud recognition. It can only be used after a recognition image library has been created on the cloud. Please refer to the EasyAR CRS documentation.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// Before using a CloudRecognizer, an `ImageTracker`_ must be set up and prepared. Any target returned from the cloud should be manually put into the `ImageTracker`_ using `ImageTracker.loadTarget`_ if it needs to be tracked. Once loaded into the tracker, the target can be used in the same way as a local target. When a target is recognized, you can get it from the callback, and you should use the target uid to distinguish targets. The target runtimeID is dynamically created and cannot be used as a unique identifier in the cloud situation.
/// </summary>
typedef struct { char _placeHolder_; } easyar_CloudRecognizer;
/// <summary>
/// class
/// Buffer stores a raw byte array, which can be used to access image data.
/// To access image data in the Java API, get the buffer from `Image`_ and copy it to a Java byte array.
/// Image data has been accessible since the first version of EasyAR Sense. Refer to `Image`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_Buffer;
/// <summary>
/// class
/// A mapping from file path to `Buffer`_ . It can be used to represent multiple files in the memory.
/// </summary>
typedef struct { char _placeHolder_; } easyar_BufferDictionary;
/// <summary>
/// class
/// BufferPool is a memory pool to reduce memory allocation time consumption for functionality like custom camera interoperability, which needs to allocate memory buffers of a fixed size repeatedly.
/// </summary>
typedef struct { char _placeHolder_; } easyar_BufferPool;
typedef enum
{
/// <summary>
/// Unknown location
/// </summary>
easyar_CameraDeviceType_Unknown = 0,
/// <summary>
/// Rear camera
/// </summary>
easyar_CameraDeviceType_Back = 1,
/// <summary>
/// Front camera
/// </summary>
easyar_CameraDeviceType_Front = 2,
} easyar_CameraDeviceType;
/// <summary>
/// MotionTrackingStatus describes the quality of device motion tracking.
/// </summary>
typedef enum
{
/// <summary>
/// Result is not available and should not be used to render virtual objects or do 3D reconstruction. This value occurs temporarily after initializing, losing tracking, or relocalizing.
/// </summary>
easyar_MotionTrackingStatus_NotTracking = 0,
/// <summary>
/// Tracking is available, but the quality of the result is not good enough. This value occurs temporarily due to weak texture or excessive movement. The result can be used to render virtual objects, but should generally not be used to do 3D reconstruction.
/// </summary>
easyar_MotionTrackingStatus_Limited = 1,
/// <summary>
/// Tracking with a good quality. The result can be used to render virtual objects or do 3D reconstruction.
/// </summary>
easyar_MotionTrackingStatus_Tracking = 2,
} easyar_MotionTrackingStatus;
/// <summary>
/// class
/// Camera parameters, including image size, focal length, principal point, camera type and camera rotation against natural orientation.
/// </summary>
typedef struct { char _placeHolder_; } easyar_CameraParameters;
/// <summary>
/// PixelFormat represents the format of image pixel data. All formats follow the pixel direction from left to right and from top to bottom.
/// </summary>
typedef enum
{
/// <summary>
/// Unknown
/// </summary>
easyar_PixelFormat_Unknown = 0,
/// <summary>
/// 256 shades grayscale
/// </summary>
easyar_PixelFormat_Gray = 1,
/// <summary>
/// YUV_NV21
/// </summary>
easyar_PixelFormat_YUV_NV21 = 2,
/// <summary>
/// YUV_NV12
/// </summary>
easyar_PixelFormat_YUV_NV12 = 3,
/// <summary>
/// YUV_I420
/// </summary>
easyar_PixelFormat_YUV_I420 = 4,
/// <summary>
/// YUV_YV12
/// </summary>
easyar_PixelFormat_YUV_YV12 = 5,
/// <summary>
/// RGB888
/// </summary>
easyar_PixelFormat_RGB888 = 6,
/// <summary>
/// BGR888
/// </summary>
easyar_PixelFormat_BGR888 = 7,
/// <summary>
/// RGBA8888
/// </summary>
easyar_PixelFormat_RGBA8888 = 8,
/// <summary>
/// BGRA8888
/// </summary>
easyar_PixelFormat_BGRA8888 = 9,
} easyar_PixelFormat;
/// <summary>
/// class
/// Image stores image data and represents an image in memory.
/// Image raw data can be accessed as a byte array. The width/height/etc. information is also accessible.
/// Image data has been accessible since the first version of EasyAR Sense.
///
/// You can do this in iOS
/// ::
///
///     #import <easyar/buffer.oc.h>
///     #import <easyar/image.oc.h>
///
///     easyar_OutputFrame * outputFrame = [outputFrameBuffer peek];
///     if (outputFrame != nil) {
///         easyar_Image * i = [[outputFrame inputFrame] image];
///         easyar_Buffer * b = [i buffer];
///         char * bytes = calloc([b size], 1);
///         memcpy(bytes, [b data], [b size]);
///         // use bytes here
///         free(bytes);
///     }
///
/// Or in Android
/// ::
///
///     import cn.easyar.*;
///
///     OutputFrame outputFrame = outputFrameBuffer.peek();
///     if (outputFrame != null) {
///         InputFrame inputFrame = outputFrame.inputFrame();
///         Image i = inputFrame.image();
///         Buffer b = i.buffer();
///         byte[] bytes = new byte[b.size()];
///         b.copyToByteArray(0, bytes, 0, bytes.length);
///         // use bytes here
///         b.dispose();
///         i.dispose();
///         inputFrame.dispose();
///         outputFrame.dispose();
///     }
/// </summary>
typedef struct { char _placeHolder_; } easyar_Image;
/// <summary>
/// record
/// Square matrix of 4. The data arrangement is row-major.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of matrix.
/// </summary>
float data[16];
} easyar_Matrix44F;
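/* Usage sketch (editorial addition, not part of this header): element (row, col) of
   the row-major easyar_Matrix44F lives at data[row * 4 + col]. APIs that expect
   column-major storage, such as classic OpenGL matrix loads, need the 16 floats
   transposed first; columnMajorOut is a caller-provided output buffer. */
static void easyar_example_toColumnMajor(const easyar_Matrix44F * m, float columnMajorOut[16])
{
    for (int row = 0; row < 4; ++row) {
        for (int col = 0; col < 4; ++col) {
            columnMajorOut[col * 4 + row] = m->data[row * 4 + col];
        }
    }
}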
/// <summary>
/// record
/// Square matrix of 3. The data arrangement is row-major.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of matrix.
/// </summary>
float data[9];
} easyar_Matrix33F;
/// <summary>
/// record
/// 3 dimensional vector of double.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
double data[3];
} easyar_Vec3D;
/// <summary>
/// record
/// 4 dimensional vector of float.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[4];
} easyar_Vec4F;
/// <summary>
/// record
/// 3 dimensional vector of float.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[3];
} easyar_Vec3F;
/// <summary>
/// record
/// 2 dimensional vector of float.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[2];
} easyar_Vec2F;
/// <summary>
/// record
/// 4 dimensional vector of int.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
int data[4];
} easyar_Vec4I;
/// <summary>
/// record
/// 2 dimensional vector of int.
/// </summary>
typedef struct
{
/// <summary>
/// The raw data of vector.
/// </summary>
int data[2];
} easyar_Vec2I;
/// <summary>
/// class
/// DenseSpatialMap is used to reconstruct the environment accurately and densely. The reconstructed model is represented by `triangle mesh`, which is denoted simply by `mesh`.
/// DenseSpatialMap occupies one buffer of the camera.
/// </summary>
typedef struct { char _placeHolder_; } easyar_DenseSpatialMap;
/// <summary>
/// record
/// The dense reconstructed model is represented by a triangle mesh, or simply denoted as mesh. Because the mesh updates frequently, the mesh of the whole reconstructed model is divided into many mesh blocks to ensure efficiency. A mesh block is a cube about 1 meter long on each side, with attributes such as vertices and indices.
///
/// BlockInfo describes the content of a mesh block. (x, y, z) is the index of the mesh block; the coordinates of a mesh block's origin in the world coordinate system can be obtained by multiplying (x, y, z) by the physical size of a mesh block. You may filter the parts you want to display in advance by the mesh blocks' world coordinates to save rendering time.
/// </summary>
typedef struct
{
/// <summary>
/// x in index (x, y, z) of mesh block.
/// </summary>
int x;
/// <summary>
/// y in index (x, y, z) of mesh block.
/// </summary>
int y;
/// <summary>
/// z in index (x, y, z) of mesh block.
/// </summary>
int z;
/// <summary>
/// Number of vertices in a mesh block.
/// </summary>
int numOfVertex;
/// <summary>
/// startPointOfVertex is the starting position of this block's vertex data within the shared vertex buffer, i.e. the index of the first vertex that belongs to the current mesh block. It is not a byte offset; the byte offset from the beginning of the vertex buffer is startPointOfVertex*3*4 bytes.
/// </summary>
int startPointOfVertex;
/// <summary>
/// The number of indices in a mesh block. Every three consecutive indices form a triangle.
/// </summary>
int numOfIndex;
/// <summary>
/// Similar to startPointOfVertex. startPointOfIndex is the starting position of this block's index data within the shared index buffer, i.e. where the indices belonging to the current mesh block begin. It is not a byte offset; the byte offset from the beginning of the index buffer is startPointOfIndex*3*4 bytes.
/// </summary>
int startPointOfIndex;
/// <summary>
/// Version represents how many times the mesh block has been updated. The larger the version, the newer the block. If the version of a mesh block increases after calling `DenseSpatialMap.updateSceneMesh`_ , it indicates that the mesh block has changed.
/// </summary>
int version;
} easyar_BlockInfo;
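/* Usage sketch (editorial addition, not part of this header): locating a mesh block's
   data inside the shared buffers. Per the comments above, each vertex is three 4-byte
   floats, so the block's vertices start startPointOfVertex * 3 * 4 bytes into the
   vertex buffer, and its indices start startPointOfIndex * 3 * 4 bytes into the index
   buffer. */
static void easyar_example_blockByteOffsets(const easyar_BlockInfo * block,
                                            int * vertexByteOffset,
                                            int * indexByteOffset)
{
    *vertexByteOffset = block->startPointOfVertex * 3 * 4;  /* offset of this block's vertex data */
    *indexByteOffset = block->startPointOfIndex * 3 * 4;    /* offset of this block's index data */
}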
/// <summary>
/// class
/// SceneMesh is used to manage and preserve the results of `DenseSpatialMap`_.
/// There are two kinds of meshes saved in SceneMesh: one is the mesh of the whole reconstructed scene, hereinafter referred to as `meshAll`; the other is the recently updated mesh, hereinafter referred to as `meshUpdated`. `meshAll` is a whole mesh, including all vertex data and index data, etc. `meshUpdated` is composed of several `mesh block` s; each `mesh block` is a cube that contains the mesh formed by the object surfaces in the corresponding cube of space.
/// `meshAll` is available only when the `DenseSpatialMap.updateSceneMesh`_ method is called specifying that all meshes need to be updated. If `meshAll` was updated previously but not in recent calls, its data remains the same.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SceneMesh;
/// <summary>
/// record
/// Accelerometer reading.
///
/// The positive direction of x-axis is from the device center to its right side of the screen.
/// The positive direction of y-axis is from the device center to its top side of the screen.
/// The positive direction of z-axis is from the device center perpendicular to the screen outward.
///
/// The unit of x, y, z is m/s^2.
/// The unit of timestamp is second.
/// </summary>
typedef struct
{
float x;
float y;
float z;
double timestamp;
} easyar_AccelerometerResult;
/// <summary>
/// class
/// ARCoreCameraDevice implements a camera device based on ARCore, which outputs `InputFrame`_ (including image, camera parameters, timestamp, 6DOF location, and tracking status).
/// Loading of libarcore_sdk_c.so with java.lang.System.loadLibrary is required.
/// After creation, start/stop can be invoked to start or stop video stream capture.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// ARCoreCameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to an `InputFrameSink`_ for use. Refer to `Overview <Overview.html>`__ .
/// bufferCapacity is the capacity of the `InputFrame`_ buffer. If the number of `InputFrame`_ objects that have been output from the device and not yet released exceeds this number, the device will not output new `InputFrame`_ objects until previous ones have been released. This may cause the screen to get stuck. Refer to `Overview <Overview.html>`__ .
/// Caution: Currently, ARCore (v1.13.0) has memory leaks on creating and destroying sessions. Repeated creation and destruction will cause an increasing and non-reclaimable memory footprint.
/// </summary>
typedef struct { char _placeHolder_; } easyar_ARCoreCameraDevice;
/// <summary>
/// class
/// ARKitCameraDevice implements a camera device based on ARKit, which outputs `InputFrame`_ (including image, camera parameters, timestamp, 6DOF location, and tracking status).
/// After creation, start/stop can be invoked to start or stop data collection.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// ARKitCameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to an `InputFrameSink`_ for use. Refer to `Overview <Overview.html>`__ .
/// bufferCapacity is the capacity of the `InputFrame`_ buffer. If the number of `InputFrame`_ objects that have been output from the device and not yet released exceeds this number, the device will not output new `InputFrame`_ objects until previous ones have been released. This may cause the screen to get stuck. Refer to `Overview <Overview.html>`__ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ARKitCameraDevice;
typedef enum
{
/// <summary>
/// Normal auto focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
easyar_CameraDeviceFocusMode_Normal = 0,
/// <summary>
/// Continuous auto focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Continousauto = 2,
/// <summary>
/// Infinity focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Infinity = 3,
/// <summary>
/// Macro (close-up) focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
easyar_CameraDeviceFocusMode_Macro = 4,
/// <summary>
/// Medium distance focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Medium = 5,
} easyar_CameraDeviceFocusMode;
typedef enum
{
/// <summary>
/// Android Camera1
/// </summary>
easyar_AndroidCameraApiType_Camera1 = 0,
/// <summary>
/// Android Camera2
/// </summary>
easyar_AndroidCameraApiType_Camera2 = 1,
} easyar_AndroidCameraApiType;
typedef enum
{
/// <summary>
/// The same as AVCaptureSessionPresetPhoto.
/// </summary>
easyar_CameraDevicePresetProfile_Photo = 0,
/// <summary>
/// The same as AVCaptureSessionPresetHigh.
/// </summary>
easyar_CameraDevicePresetProfile_High = 1,
/// <summary>
/// The same as AVCaptureSessionPresetMedium.
/// </summary>
easyar_CameraDevicePresetProfile_Medium = 2,
/// <summary>
/// The same as AVCaptureSessionPresetLow.
/// </summary>
easyar_CameraDevicePresetProfile_Low = 3,
} easyar_CameraDevicePresetProfile;
typedef enum
{
/// <summary>
/// Unknown
/// </summary>
easyar_CameraState_Unknown = 0x00000000,
/// <summary>
/// Disconnected
/// </summary>
easyar_CameraState_Disconnected = 0x00000001,
/// <summary>
/// Preempted by another application.
/// </summary>
easyar_CameraState_Preempted = 0x00000002,
} easyar_CameraState;
/// <summary>
/// class
/// CameraDevice implements a camera device, which outputs `InputFrame`_ (including image, camera parameters, and timestamp). It is available on Windows, Mac, Android and iOS.
/// After open, start/stop can be invoked to start or stop data collection. start/stop will not change previously set camera parameters.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// CameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to an `InputFrameSink`_ for use. Refer to `Overview <Overview.html>`__ .
/// bufferCapacity is the capacity of the `InputFrame`_ buffer. If the number of `InputFrame`_ objects that have been output from the device and not yet released exceeds this number, the device will not output new `InputFrame`_ objects until previous ones have been released. This may cause the screen to get stuck. Refer to `Overview <Overview.html>`__ .
/// On Android, it is required to add android.permission.CAMERA to AndroidManifest.xml for use.
/// On iOS, it is required to add NSCameraUsageDescription to Info.plist for use.
/// </summary>
typedef struct { char _placeHolder_; } easyar_CameraDevice;
typedef enum
{
/// <summary>
/// Optimized for `ImageTracker`_ , `ObjectTracker`_ and `CloudRecognizer`_ .
/// </summary>
easyar_CameraDevicePreference_PreferObjectSensing = 0,
/// <summary>
/// Optimized for `SurfaceTracker`_ .
/// </summary>
easyar_CameraDevicePreference_PreferSurfaceTracking = 1,
/// <summary>
/// Optimized for Motion Tracking .
/// </summary>
easyar_CameraDevicePreference_PreferMotionTracking = 2,
} easyar_CameraDevicePreference;
/// <summary>
/// class
/// It is used for selecting camera API (camera1 or camera2) on Android. camera1 is better for compatibility, but lacks some necessary information such as timestamp. camera2 has compatibility issues on some devices.
/// Different preferences will choose camera1 or camera2 based on usage.
/// </summary>
typedef struct { char _placeHolder_; } easyar_CameraDeviceSelector;
/// <summary>
/// record
/// Magnetometer reading.
///
/// The positive direction of x-axis is from the device center to its right side of the screen.
/// The positive direction of y-axis is from the device center to its top side of the screen.
/// The positive direction of z-axis is from the device center perpendicular to the screen outward.
///
/// The unit of x, y, z is uT(micro-Tesla).
/// The unit of timestamp is second.
/// </summary>
typedef struct
{
float x;
float y;
float z;
double timestamp;
} easyar_MagnetometerResult;
/// <summary>
/// class
/// extends FrameFilterResult
/// Result of `SurfaceTracker`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_SurfaceTrackerResult;
/// <summary>
/// class
/// SurfaceTracker implements tracking with environmental surfaces.
/// SurfaceTracker occupies one buffer of the camera. Use setBufferCapacity of the camera to set a number of buffers that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// After creation, you can call start/stop to enable/disable the tracking process. start and stop are very lightweight calls.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// SurfaceTracker inputs `InputFrame`_ from inputFrameSink. An `InputFrameSource`_ shall be connected to inputFrameSink for use. Refer to `Overview <Overview.html>`__ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_SurfaceTracker;
typedef enum
{
/// <summary>
/// Frame rate is 30 fps, the actual fps will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceFPS_Camera_FPS_30 = 0,
/// <summary>
/// Frame rate is 60 fps or 30 fps, the actual fps will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceFPS_Camera_FPS_60 = 1,
} easyar_MotionTrackerCameraDeviceFPS;
typedef enum
{
/// <summary>
/// Continuous auto focus mode, high image definition and good tracking performance. The actual focus mode will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceFocusMode_Continousauto = 0,
/// <summary>
/// Focus is fixed on Medium distance, medium image definition and better tracking performance. The actual focus mode will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceFocusMode_Medium = 1,
} easyar_MotionTrackerCameraDeviceFocusMode;
typedef enum
{
/// <summary>
/// Resolution is typically 1280 x 960 or 1280 x 720, the actual resolution will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceResolution_Resolution_1280 = 0,
/// <summary>
/// Resolution is typically 640 x 480 or 640 x 360, the actual resolution will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceResolution_Resolution_640 = 1,
} easyar_MotionTrackerCameraDeviceResolution;
/// <summary>
/// class
/// MotionTrackerCameraDevice implements a camera device with metric-scale six degree-of-freedom motion tracking, which outputs `InputFrame`_ (including image, camera parameters, timestamp, 6DOF pose and tracking status).
/// After creation, start/stop can be invoked to start or stop data flow.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// MotionTrackerCameraDevice outputs `InputFrame`_ from inputFrameSource. inputFrameSource shall be connected to an `InputFrameSink`_ for further use. Refer to `Overview <Overview.html>`__ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_MotionTrackerCameraDevice;
/// <summary>
/// class
/// Input frame recorder.
/// There is an input frame input port and an input frame output port. It can be used to record input frames into an EIF file. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameRecorder;
/// <summary>
/// class
/// Input frame player.
/// There is an input frame output port. It can be used to get input frames from an EIF file. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFramePlayer;
/// <summary>
/// class
/// Callback scheduler.
/// There are two subclasses: `DelayedCallbackScheduler`_ and `ImmediateCallbackScheduler`_ .
/// `DelayedCallbackScheduler`_ is used to defer callbacks so that they are invoked manually, and it can be used in single-threaded environments (such as various UI environments).
/// `ImmediateCallbackScheduler`_ is used to mark callbacks to be invoked immediately when the event is dispatched, and it can be used in multi-threaded environments (such as a server or service daemon).
/// </summary>
typedef struct { char _placeHolder_; } easyar_CallbackScheduler;
/// <summary>
/// class
/// extends CallbackScheduler
/// Delayed callback scheduler.
/// It is used to defer callbacks so that they are invoked manually, and it can be used in single-threaded environments (such as various UI environments).
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_DelayedCallbackScheduler;
/// <summary>
/// class
/// extends CallbackScheduler
/// Immediate callback scheduler.
/// It is used to mark callbacks to be invoked immediately when the event is dispatched, and it can be used in multi-threaded environments (such as a server or service daemon).
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImmediateCallbackScheduler;
/// <summary>
/// class
/// JNI utility class.
/// It is used in Unity to wrap Java byte array and ByteBuffer.
/// It is not supported on iOS.
/// </summary>
typedef struct { char _placeHolder_; } easyar_JniUtility;
typedef enum
{
/// <summary>
/// Error
/// </summary>
easyar_LogLevel_Error = 0,
/// <summary>
/// Warning
/// </summary>
easyar_LogLevel_Warning = 1,
/// <summary>
/// Information
/// </summary>
easyar_LogLevel_Info = 2,
} easyar_LogLevel;
/// <summary>
/// class
/// Log class.
/// It is used to setup a custom log output function.
/// </summary>
typedef struct { char _placeHolder_; } easyar_Log;
/// <summary>
/// class
/// </summary>
typedef struct { char _placeHolder_; } easyar_Storage;
/// <summary>
/// class
/// ImageTargetParameters represents the parameters to create a `ImageTarget`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImageTargetParameters;
/// <summary>
/// class
/// extends Target
/// ImageTarget represents planar image targets that can be tracked by `ImageTracker`_ .
/// The fields of ImageTarget need to be filled with the create... method before it can be read. An ImageTarget can be tracked by `ImageTracker`_ after a successful load into the `ImageTracker`_ using `ImageTracker.loadTarget`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImageTarget;
typedef enum
{
/// <summary>
/// Quality is preferred.
/// </summary>
easyar_ImageTrackerMode_PreferQuality = 0,
/// <summary>
/// Performance is preferred.
/// </summary>
easyar_ImageTrackerMode_PreferPerformance = 1,
} easyar_ImageTrackerMode;
/// <summary>
/// class
/// extends TargetTrackerResult
/// Result of `ImageTracker`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImageTrackerResult;
/// <summary>
/// class
/// ImageTracker implements image target detection and tracking.
/// ImageTracker occupies (1 + SimultaneousNum) buffers of the camera. Use setBufferCapacity of the camera to set a number of buffers that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// After creation, you can call start/stop to enable/disable the tracking process. start and stop are very lightweight calls.
/// When the component is not needed anymore, call the close function to close it. It shall not be used after calling close.
/// ImageTracker inputs `FeedbackFrame`_ from feedbackFrameSink. A `FeedbackFrameSource`_ shall be connected to feedbackFrameSink for use. Refer to `Overview <Overview.html>`__ .
/// Before a `Target`_ can be tracked by ImageTracker, you have to load it using loadTarget/unloadTarget. You can get load/unload results from the callbacks passed into those interfaces.
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImageTracker;
/// <summary>
/// class
/// Recorder implements recording for current rendering screen.
/// Currently Recorder only works on Android (4.3 or later) and iOS with an OpenGL ES 2.0 context.
/// Due to the dependency on OpenGL ES, every method in this class (except requestPermissions, and including the destructor) has to be called in a single thread that holds an OpenGL ES context.
/// **Unity Only** If Multi-threaded rendering is enabled in Unity, the scripting thread and the rendering thread are two separate threads, which makes it impossible to call updateFrame in the rendering thread. For this reason, the Multi-threaded rendering option shall be disabled to use Recorder.
/// </summary>
typedef struct { char _placeHolder_; } easyar_Recorder;
typedef enum
{
/// <summary>
/// 1080P, low quality
/// </summary>
easyar_RecordProfile_Quality_1080P_Low = 0x00000001,
/// <summary>
/// 1080P, middle quality
/// </summary>
easyar_RecordProfile_Quality_1080P_Middle = 0x00000002,
/// <summary>
/// 1080P, high quality
/// </summary>
easyar_RecordProfile_Quality_1080P_High = 0x00000004,
/// <summary>
/// 720P, low quality
/// </summary>
easyar_RecordProfile_Quality_720P_Low = 0x00000008,
/// <summary>
/// 720P, middle quality
/// </summary>
easyar_RecordProfile_Quality_720P_Middle = 0x00000010,
/// <summary>
/// 720P, high quality
/// </summary>
easyar_RecordProfile_Quality_720P_High = 0x00000020,
/// <summary>
/// 480P, low quality
/// </summary>
easyar_RecordProfile_Quality_480P_Low = 0x00000040,
/// <summary>
/// 480P, middle quality
/// </summary>
easyar_RecordProfile_Quality_480P_Middle = 0x00000080,
/// <summary>
/// 480P, high quality
/// </summary>
easyar_RecordProfile_Quality_480P_High = 0x00000100,
/// <summary>
/// default resolution and quality, same as `Quality_720P_Middle`
/// </summary>
easyar_RecordProfile_Quality_Default = 0x00000010,
} easyar_RecordProfile;
typedef enum
{
/// <summary>
/// 1080P
/// </summary>
easyar_RecordVideoSize_Vid1080p = 0x00000002,
/// <summary>
/// 720P
/// </summary>
easyar_RecordVideoSize_Vid720p = 0x00000010,
/// <summary>
/// 480P
/// </summary>
easyar_RecordVideoSize_Vid480p = 0x00000080,
} easyar_RecordVideoSize;
typedef enum
{
/// <summary>
/// If output aspect ratio does not fit input, content will be clipped to fit output aspect ratio.
/// </summary>
easyar_RecordZoomMode_NoZoomAndClip = 0x00000000,
/// <summary>
/// If output aspect ratio does not fit input, content will not be clipped and there will be black borders in one dimension.
/// </summary>
easyar_RecordZoomMode_ZoomInWithAllContent = 0x00000001,
} easyar_RecordZoomMode;
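/*
    Illustrative sketch, not part of the EasyAR SDK: the two zoom modes above only differ in
    how an aspect-ratio mismatch between input and output is handled. A mismatch can be
    detected without floating point as follows (helper name is hypothetical):

        bool aspectRatiosDiffer(int inWidth, int inHeight, int outWidth, int outHeight)
        {
            // cross-multiplied comparison of inWidth/inHeight vs outWidth/outHeight
            return inWidth * outHeight != outWidth * inHeight;
        }
        // NoZoomAndClip        -> crop the input to the output aspect ratio
        // ZoomInWithAllContent -> keep all content and pad with black borders
*/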
typedef enum
{
/// <summary>
/// video recorded is landscape
/// </summary>
easyar_RecordVideoOrientation_Landscape = 0x00000000,
/// <summary>
/// video recorded is portrait
/// </summary>
easyar_RecordVideoOrientation_Portrait = 0x00000001,
} easyar_RecordVideoOrientation;
typedef enum
{
/// <summary>
/// recording start
/// </summary>
easyar_RecordStatus_OnStarted = 0x00000002,
/// <summary>
/// recording stopped
/// </summary>
easyar_RecordStatus_OnStopped = 0x00000004,
/// <summary>
/// start fail
/// </summary>
easyar_RecordStatus_FailedToStart = 0x00000202,
/// <summary>
/// file write succeed
/// </summary>
easyar_RecordStatus_FileSucceeded = 0x00000400,
/// <summary>
/// file write fail
/// </summary>
easyar_RecordStatus_FileFailed = 0x00000401,
/// <summary>
/// runtime info with description
/// </summary>
easyar_RecordStatus_LogInfo = 0x00000800,
/// <summary>
/// runtime error with description
/// </summary>
easyar_RecordStatus_LogError = 0x00001000,
} easyar_RecordStatus;
/// <summary>
/// class
/// RecorderConfiguration is startup configuration for `Recorder`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_RecorderConfiguration;
/// <summary>
/// class
/// extends FrameFilterResult
/// Describes the result of mapping and localization. Updated at the same frame rate as OutputFrame.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SparseSpatialMapResult;
typedef enum
{
/// <summary>
/// Horizontal plane
/// </summary>
easyar_PlaneType_Horizontal = 0,
/// <summary>
/// Vertical plane
/// </summary>
easyar_PlaneType_Vertical = 1,
} easyar_PlaneType;
/// <summary>
/// class
/// </summary>
typedef struct { char _placeHolder_; } easyar_PlaneData;
typedef enum
{
/// <summary>
/// Attempt to perform localization in current SparseSpatialMap until success.
/// </summary>
easyar_LocalizationMode_UntilSuccess = 0,
/// <summary>
/// Perform localization only once
/// </summary>
easyar_LocalizationMode_Once = 1,
/// <summary>
/// Keep performing localization and adjust result on success
/// </summary>
easyar_LocalizationMode_KeepUpdate = 2,
/// <summary>
/// Keep performing localization and adjust localization result only when localization returns different map ID from previous results
/// </summary>
easyar_LocalizationMode_ContinousLocalize = 3,
} easyar_LocalizationMode;
/// <summary>
/// class
/// Configuration used to set the localization mode.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SparseSpatialMapConfig;
/// <summary>
/// class
/// Provides core components for SparseSpatialMap; it can be used for sparse spatial map building as well as localization using an existing map. Also provides utilities for point cloud and plane access.
/// SparseSpatialMap occupies 2 buffers of the camera. Use setBufferCapacity of the camera to set a buffer count that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_SparseSpatialMap;
/// <summary>
/// class
/// SparseSpatialMap manager class, for managing sharing.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SparseSpatialMapManager;
/// <summary>
/// class
/// </summary>
typedef struct { char _placeHolder_; } easyar_Engine;
typedef enum
{
/// <summary>
/// Status to indicate that something went wrong during video open or play.
/// </summary>
easyar_VideoStatus_Error = -1,
/// <summary>
/// Status to show that the video has finished opening and is ready to play.
/// </summary>
easyar_VideoStatus_Ready = 0,
/// <summary>
/// Status to indicate that the video has finished playing and reached the end.
/// </summary>
easyar_VideoStatus_Completed = 1,
} easyar_VideoStatus;
typedef enum
{
/// <summary>
/// Normal video.
/// </summary>
easyar_VideoType_Normal = 0,
/// <summary>
/// Transparent video, left half is the RGB channel and right half is alpha channel.
/// </summary>
easyar_VideoType_TransparentSideBySide = 1,
/// <summary>
/// Transparent video, top half is the RGB channel and bottom half is alpha channel.
/// </summary>
easyar_VideoType_TransparentTopAndBottom = 2,
} easyar_VideoType;
/// <summary>
/// class
/// VideoPlayer is the class for video playback.
/// EasyAR supports normal videos, transparent videos and streaming videos. The video content will be rendered into a texture passed into the player through setRenderTexture.
/// This class only supports OpenGLES2 textures.
/// Due to the dependency on OpenGLES, every method in this class (including the destructor) has to be called in a single thread with an OpenGLES context.
/// The current version requires width and height to be multiples of 16.
///
/// Supported video file formats
/// Windows: Media Foundation-compatible formats; more can be supported via extra codecs. Please refer to `Supported Media Formats in Media Foundation <https://docs.microsoft.com/en-us/windows/win32/medfound/supported-media-formats-in-media-foundation>`__ . DirectShow is not supported.
/// Mac: Not supported.
/// Android: System supported formats. Please refer to `Supported media formats <https://developer.android.com/guide/topics/media/media-formats>`__ .
/// iOS: System supported formats. There is currently no reference available.
/// </summary>
typedef struct { char _placeHolder_; } easyar_VideoPlayer;
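/*
    Illustrative sketch, not part of the EasyAR SDK: the current version requires the video
    width and height to be multiples of 16, which can be checked up front (helper name is
    hypothetical):

        bool hasSupportedVideoSize(int width, int height)
        {
            return (width % 16 == 0) && (height % 16 == 0);
        }
*/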
/// <summary>
/// class
/// Image helper class.
/// </summary>
typedef struct { char _placeHolder_; } easyar_ImageHelper;
/// <summary>
/// class
/// Signal input port.
/// It is used to expose an input port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SignalSink;
/// <summary>
/// class
/// Signal output port.
/// It is used to expose an output port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_SignalSource;
/// <summary>
/// class
/// Input frame input port.
/// It is used to expose an input port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameSink;
/// <summary>
/// class
/// Input frame output port.
/// It is used to expose an output port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameSource;
/// <summary>
/// class
/// Output frame input port.
/// It is used to expose an input port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrameSink;
/// <summary>
/// class
/// Output frame output port.
/// It is used to expose an output port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrameSource;
/// <summary>
/// class
/// Feedback frame input port.
/// It is used to expose an input port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_FeedbackFrameSink;
/// <summary>
/// class
/// Feedback frame output port.
/// It is used to expose an output port for a component.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_FeedbackFrameSource;
/// <summary>
/// class
/// Input frame fork.
/// It is used to branch and transfer an input frame to multiple components in parallel.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameFork;
/// <summary>
/// class
/// Output frame fork.
/// It is used to branch and transfer an output frame to multiple components in parallel.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrameFork;
/// <summary>
/// class
/// Output frame join.
/// It is used to aggregate output frames from multiple components in parallel.
/// All members of this class are thread-safe.
/// Note that connections and disconnections to the inputs shall not be performed while data is flowing, or it may get stuck in a state where no frame can be output. (It is recommended to complete the dataflow connections before starting a camera.)
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrameJoin;
/// <summary>
/// class
/// Feedback frame fork.
/// It is used to branch and transfer a feedback frame to multiple components in parallel.
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_FeedbackFrameFork;
/// <summary>
/// class
/// Input frame throttler.
/// There is an input frame input port and an input frame output port. It can be used to prevent incoming frames from entering algorithm components while they have not finished handling the previous workload.
/// InputFrameThrottler occupies one buffer of the camera. Use setBufferCapacity of the camera to set a buffer count that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// Note that connections and disconnections to signalInput shall not be performed while data is flowing, or it may get stuck in a state where no frame can be output. (It is recommended to complete the dataflow connections before starting a camera.)
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameThrottler;
/// <summary>
/// class
/// Output frame buffer.
/// There is an output frame input port and an output frame fetching function. It can be used to convert output frame fetching from an asynchronous pattern to a synchronous polling pattern, which fits frame-by-frame rendering.
/// OutputFrameBuffer occupies one buffer of the camera. Use setBufferCapacity of the camera to set a buffer count that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrameBuffer;
/// <summary>
/// class
/// Input frame to output frame adapter.
/// There is an input frame input port and an output frame output port. It can be used to wrap an input frame into an output frame, which can be used for rendering without an algorithm component. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameToOutputFrameAdapter;
/// <summary>
/// class
/// Input frame to feedback frame adapter.
/// There is an input frame input port, a historic output frame input port and a feedback frame output port. It can be used to combine an input frame and a historic output frame into a feedback frame, which is required by algorithm components such as `ImageTracker`_ .
/// On every input of an input frame, a feedback frame is generated with a previously input historic feedback frame. If there is no previously input historic feedback frame, it is null in the feedback frame.
/// InputFrameToFeedbackFrameAdapter occupies one buffer of the camera. Use setBufferCapacity of the camera to set a buffer count that is not less than the total number of buffers occupied by all components. Refer to `Overview <Overview.html>`__ .
/// All members of this class are thread-safe.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrameToFeedbackFrameAdapter;
/// <summary>
/// class
/// Input frame.
/// It includes image, camera parameters, timestamp, camera transform matrix against world coordinate system, and tracking status,
/// among which, camera parameters, timestamp, camera transform matrix and tracking status are all optional, but specific algorithms may have special requirements on the input.
/// </summary>
typedef struct { char _placeHolder_; } easyar_InputFrame;
/// <summary>
/// class
/// FrameFilterResult is the base class for result classes of all synchronous algorithm components.
/// </summary>
typedef struct { char _placeHolder_; } easyar_FrameFilterResult;
/// <summary>
/// class
/// Output frame.
/// It includes input frame and results of synchronous components.
/// </summary>
typedef struct { char _placeHolder_; } easyar_OutputFrame;
/// <summary>
/// class
/// Feedback frame.
/// It includes an input frame and a historic output frame for use in feedback synchronous components such as `ImageTracker`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_FeedbackFrame;
typedef enum
{
/// <summary>
/// Permission granted
/// </summary>
easyar_PermissionStatus_Granted = 0x00000000,
/// <summary>
/// Permission denied
/// </summary>
easyar_PermissionStatus_Denied = 0x00000001,
/// <summary>
/// An error happened while requesting permission.
/// </summary>
easyar_PermissionStatus_Error = 0x00000002,
} easyar_PermissionStatus;
/// <summary>
/// StorageType represents where the images, jsons, videos or other files are located.
/// StorageType specifies the root path; in all interfaces, you can use a path relative to this root path.
/// </summary>
typedef enum
{
/// <summary>
/// The app path.
/// Android: the application's `persistent data directory <https://developer.android.google.cn/reference/android/content/pm/ApplicationInfo.html#dataDir>`__
/// iOS: the application's sandbox directory
/// Windows: the application's executable directory
/// Mac: the application’s executable directory (if app is a bundle, this path is inside the bundle)
/// </summary>
easyar_StorageType_App = 0,
/// <summary>
/// The assets path.
/// Android: assets directory (inside apk)
/// iOS: the application's executable directory
/// Windows: EasyAR.dll directory
/// Mac: libEasyAR.dylib directory
/// **Note:** *this path is different if you are using Unity3D. It will point to the StreamingAssets folder.*
/// </summary>
easyar_StorageType_Assets = 1,
/// <summary>
/// The absolute path (json/image path or video path) or url (video only).
/// </summary>
easyar_StorageType_Absolute = 2,
} easyar_StorageType;
/// <summary>
/// class
/// Target is the base class for all targets that can be tracked by `ImageTracker`_ or other algorithms inside EasyAR.
/// </summary>
typedef struct { char _placeHolder_; } easyar_Target;
typedef enum
{
/// <summary>
/// The status is unknown.
/// </summary>
easyar_TargetStatus_Unknown = 0,
/// <summary>
/// The status is undefined.
/// </summary>
easyar_TargetStatus_Undefined = 1,
/// <summary>
/// The target is detected.
/// </summary>
easyar_TargetStatus_Detected = 2,
/// <summary>
/// The target is tracked.
/// </summary>
easyar_TargetStatus_Tracked = 3,
} easyar_TargetStatus;
/// <summary>
/// class
/// TargetInstance is a target tracked by trackers.
/// A TargetInstance contains a raw `Target`_ that is tracked and the current status and pose of the `Target`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_TargetInstance;
/// <summary>
/// class
/// extends FrameFilterResult
/// TargetTrackerResult is the base class of `ImageTrackerResult`_ and `ObjectTrackerResult`_ .
/// </summary>
typedef struct { char _placeHolder_; } easyar_TargetTrackerResult;
/// <summary>
/// class
/// TextureId encapsulates a texture object in rendering API.
/// For OpenGL/OpenGLES, getInt and fromInt shall be used. For Direct3D, getPointer and fromPointer shall be used.
/// </summary>
typedef struct { char _placeHolder_; } easyar_TextureId;
typedef struct { bool has_value; easyar_Buffer * value; } easyar_OptionalOfBuffer;
typedef struct
{
void * _state;
void (* func)(void * _state, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoid;
typedef struct { bool has_value; easyar_ObjectTarget * value; } easyar_OptionalOfObjectTarget;
typedef struct { char _placeHolder_; } easyar_ListOfVec3F;
typedef struct { char _placeHolder_; } easyar_ListOfTargetInstance;
typedef struct { bool has_value; easyar_Target * value; } easyar_OptionalOfTarget;
typedef struct { bool has_value; easyar_OutputFrame * value; } easyar_OptionalOfOutputFrame;
typedef struct { bool has_value; easyar_FrameFilterResult * value; } easyar_OptionalOfFrameFilterResult;
typedef struct { char _placeHolder_; } easyar_ListOfOptionalOfFrameFilterResult;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_OutputFrame *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromOutputFrame;
typedef struct { bool has_value; easyar_FunctorOfVoidFromOutputFrame value; } easyar_OptionalOfFunctorOfVoidFromOutputFrame;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_Target *, bool, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromTargetAndBool;
typedef struct { char _placeHolder_; } easyar_ListOfTarget;
typedef struct { bool has_value; easyar_String * value; } easyar_OptionalOfString;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_CalibrationDownloadStatus, easyar_OptionalOfString, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromCalibrationDownloadStatusAndOptionalOfString;
typedef struct { bool has_value; easyar_ImageTarget * value; } easyar_OptionalOfImageTarget;
typedef struct { char _placeHolder_; } easyar_ListOfImage;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_CloudRecognizationResult *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromCloudRecognizationResult;
typedef struct { char _placeHolder_; } easyar_ListOfBlockInfo;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_InputFrame *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromInputFrame;
typedef struct { bool has_value; easyar_FunctorOfVoidFromInputFrame value; } easyar_OptionalOfFunctorOfVoidFromInputFrame;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_CameraState, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromCameraState;
typedef struct { bool has_value; easyar_FunctorOfVoidFromCameraState value; } easyar_OptionalOfFunctorOfVoidFromCameraState;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_PermissionStatus, easyar_String *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromPermissionStatusAndString;
typedef struct { bool has_value; easyar_FunctorOfVoidFromPermissionStatusAndString value; } easyar_OptionalOfFunctorOfVoidFromPermissionStatusAndString;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_LogLevel, easyar_String *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromLogLevelAndString;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_RecordStatus, easyar_String *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromRecordStatusAndString;
typedef struct { bool has_value; easyar_FunctorOfVoidFromRecordStatusAndString value; } easyar_OptionalOfFunctorOfVoidFromRecordStatusAndString;
typedef struct { bool has_value; easyar_Matrix44F value; } easyar_OptionalOfMatrix44F;
typedef struct { char _placeHolder_; } easyar_ListOfPlaneData;
typedef struct
{
void * _state;
void (* func)(void * _state, bool, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromBool;
typedef struct { bool has_value; easyar_FunctorOfVoidFromBool value; } easyar_OptionalOfFunctorOfVoidFromBool;
typedef struct { bool has_value; easyar_Image * value; } easyar_OptionalOfImage;
typedef struct
{
void * _state;
void (* func)(void * _state, bool, easyar_String *, easyar_String *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromBoolAndStringAndString;
typedef struct
{
void * _state;
void (* func)(void * _state, bool, easyar_String *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromBoolAndString;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_VideoStatus, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromVideoStatus;
typedef struct { bool has_value; easyar_FunctorOfVoidFromVideoStatus value; } easyar_OptionalOfFunctorOfVoidFromVideoStatus;
typedef struct { bool has_value; easyar_FunctorOfVoid value; } easyar_OptionalOfFunctorOfVoid;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_FeedbackFrame *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfVoidFromFeedbackFrame;
typedef struct { bool has_value; easyar_FunctorOfVoidFromFeedbackFrame value; } easyar_OptionalOfFunctorOfVoidFromFeedbackFrame;
typedef struct { char _placeHolder_; } easyar_ListOfOutputFrame;
typedef struct
{
void * _state;
void (* func)(void * _state, easyar_ListOfOutputFrame *, /* OUT */ easyar_OutputFrame * *, /* OUT */ easyar_String * * _exception);
void (* destroy)(void * _state);
} easyar_FunctorOfOutputFrameFromListOfOutputFrame;
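/*
    Illustrative sketch, not part of the EasyAR SDK: the FunctorOf* records above are plain C
    callback structs built from a state pointer, an invocation function and a destroy function.
    A minimal easyar_FunctorOfVoid with no captured state could look like this (function names
    are hypothetical):

        static void onDone(void * _state, easyar_String * * _exception)
        {
            (void)_state;
            (void)_exception;  // out-parameter declared by the signature above
        }
        static void onDoneDestroy(void * _state)
        {
            (void)_state;      // release the captured state here if one was allocated
        }
        static easyar_FunctorOfVoid makeOnDone(void)
        {
            easyar_FunctorOfVoid f;
            f._state = NULL;
            f.func = onDone;
            f.destroy = onDoneDestroy;
            return f;
        }

    The OptionalOf* records all follow one pattern: check has_value before reading value,
    e.g. `if (opt.has_value) { use(opt.value); }`.
*/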
#ifdef __cplusplus
}
#endif
#endif
fileFormatVersion: 2
guid: 25166f0d063d9c24da25f9745778d82d
timeCreated: 1611716671
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
fileFormatVersion: 2
guid: 60dc0a73dd5308042b92a9c68de65d6c
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_TYPES_HXX__
#define __EASYAR_TYPES_HXX__
#include "easyar/types.h"
#include <cstddef>
#include <stdexcept>
namespace easyar {
class String
{
private:
easyar_String * cdata_;
virtual String & operator=(const String & data) { return *this; } //deleted
public:
String(easyar_String * cdata)
: cdata_(cdata)
{
}
virtual ~String()
{
if (cdata_) {
easyar_String__dtor(cdata_);
cdata_ = NULL;
}
}
String(const String & data)
: cdata_(static_cast<easyar_String *>(NULL))
{
easyar_String_copy(data.cdata_, &cdata_);
}
const easyar_String * get_cdata() const
{
return cdata_;
}
easyar_String * get_cdata()
{
return cdata_;
}
static void from_utf8(const char * begin, const char * end, /* OUT */ String * * Return)
{
easyar_String * _return_value_ = NULL;
easyar_String_from_utf8(begin, end, &_return_value_);
*Return = _return_value_ == NULL ? NULL : new String(_return_value_);
}
static void from_utf8_begin(const char * begin, /* OUT */ String * * Return)
{
easyar_String * _return_value_ = NULL;
easyar_String_from_utf8_begin(begin, &_return_value_);
*Return = _return_value_ == NULL ? NULL : new String(_return_value_);
}
const char * begin()
{
return easyar_String_begin(cdata_);
}
const char * end()
{
return easyar_String_end(cdata_);
}
};
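/*
    Illustrative sketch, not part of the EasyAR SDK: String objects are produced by the static
    factories above through an OUT pointer allocated with `new`, so the caller deletes them when
    done (the example function and the literal below are hypothetical):

        #include <cstdio>

        void printGreeting()
        {
            easyar::String * s = NULL;
            easyar::String::from_utf8_begin("hello", &s);  // assumed NUL-terminated UTF-8 input
            if (s != NULL) {
                // begin()/end() delimit the UTF-8 bytes of the string
                std::printf("%.*s\n", static_cast<int>(s->end() - s->begin()), s->begin());
                delete s;                                   // destructor releases the C handle
            }
        }
*/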
class ObjectTargetParameters;
class ObjectTarget;
class ObjectTrackerResult;
class ObjectTracker;
enum CalibrationDownloadStatus
{
/// <summary>
/// Download successful.
/// </summary>
CalibrationDownloadStatus_Successful = 0,
/// <summary>
/// Data is already the latest.
/// </summary>
CalibrationDownloadStatus_NotModified = 1,
/// <summary>
/// Connection error
/// </summary>
CalibrationDownloadStatus_ConnectionError = 2,
/// <summary>
/// Unexpected error
/// </summary>
CalibrationDownloadStatus_UnexpectedError = 3,
};
class CalibrationDownloader;
enum CloudRecognizationStatus
{
/// <summary>
/// Unknown error
/// </summary>
CloudRecognizationStatus_UnknownError = 0,
/// <summary>
/// A target is recognized.
/// </summary>
CloudRecognizationStatus_FoundTarget = 1,
/// <summary>
/// No target is recognized.
/// </summary>
CloudRecognizationStatus_TargetNotFound = 2,
/// <summary>
/// Reached the access limit
/// </summary>
CloudRecognizationStatus_ReachedAccessLimit = 3,
/// <summary>
/// Request interval too low
/// </summary>
CloudRecognizationStatus_RequestIntervalTooLow = 4,
};
class CloudRecognizationResult;
class CloudRecognizer;
class Buffer;
class BufferDictionary;
class BufferPool;
enum CameraDeviceType
{
/// <summary>
/// Unknown location
/// </summary>
CameraDeviceType_Unknown = 0,
/// <summary>
/// Rear camera
/// </summary>
CameraDeviceType_Back = 1,
/// <summary>
/// Front camera
/// </summary>
CameraDeviceType_Front = 2,
};
/// <summary>
/// MotionTrackingStatus describes the quality of device motion tracking.
/// </summary>
enum MotionTrackingStatus
{
/// <summary>
/// Result is not available and should not be used to render virtual objects or do 3D reconstruction. This value occurs temporarily after initialization, tracking loss or relocalization.
/// </summary>
MotionTrackingStatus_NotTracking = 0,
/// <summary>
/// Tracking is available, but the quality of the result is not good enough. This value occurs temporarily due to weak texture or excessive movement. The result can be used to render virtual objects, but should generally not be used to do 3D reconstruction.
/// </summary>
MotionTrackingStatus_Limited = 1,
/// <summary>
/// Tracking with a good quality. The result can be used to render virtual objects or do 3D reconstruction.
/// </summary>
MotionTrackingStatus_Tracking = 2,
};
class CameraParameters;
/// <summary>
/// PixelFormat represents the format of image pixel data. All formats follow the pixel direction from left to right and from top to bottom.
/// </summary>
enum PixelFormat
{
/// <summary>
/// Unknown
/// </summary>
PixelFormat_Unknown = 0,
/// <summary>
/// 256 shades grayscale
/// </summary>
PixelFormat_Gray = 1,
/// <summary>
/// YUV_NV21
/// </summary>
PixelFormat_YUV_NV21 = 2,
/// <summary>
/// YUV_NV12
/// </summary>
PixelFormat_YUV_NV12 = 3,
/// <summary>
/// YUV_I420
/// </summary>
PixelFormat_YUV_I420 = 4,
/// <summary>
/// YUV_YV12
/// </summary>
PixelFormat_YUV_YV12 = 5,
/// <summary>
/// RGB888
/// </summary>
PixelFormat_RGB888 = 6,
/// <summary>
/// BGR888
/// </summary>
PixelFormat_BGR888 = 7,
/// <summary>
/// RGBA8888
/// </summary>
PixelFormat_RGBA8888 = 8,
/// <summary>
/// BGRA8888
/// </summary>
PixelFormat_BGRA8888 = 9,
};
class Image;
struct Matrix44F;
struct Matrix33F;
struct Vec3D;
struct Vec4F;
struct Vec3F;
struct Vec2F;
struct Vec4I;
struct Vec2I;
class DenseSpatialMap;
struct BlockInfo;
class SceneMesh;
struct AccelerometerResult;
class ARCoreCameraDevice;
class ARKitCameraDevice;
enum CameraDeviceFocusMode
{
/// <summary>
/// Normal auto focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
CameraDeviceFocusMode_Normal = 0,
/// <summary>
/// Continuous auto focus mode
/// </summary>
CameraDeviceFocusMode_Continousauto = 2,
/// <summary>
/// Infinity focus mode
/// </summary>
CameraDeviceFocusMode_Infinity = 3,
/// <summary>
/// Macro (close-up) focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
CameraDeviceFocusMode_Macro = 4,
/// <summary>
/// Medium distance focus mode
/// </summary>
CameraDeviceFocusMode_Medium = 5,
};
enum AndroidCameraApiType
{
/// <summary>
/// Android Camera1
/// </summary>
AndroidCameraApiType_Camera1 = 0,
/// <summary>
/// Android Camera2
/// </summary>
AndroidCameraApiType_Camera2 = 1,
};
enum CameraDevicePresetProfile
{
/// <summary>
/// The same as AVCaptureSessionPresetPhoto.
/// </summary>
CameraDevicePresetProfile_Photo = 0,
/// <summary>
/// The same as AVCaptureSessionPresetHigh.
/// </summary>
CameraDevicePresetProfile_High = 1,
/// <summary>
/// The same as AVCaptureSessionPresetMedium.
/// </summary>
CameraDevicePresetProfile_Medium = 2,
/// <summary>
/// The same as AVCaptureSessionPresetLow.
/// </summary>
CameraDevicePresetProfile_Low = 3,
};
enum CameraState
{
/// <summary>
/// Unknown
/// </summary>
CameraState_Unknown = 0x00000000,
/// <summary>
/// Disconnected
/// </summary>
CameraState_Disconnected = 0x00000001,
/// <summary>
/// Preempted by another application.
/// </summary>
CameraState_Preempted = 0x00000002,
};
class CameraDevice;
enum CameraDevicePreference
{
/// <summary>
/// Optimized for `ImageTracker`_ , `ObjectTracker`_ and `CloudRecognizer`_ .
/// </summary>
CameraDevicePreference_PreferObjectSensing = 0,
/// <summary>
/// Optimized for `SurfaceTracker`_ .
/// </summary>
CameraDevicePreference_PreferSurfaceTracking = 1,
/// <summary>
/// Optimized for Motion Tracking .
/// </summary>
CameraDevicePreference_PreferMotionTracking = 2,
};
class CameraDeviceSelector;
struct MagnetometerResult;
class SurfaceTrackerResult;
class SurfaceTracker;
enum MotionTrackerCameraDeviceFPS
{
/// <summary>
/// Frame rate is 30 fps; the actual fps will vary with device capabilities. It is the default setting.
/// </summary>
MotionTrackerCameraDeviceFPS_Camera_FPS_30 = 0,
/// <summary>
/// Frame rate is 60 fps or 30 fps; the actual fps will vary with device capabilities.
/// </summary>
MotionTrackerCameraDeviceFPS_Camera_FPS_60 = 1,
};
enum MotionTrackerCameraDeviceFocusMode
{
/// <summary>
/// Continuous auto focus mode, high image definition and good tracking performance. The actual focus mode will vary with device capabilities. It is the default setting.
/// </summary>
MotionTrackerCameraDeviceFocusMode_Continousauto = 0,
/// <summary>
/// Focus is fixed on Medium distance, medium image definition and better tracking performance. The actual focus mode will vary with device capabilities.
/// </summary>
MotionTrackerCameraDeviceFocusMode_Medium = 1,
};
enum MotionTrackerCameraDeviceResolution
{
/// <summary>
/// Resolution is typically 1280 x 960 or 1280 x 720; the actual resolution will vary with device capabilities. It is the default setting.
/// </summary>
MotionTrackerCameraDeviceResolution_Resolution_1280 = 0,
/// <summary>
/// Resolution is typically 640 x 480 or 640 x 360; the actual resolution will vary with device capabilities.
/// </summary>
MotionTrackerCameraDeviceResolution_Resolution_640 = 1,
};
class MotionTrackerCameraDevice;
class InputFrameRecorder;
class InputFramePlayer;
class CallbackScheduler;
class DelayedCallbackScheduler;
class ImmediateCallbackScheduler;
class JniUtility;
enum LogLevel
{
/// <summary>
/// Error
/// </summary>
LogLevel_Error = 0,
/// <summary>
/// Warning
/// </summary>
LogLevel_Warning = 1,
/// <summary>
/// Information
/// </summary>
LogLevel_Info = 2,
};
class Log;
class Storage;
class ImageTargetParameters;
class ImageTarget;
enum ImageTrackerMode
{
/// <summary>
/// Quality is preferred.
/// </summary>
ImageTrackerMode_PreferQuality = 0,
/// <summary>
/// Performance is preferred.
/// </summary>
ImageTrackerMode_PreferPerformance = 1,
};
class ImageTrackerResult;
class ImageTracker;
class Recorder;
enum RecordProfile
{
/// <summary>
/// 1080P, low quality
/// </summary>
RecordProfile_Quality_1080P_Low = 0x00000001,
/// <summary>
/// 1080P, middle quality
/// </summary>
RecordProfile_Quality_1080P_Middle = 0x00000002,
/// <summary>
/// 1080P, high quality
/// </summary>
RecordProfile_Quality_1080P_High = 0x00000004,
/// <summary>
/// 720P, low quality
/// </summary>
RecordProfile_Quality_720P_Low = 0x00000008,
/// <summary>
/// 720P, middle quality
/// </summary>
RecordProfile_Quality_720P_Middle = 0x00000010,
/// <summary>
/// 720P, high quality
/// </summary>
RecordProfile_Quality_720P_High = 0x00000020,
/// <summary>
/// 480P, low quality
/// </summary>
RecordProfile_Quality_480P_Low = 0x00000040,
/// <summary>
/// 480P, middle quality
/// </summary>
RecordProfile_Quality_480P_Middle = 0x00000080,
/// <summary>
/// 480P, high quality
/// </summary>
RecordProfile_Quality_480P_High = 0x00000100,
/// <summary>
/// default resolution and quality, same as `Quality_720P_Middle`
/// </summary>
RecordProfile_Quality_Default = 0x00000010,
};
enum RecordVideoSize
{
/// <summary>
/// 1080P
/// </summary>
RecordVideoSize_Vid1080p = 0x00000002,
/// <summary>
/// 720P
/// </summary>
RecordVideoSize_Vid720p = 0x00000010,
/// <summary>
/// 480P
/// </summary>
RecordVideoSize_Vid480p = 0x00000080,
};
enum RecordZoomMode
{
/// <summary>
/// If output aspect ratio does not fit input, content will be clipped to fit output aspect ratio.
/// </summary>
RecordZoomMode_NoZoomAndClip = 0x00000000,
/// <summary>
/// If output aspect ratio does not fit input, content will not be clipped and there will be black borders in one dimension.
/// </summary>
RecordZoomMode_ZoomInWithAllContent = 0x00000001,
};
enum RecordVideoOrientation
{
/// <summary>
/// video recorded is landscape
/// </summary>
RecordVideoOrientation_Landscape = 0x00000000,
/// <summary>
/// video recorded is portrait
/// </summary>
RecordVideoOrientation_Portrait = 0x00000001,
};
enum RecordStatus
{
/// <summary>
/// recording start
/// </summary>
RecordStatus_OnStarted = 0x00000002,
/// <summary>
/// recording stopped
/// </summary>
RecordStatus_OnStopped = 0x00000004,
/// <summary>
/// start fail
/// </summary>
RecordStatus_FailedToStart = 0x00000202,
/// <summary>
/// file write succeed
/// </summary>
RecordStatus_FileSucceeded = 0x00000400,
/// <summary>
/// file write fail
/// </summary>
RecordStatus_FileFailed = 0x00000401,
/// <summary>
/// runtime info with description
/// </summary>
RecordStatus_LogInfo = 0x00000800,
/// <summary>
/// runtime error with description
/// </summary>
RecordStatus_LogError = 0x00001000,
};
class RecorderConfiguration;
class SparseSpatialMapResult;
enum PlaneType
{
/// <summary>
/// Horizontal plane
/// </summary>
PlaneType_Horizontal = 0,
/// <summary>
/// Vertical plane
/// </summary>
PlaneType_Vertical = 1,
};
class PlaneData;
enum LocalizationMode
{
/// <summary>
/// Attempt to perform localization in current SparseSpatialMap until success.
/// </summary>
LocalizationMode_UntilSuccess = 0,
/// <summary>
/// Perform localization only once
/// </summary>
LocalizationMode_Once = 1,
/// <summary>
/// Keep performing localization and adjust result on success
/// </summary>
LocalizationMode_KeepUpdate = 2,
/// <summary>
/// Keep performing localization and adjust localization result only when localization returns different map ID from previous results
/// </summary>
LocalizationMode_ContinousLocalize = 3,
};
class SparseSpatialMapConfig;
class SparseSpatialMap;
class SparseSpatialMapManager;
class Engine;
enum VideoStatus
{
/// <summary>
/// Status to indicate that something went wrong during video open or play.
/// </summary>
VideoStatus_Error = -1,
/// <summary>
/// Status to show that the video has finished opening and is ready to play.
/// </summary>
VideoStatus_Ready = 0,
/// <summary>
/// Status to indicate that the video has finished playing and reached the end.
/// </summary>
VideoStatus_Completed = 1,
};
enum VideoType
{
/// <summary>
/// Normal video.
/// </summary>
VideoType_Normal = 0,
/// <summary>
/// Transparent video, left half is the RGB channel and right half is alpha channel.
/// </summary>
VideoType_TransparentSideBySide = 1,
/// <summary>
/// Transparent video, top half is the RGB channel and bottom half is alpha channel.
/// </summary>
VideoType_TransparentTopAndBottom = 2,
};
class VideoPlayer;
class ImageHelper;
class SignalSink;
class SignalSource;
class InputFrameSink;
class InputFrameSource;
class OutputFrameSink;
class OutputFrameSource;
class FeedbackFrameSink;
class FeedbackFrameSource;
class InputFrameFork;
class OutputFrameFork;
class OutputFrameJoin;
class FeedbackFrameFork;
class InputFrameThrottler;
class OutputFrameBuffer;
class InputFrameToOutputFrameAdapter;
class InputFrameToFeedbackFrameAdapter;
class InputFrame;
class FrameFilterResult;
class OutputFrame;
class FeedbackFrame;
enum PermissionStatus
{
/// <summary>
/// Permission granted
/// </summary>
PermissionStatus_Granted = 0x00000000,
/// <summary>
/// Permission denied
/// </summary>
PermissionStatus_Denied = 0x00000001,
/// <summary>
/// An error happened while requesting permission.
/// </summary>
PermissionStatus_Error = 0x00000002,
};
/// <summary>
/// StorageType represents where the images, jsons, videos or other files are located.
/// StorageType specifies the root path; in all interfaces, you can use a path relative to this root path.
/// </summary>
enum StorageType
{
/// <summary>
/// The app path.
/// Android: the application's `persistent data directory <https://developer.android.google.cn/reference/android/content/pm/ApplicationInfo.html#dataDir>`__
/// iOS: the application's sandbox directory
/// Windows: the application's executable directory
/// Mac: the application’s executable directory (if app is a bundle, this path is inside the bundle)
/// </summary>
StorageType_App = 0,
/// <summary>
/// The assets path.
/// Android: assets directory (inside apk)
/// iOS: the application's executable directory
/// Windows: EasyAR.dll directory
/// Mac: libEasyAR.dylib directory
/// **Note:** *this path is different if you are using Unity3D. It will point to the StreamingAssets folder.*
/// </summary>
StorageType_Assets = 1,
/// <summary>
/// The absolute path (json/image path or video path) or url (video only).
/// </summary>
StorageType_Absolute = 2,
};
class Target;
enum TargetStatus
{
/// <summary>
/// The status is unknown.
/// </summary>
TargetStatus_Unknown = 0,
/// <summary>
/// The status is undefined.
/// </summary>
TargetStatus_Undefined = 1,
/// <summary>
/// The target is detected.
/// </summary>
TargetStatus_Detected = 2,
/// <summary>
/// The target is tracked.
/// </summary>
TargetStatus_Tracked = 3,
};
class TargetInstance;
class TargetTrackerResult;
class TextureId;
struct OptionalOfBuffer;
struct FunctorOfVoid;
struct OptionalOfObjectTarget;
class ListOfVec3F;
class ListOfTargetInstance;
struct OptionalOfTarget;
struct OptionalOfOutputFrame;
class ListOfOptionalOfFrameFilterResult;
struct OptionalOfFrameFilterResult;
struct OptionalOfFunctorOfVoidFromOutputFrame;
struct FunctorOfVoidFromOutputFrame;
struct FunctorOfVoidFromTargetAndBool;
class ListOfTarget;
struct FunctorOfVoidFromCalibrationDownloadStatusAndOptionalOfString;
struct OptionalOfString;
struct OptionalOfImageTarget;
class ListOfImage;
struct FunctorOfVoidFromCloudRecognizationResult;
class ListOfBlockInfo;
struct OptionalOfFunctorOfVoidFromInputFrame;
struct FunctorOfVoidFromInputFrame;
struct OptionalOfFunctorOfVoidFromCameraState;
struct FunctorOfVoidFromCameraState;
struct OptionalOfFunctorOfVoidFromPermissionStatusAndString;
struct FunctorOfVoidFromPermissionStatusAndString;
struct FunctorOfVoidFromLogLevelAndString;
struct OptionalOfFunctorOfVoidFromRecordStatusAndString;
struct FunctorOfVoidFromRecordStatusAndString;
struct OptionalOfMatrix44F;
class ListOfPlaneData;
struct OptionalOfFunctorOfVoidFromBool;
struct FunctorOfVoidFromBool;
struct OptionalOfImage;
struct FunctorOfVoidFromBoolAndStringAndString;
struct FunctorOfVoidFromBoolAndString;
struct OptionalOfFunctorOfVoidFromVideoStatus;
struct FunctorOfVoidFromVideoStatus;
struct OptionalOfFunctorOfVoid;
struct OptionalOfFunctorOfVoidFromFeedbackFrame;
struct FunctorOfVoidFromFeedbackFrame;
struct FunctorOfOutputFrameFromListOfOutputFrame;
class ListOfOutputFrame;
}
#endif
fileFormatVersion: 2
guid: 117256b2343c515488151abc66deea74
timeCreated: 1611716671
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#import <Foundation/Foundation.h>
@interface easyar_RefBase : NSObject
+ (instancetype)new NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
@end
@class easyar_ObjectTargetParameters;
@class easyar_ObjectTarget;
@class easyar_ObjectTrackerResult;
@class easyar_ObjectTracker;
typedef enum easyar_CalibrationDownloadStatus : NSInteger
{
/// <summary>
/// Download successful.
/// </summary>
easyar_CalibrationDownloadStatus_Successful = 0,
/// <summary>
/// Data is already the latest.
/// </summary>
easyar_CalibrationDownloadStatus_NotModified = 1,
/// <summary>
/// Connection error
/// </summary>
easyar_CalibrationDownloadStatus_ConnectionError = 2,
/// <summary>
/// Unexpected error
/// </summary>
easyar_CalibrationDownloadStatus_UnexpectedError = 3,
} easyar_CalibrationDownloadStatus;
@class easyar_CalibrationDownloader;
typedef enum easyar_CloudRecognizationStatus : NSInteger
{
/// <summary>
/// Unknown error
/// </summary>
easyar_CloudRecognizationStatus_UnknownError = 0,
/// <summary>
/// A target is recognized.
/// </summary>
easyar_CloudRecognizationStatus_FoundTarget = 1,
/// <summary>
/// No target is recognized.
/// </summary>
easyar_CloudRecognizationStatus_TargetNotFound = 2,
/// <summary>
/// Reached the access limit
/// </summary>
easyar_CloudRecognizationStatus_ReachedAccessLimit = 3,
/// <summary>
/// Request interval too low
/// </summary>
easyar_CloudRecognizationStatus_RequestIntervalTooLow = 4,
} easyar_CloudRecognizationStatus;
@class easyar_CloudRecognizationResult;
@class easyar_CloudRecognizer;
@class easyar_Buffer;
@class easyar_BufferDictionary;
@class easyar_BufferPool;
typedef enum easyar_CameraDeviceType : NSInteger
{
/// <summary>
/// Unknown location
/// </summary>
easyar_CameraDeviceType_Unknown = 0,
/// <summary>
/// Rear camera
/// </summary>
easyar_CameraDeviceType_Back = 1,
/// <summary>
/// Front camera
/// </summary>
easyar_CameraDeviceType_Front = 2,
} easyar_CameraDeviceType;
/// <summary>
/// MotionTrackingStatus describes the quality of device motion tracking.
/// </summary>
typedef enum easyar_MotionTrackingStatus : NSInteger
{
/// <summary>
/// Result is not available and should not be used to render virtual objects or do 3D reconstruction. This value occurs temporarily after initialization, tracking loss or relocalization.
/// </summary>
easyar_MotionTrackingStatus_NotTracking = 0,
/// <summary>
/// Tracking is available, but the quality of the result is not good enough. This value occurs temporarily due to weak texture or excessive movement. The result can be used to render virtual objects, but should generally not be used to do 3D reconstruction.
/// </summary>
easyar_MotionTrackingStatus_Limited = 1,
/// <summary>
/// Tracking with a good quality. The result can be used to render virtual objects or do 3D reconstruction.
/// </summary>
easyar_MotionTrackingStatus_Tracking = 2,
} easyar_MotionTrackingStatus;
@class easyar_CameraParameters;
/// <summary>
/// PixelFormat represents the format of image pixel data. All formats follow the pixel direction from left to right and from top to bottom.
/// </summary>
typedef enum easyar_PixelFormat : NSInteger
{
/// <summary>
/// Unknown
/// </summary>
easyar_PixelFormat_Unknown = 0,
/// <summary>
/// 256 shades grayscale
/// </summary>
easyar_PixelFormat_Gray = 1,
/// <summary>
/// YUV_NV21
/// </summary>
easyar_PixelFormat_YUV_NV21 = 2,
/// <summary>
/// YUV_NV12
/// </summary>
easyar_PixelFormat_YUV_NV12 = 3,
/// <summary>
/// YUV_I420
/// </summary>
easyar_PixelFormat_YUV_I420 = 4,
/// <summary>
/// YUV_YV12
/// </summary>
easyar_PixelFormat_YUV_YV12 = 5,
/// <summary>
/// RGB888
/// </summary>
easyar_PixelFormat_RGB888 = 6,
/// <summary>
/// BGR888
/// </summary>
easyar_PixelFormat_BGR888 = 7,
/// <summary>
/// RGBA8888
/// </summary>
easyar_PixelFormat_RGBA8888 = 8,
/// <summary>
/// BGRA8888
/// </summary>
easyar_PixelFormat_BGRA8888 = 9,
} easyar_PixelFormat;
@class easyar_Image;
@class easyar_Matrix44F;
@class easyar_Matrix33F;
@class easyar_Vec3D;
@class easyar_Vec4F;
@class easyar_Vec3F;
@class easyar_Vec2F;
@class easyar_Vec4I;
@class easyar_Vec2I;
@class easyar_DenseSpatialMap;
@class easyar_BlockInfo;
@class easyar_SceneMesh;
@class easyar_AccelerometerResult;
@class easyar_ARCoreCameraDevice;
@class easyar_ARKitCameraDevice;
typedef enum easyar_CameraDeviceFocusMode : NSInteger
{
/// <summary>
/// Normal auto focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
easyar_CameraDeviceFocusMode_Normal = 0,
/// <summary>
/// Continuous auto focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Continousauto = 2,
/// <summary>
/// Infinity focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Infinity = 3,
/// <summary>
/// Macro (close-up) focus mode. You should call autoFocus to start the focus in this mode.
/// </summary>
easyar_CameraDeviceFocusMode_Macro = 4,
/// <summary>
/// Medium distance focus mode
/// </summary>
easyar_CameraDeviceFocusMode_Medium = 5,
} easyar_CameraDeviceFocusMode;
typedef enum easyar_AndroidCameraApiType : NSInteger
{
/// <summary>
/// Android Camera1
/// </summary>
easyar_AndroidCameraApiType_Camera1 = 0,
/// <summary>
/// Android Camera2
/// </summary>
easyar_AndroidCameraApiType_Camera2 = 1,
} easyar_AndroidCameraApiType;
typedef enum easyar_CameraDevicePresetProfile : NSInteger
{
/// <summary>
/// The same as AVCaptureSessionPresetPhoto.
/// </summary>
easyar_CameraDevicePresetProfile_Photo = 0,
/// <summary>
/// The same as AVCaptureSessionPresetHigh.
/// </summary>
easyar_CameraDevicePresetProfile_High = 1,
/// <summary>
/// The same as AVCaptureSessionPresetMedium.
/// </summary>
easyar_CameraDevicePresetProfile_Medium = 2,
/// <summary>
/// The same as AVCaptureSessionPresetLow.
/// </summary>
easyar_CameraDevicePresetProfile_Low = 3,
} easyar_CameraDevicePresetProfile;
typedef enum easyar_CameraState : NSInteger
{
/// <summary>
/// Unknown
/// </summary>
easyar_CameraState_Unknown = 0x00000000,
/// <summary>
/// Disconnected
/// </summary>
easyar_CameraState_Disconnected = 0x00000001,
/// <summary>
/// Preempted by another application.
/// </summary>
easyar_CameraState_Preempted = 0x00000002,
} easyar_CameraState;
@class easyar_CameraDevice;
typedef enum easyar_CameraDevicePreference : NSInteger
{
/// <summary>
/// Optimized for `ImageTracker`_ , `ObjectTracker`_ and `CloudRecognizer`_ .
/// </summary>
easyar_CameraDevicePreference_PreferObjectSensing = 0,
/// <summary>
/// Optimized for `SurfaceTracker`_ .
/// </summary>
easyar_CameraDevicePreference_PreferSurfaceTracking = 1,
/// <summary>
/// Optimized for Motion Tracking .
/// </summary>
easyar_CameraDevicePreference_PreferMotionTracking = 2,
} easyar_CameraDevicePreference;
@class easyar_CameraDeviceSelector;
@class easyar_MagnetometerResult;
@class easyar_SurfaceTrackerResult;
@class easyar_SurfaceTracker;
typedef enum easyar_MotionTrackerCameraDeviceFPS : NSInteger
{
/// <summary>
/// Frame rate is 30 fps; the actual fps will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceFPS_Camera_FPS_30 = 0,
/// <summary>
/// Frame rate is 60 fps or 30 fps; the actual fps will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceFPS_Camera_FPS_60 = 1,
} easyar_MotionTrackerCameraDeviceFPS;
typedef enum easyar_MotionTrackerCameraDeviceFocusMode : NSInteger
{
/// <summary>
/// Continuous auto focus mode, high image definition and good tracking performance. The actual focus mode will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceFocusMode_Continousauto = 0,
/// <summary>
/// Focus is fixed on Medium distance, medium image definition and better tracking performance. The actual focus mode will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceFocusMode_Medium = 1,
} easyar_MotionTrackerCameraDeviceFocusMode;
typedef enum easyar_MotionTrackerCameraDeviceResolution : NSInteger
{
/// <summary>
/// Resolution is typically 1280 x 960 or 1280 x 720; the actual resolution will vary with device capabilities. It is the default setting.
/// </summary>
easyar_MotionTrackerCameraDeviceResolution_Resolution_1280 = 0,
/// <summary>
/// Resolution is typically 640 x 480 or 640 x 360; the actual resolution will vary with device capabilities.
/// </summary>
easyar_MotionTrackerCameraDeviceResolution_Resolution_640 = 1,
} easyar_MotionTrackerCameraDeviceResolution;
@class easyar_MotionTrackerCameraDevice;
@class easyar_InputFrameRecorder;
@class easyar_InputFramePlayer;
@class easyar_CallbackScheduler;
@class easyar_DelayedCallbackScheduler;
@class easyar_ImmediateCallbackScheduler;
@class easyar_JniUtility;
typedef enum easyar_LogLevel : NSInteger
{
/// <summary>
/// Error
/// </summary>
easyar_LogLevel_Error = 0,
/// <summary>
/// Warning
/// </summary>
easyar_LogLevel_Warning = 1,
/// <summary>
/// Information
/// </summary>
easyar_LogLevel_Info = 2,
} easyar_LogLevel;
@class easyar_Log;
@class easyar_Storage;
@class easyar_ImageTargetParameters;
@class easyar_ImageTarget;
typedef enum easyar_ImageTrackerMode : NSInteger
{
/// <summary>
/// Quality is preferred.
/// </summary>
easyar_ImageTrackerMode_PreferQuality = 0,
/// <summary>
/// Performance is preferred.
/// </summary>
easyar_ImageTrackerMode_PreferPerformance = 1,
} easyar_ImageTrackerMode;
@class easyar_ImageTrackerResult;
@class easyar_ImageTracker;
@class easyar_Recorder;
typedef enum easyar_RecordProfile : NSInteger
{
/// <summary>
/// 1080P, low quality
/// </summary>
easyar_RecordProfile_Quality_1080P_Low = 0x00000001,
/// <summary>
/// 1080P, middle quality
/// </summary>
easyar_RecordProfile_Quality_1080P_Middle = 0x00000002,
/// <summary>
/// 1080P, high quality
/// </summary>
easyar_RecordProfile_Quality_1080P_High = 0x00000004,
/// <summary>
/// 720P, low quality
/// </summary>
easyar_RecordProfile_Quality_720P_Low = 0x00000008,
/// <summary>
/// 720P, middle quality
/// </summary>
easyar_RecordProfile_Quality_720P_Middle = 0x00000010,
/// <summary>
/// 720P, high quality
/// </summary>
easyar_RecordProfile_Quality_720P_High = 0x00000020,
/// <summary>
/// 480P, low quality
/// </summary>
easyar_RecordProfile_Quality_480P_Low = 0x00000040,
/// <summary>
/// 480P, middle quality
/// </summary>
easyar_RecordProfile_Quality_480P_Middle = 0x00000080,
/// <summary>
/// 480P, high quality
/// </summary>
easyar_RecordProfile_Quality_480P_High = 0x00000100,
/// <summary>
/// default resolution and quality, same as `Quality_720P_Middle`
/// </summary>
easyar_RecordProfile_Quality_Default = 0x00000010,
} easyar_RecordProfile;
typedef enum easyar_RecordVideoSize : NSInteger
{
/// <summary>
/// 1080P
/// </summary>
easyar_RecordVideoSize_Vid1080p = 0x00000002,
/// <summary>
/// 720P
/// </summary>
easyar_RecordVideoSize_Vid720p = 0x00000010,
/// <summary>
/// 480P
/// </summary>
easyar_RecordVideoSize_Vid480p = 0x00000080,
} easyar_RecordVideoSize;
typedef enum easyar_RecordZoomMode : NSInteger
{
/// <summary>
/// If output aspect ratio does not fit input, content will be clipped to fit output aspect ratio.
/// </summary>
easyar_RecordZoomMode_NoZoomAndClip = 0x00000000,
/// <summary>
/// If output aspect ratio does not fit input, content will not be clipped and there will be black borders in one dimension.
/// </summary>
easyar_RecordZoomMode_ZoomInWithAllContent = 0x00000001,
} easyar_RecordZoomMode;
typedef enum easyar_RecordVideoOrientation : NSInteger
{
/// <summary>
/// video recorded is landscape
/// </summary>
easyar_RecordVideoOrientation_Landscape = 0x00000000,
/// <summary>
/// video recorded is portrait
/// </summary>
easyar_RecordVideoOrientation_Portrait = 0x00000001,
} easyar_RecordVideoOrientation;
typedef enum easyar_RecordStatus : NSInteger
{
/// <summary>
/// recording start
/// </summary>
easyar_RecordStatus_OnStarted = 0x00000002,
/// <summary>
/// recording stopped
/// </summary>
easyar_RecordStatus_OnStopped = 0x00000004,
/// <summary>
/// start fail
/// </summary>
easyar_RecordStatus_FailedToStart = 0x00000202,
/// <summary>
/// file write succeed
/// </summary>
easyar_RecordStatus_FileSucceeded = 0x00000400,
/// <summary>
/// file write fail
/// </summary>
easyar_RecordStatus_FileFailed = 0x00000401,
/// <summary>
/// runtime info with description
/// </summary>
easyar_RecordStatus_LogInfo = 0x00000800,
/// <summary>
/// runtime error with description
/// </summary>
easyar_RecordStatus_LogError = 0x00001000,
} easyar_RecordStatus;
@class easyar_RecorderConfiguration;
@class easyar_SparseSpatialMapResult;
typedef enum easyar_PlaneType : NSInteger
{
/// <summary>
/// Horizontal plane
/// </summary>
easyar_PlaneType_Horizontal = 0,
/// <summary>
/// Vertical plane
/// </summary>
easyar_PlaneType_Vertical = 1,
} easyar_PlaneType;
@class easyar_PlaneData;
typedef enum easyar_LocalizationMode : NSInteger
{
/// <summary>
/// Attempt to perform localization in current SparseSpatialMap until success.
/// </summary>
easyar_LocalizationMode_UntilSuccess = 0,
/// <summary>
/// Perform localization only once
/// </summary>
easyar_LocalizationMode_Once = 1,
/// <summary>
/// Keep performing localization and adjust result on success
/// </summary>
easyar_LocalizationMode_KeepUpdate = 2,
/// <summary>
/// Keep performing localization and adjust localization result only when localization returns different map ID from previous results
/// </summary>
easyar_LocalizationMode_ContinousLocalize = 3,
} easyar_LocalizationMode;
@class easyar_SparseSpatialMapConfig;
@class easyar_SparseSpatialMap;
@class easyar_SparseSpatialMapManager;
@class easyar_Engine;
typedef enum easyar_VideoStatus : NSInteger
{
/// <summary>
/// Status to indicate that something went wrong during video open or play.
/// </summary>
easyar_VideoStatus_Error = -1,
/// <summary>
/// Status to show that the video has finished opening and is ready to play.
/// </summary>
easyar_VideoStatus_Ready = 0,
/// <summary>
/// Status to indicate that the video has finished playing and reached the end.
/// </summary>
easyar_VideoStatus_Completed = 1,
} easyar_VideoStatus;
typedef enum easyar_VideoType : NSInteger
{
/// <summary>
/// Normal video.
/// </summary>
easyar_VideoType_Normal = 0,
/// <summary>
/// Transparent video, left half is the RGB channel and right half is alpha channel.
/// </summary>
easyar_VideoType_TransparentSideBySide = 1,
/// <summary>
/// Transparent video, top half is the RGB channel and bottom half is alpha channel.
/// </summary>
easyar_VideoType_TransparentTopAndBottom = 2,
} easyar_VideoType;
@class easyar_VideoPlayer;
@class easyar_ImageHelper;
@class easyar_SignalSink;
@class easyar_SignalSource;
@class easyar_InputFrameSink;
@class easyar_InputFrameSource;
@class easyar_OutputFrameSink;
@class easyar_OutputFrameSource;
@class easyar_FeedbackFrameSink;
@class easyar_FeedbackFrameSource;
@class easyar_InputFrameFork;
@class easyar_OutputFrameFork;
@class easyar_OutputFrameJoin;
@class easyar_FeedbackFrameFork;
@class easyar_InputFrameThrottler;
@class easyar_OutputFrameBuffer;
@class easyar_InputFrameToOutputFrameAdapter;
@class easyar_InputFrameToFeedbackFrameAdapter;
@class easyar_InputFrame;
@class easyar_FrameFilterResult;
@class easyar_OutputFrame;
@class easyar_FeedbackFrame;
typedef enum easyar_PermissionStatus : NSInteger
{
/// <summary>
/// Permission granted
/// </summary>
easyar_PermissionStatus_Granted = 0x00000000,
/// <summary>
/// Permission denied
/// </summary>
easyar_PermissionStatus_Denied = 0x00000001,
/// <summary>
/// An error happened while requesting permission.
/// </summary>
easyar_PermissionStatus_Error = 0x00000002,
} easyar_PermissionStatus;
/// <summary>
/// StorageType represents where the images, jsons, videos or other files are located.
/// StorageType specifies the root path; in all interfaces, you can use a path relative to this root path.
/// </summary>
typedef enum easyar_StorageType : NSInteger
{
/// <summary>
/// The app path.
/// Android: the application's `persistent data directory <https://developer.android.google.cn/reference/android/content/pm/ApplicationInfo.html#dataDir>`__
/// iOS: the application's sandbox directory
/// Windows: the application's executable directory
/// Mac: the application's executable directory (if the app is a bundle, this path is inside the bundle)
/// </summary>
easyar_StorageType_App = 0,
/// <summary>
/// The assets path.
/// Android: assets directory (inside apk)
/// iOS: the application's executable directory
/// Windows: EasyAR.dll directory
/// Mac: libEasyAR.dylib directory
/// **Note:** *this path is different if you are using Unity3D. It will point to the StreamingAssets folder.*
/// </summary>
easyar_StorageType_Assets = 1,
/// <summary>
/// The absolute path (JSON/image path or video path) or URL (video only).
/// </summary>
easyar_StorageType_Absolute = 2,
} easyar_StorageType;
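// Illustrative sketch (not part of the original header): the comments above say a
// StorageType names a root directory and that other interfaces take paths relative to
// that root. The struct below is hypothetical (not an EasyAR type); it only shows how
// a caller might carry the two pieces of information together.
typedef struct {
    easyar_StorageType type; // which root the path is resolved against
    const char * path;       // relative path, or absolute path/url for easyar_StorageType_Absolute
} example_storage_path;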
@class easyar_Target;
typedef enum easyar_TargetStatus : NSInteger
{
/// <summary>
/// The status is unknown.
/// </summary>
easyar_TargetStatus_Unknown = 0,
/// <summary>
/// The status is undefined.
/// </summary>
easyar_TargetStatus_Undefined = 1,
/// <summary>
/// The target is detected.
/// </summary>
easyar_TargetStatus_Detected = 2,
/// <summary>
/// The target is tracked.
/// </summary>
easyar_TargetStatus_Tracked = 3,
} easyar_TargetStatus;
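// Illustrative sketch (not part of the original header): a common check is whether a
// target is currently being followed by the tracker. The helper name is hypothetical;
// only the enum constants declared above are assumed.
static inline int example_target_is_tracked(easyar_TargetStatus status)
{
    return status == easyar_TargetStatus_Tracked;
}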
@class easyar_TargetInstance;
@class easyar_TargetTrackerResult;
@class easyar_TextureId;
fileFormatVersion: 2
guid: 85661b7eb5cc202488fb7d9b0b2ed551
timeCreated: 1611716672
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:
//=============================================================================================================================
//
// EasyAR Sense 4.2.0.8700-7bcbc8b1c
// Copyright (c) 2015-2021 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//=============================================================================================================================
#ifndef __EASYAR_VECTOR_HXX__
#define __EASYAR_VECTOR_HXX__
#include "easyar/types.hxx"
namespace easyar {
/// <summary>
/// record
/// 3 dimensional vector of double.
/// </summary>
struct Vec3D
{
/// <summary>
/// The raw data of vector.
/// </summary>
double data[3];
Vec3D();
Vec3D(double data_0, double data_1, double data_2);
easyar_Vec3D get_cdata();
};
/// <summary>
/// record
/// 4 dimensional vector of float.
/// </summary>
struct Vec4F
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[4];
Vec4F();
Vec4F(float data_0, float data_1, float data_2, float data_3);
easyar_Vec4F get_cdata();
};
/// <summary>
/// record
/// 3 dimensional vector of float.
/// </summary>
struct Vec3F
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[3];
Vec3F();
Vec3F(float data_0, float data_1, float data_2);
easyar_Vec3F get_cdata();
};
/// <summary>
/// record
/// 2 dimensional vector of float.
/// </summary>
struct Vec2F
{
/// <summary>
/// The raw data of vector.
/// </summary>
float data[2];
Vec2F();
Vec2F(float data_0, float data_1);
easyar_Vec2F get_cdata();
};
/// <summary>
/// record
/// 4 dimensional vector of int.
/// </summary>
struct Vec4I
{
/// <summary>
/// The raw data of vector.
/// </summary>
int data[4];
Vec4I();
Vec4I(int data_0, int data_1, int data_2, int data_3);
easyar_Vec4I get_cdata();
};
/// <summary>
/// record
/// 2 dimensional vector of int.
/// </summary>
struct Vec2I
{
/// <summary>
/// The raw data of vector.
/// </summary>
int data[2];
Vec2I();
Vec2I(int data_0, int data_1);
easyar_Vec2I get_cdata();
};
}
#endif
#ifndef __IMPLEMENTATION_EASYAR_VECTOR_HXX__
#define __IMPLEMENTATION_EASYAR_VECTOR_HXX__
namespace easyar {
inline Vec3D::Vec3D()
{
this->data[0] = double();
this->data[1] = double();
this->data[2] = double();
}
inline Vec3D::Vec3D(double data_0, double data_1, double data_2)
{
this->data[0] = data_0;
this->data[1] = data_1;
this->data[2] = data_2;
}
inline easyar_Vec3D Vec3D::get_cdata()
{
easyar_Vec3D _return_value_ = {data[0], data[1], data[2]};
return _return_value_;
}
inline Vec4F::Vec4F()
{
this->data[0] = float();
this->data[1] = float();
this->data[2] = float();
this->data[3] = float();
}
inline Vec4F::Vec4F(float data_0, float data_1, float data_2, float data_3)
{
this->data[0] = data_0;
this->data[1] = data_1;
this->data[2] = data_2;
this->data[3] = data_3;
}
inline easyar_Vec4F Vec4F::get_cdata()
{
easyar_Vec4F _return_value_ = {data[0], data[1], data[2], data[3]};
return _return_value_;
}
inline Vec3F::Vec3F()
{
this->data[0] = float();
this->data[1] = float();
this->data[2] = float();
}
inline Vec3F::Vec3F(float data_0, float data_1, float data_2)
{
this->data[0] = data_0;
this->data[1] = data_1;
this->data[2] = data_2;
}
inline easyar_Vec3F Vec3F::get_cdata()
{
easyar_Vec3F _return_value_ = {data[0], data[1], data[2]};
return _return_value_;
}
inline Vec2F::Vec2F()
{
this->data[0] = float();
this->data[1] = float();
}
inline Vec2F::Vec2F(float data_0, float data_1)
{
this->data[0] = data_0;
this->data[1] = data_1;
}
inline easyar_Vec2F Vec2F::get_cdata()
{
easyar_Vec2F _return_value_ = {data[0], data[1]};
return _return_value_;
}
inline Vec4I::Vec4I()
{
this->data[0] = int();
this->data[1] = int();
this->data[2] = int();
this->data[3] = int();
}
inline Vec4I::Vec4I(int data_0, int data_1, int data_2, int data_3)
{
this->data[0] = data_0;
this->data[1] = data_1;
this->data[2] = data_2;
this->data[3] = data_3;
}
inline easyar_Vec4I Vec4I::get_cdata()
{
easyar_Vec4I _return_value_ = {data[0], data[1], data[2], data[3]};
return _return_value_;
}
inline Vec2I::Vec2I()
{
this->data[0] = int();
this->data[1] = int();
}
inline Vec2I::Vec2I(int data_0, int data_1)
{
this->data[0] = data_0;
this->data[1] = data_1;
}
inline easyar_Vec2I Vec2I::get_cdata()
{
easyar_Vec2I _return_value_ = {data[0], data[1]};
return _return_value_;
}
}
#endif
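// Illustrative usage sketch (not part of the original header), based only on the
// declarations above: constructing vectors, reading components, and converting to the
// corresponding easyar_* C value types via get_cdata().
namespace easyar {
    inline void example_vector_usage()
    {
        Vec3F position(0.0f, 1.5f, -2.0f); // component-wise constructor
        Vec2I size;                        // default constructor value-initializes each component to 0
        size.data[0] = 1280;
        size.data[1] = 720;
        easyar_Vec3F raw = position.get_cdata(); // copy the components into the C-layer struct
        (void)raw;
    }
}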