ARCameraFrameEventArgs.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace UnityEngine.XR.ARFoundation
{
    /// <summary>
    /// A structure for camera-related information pertaining to a particular frame.
    /// This is used to communicate information in the <see cref="ARSubsystemManager.cameraFrameReceived" /> event.
    /// </summary>
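    /// <remarks>
    /// The example below is an illustrative sketch of one way to consume this event from a
    /// <c>MonoBehaviour</c>; the handler name <c>OnCameraFrameReceived</c> and the component
    /// hosting it are placeholders, not part of this type.
    /// </remarks>
    /// <example>
    /// <code>
    /// void OnEnable()
    /// {
    ///     ARSubsystemManager.cameraFrameReceived += OnCameraFrameReceived;
    /// }
    ///
    /// void OnDisable()
    /// {
    ///     ARSubsystemManager.cameraFrameReceived -= OnCameraFrameReceived;
    /// }
    ///
    /// void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    /// {
    ///     // Nullable fields are only populated when the platform provides them.
    ///     if (eventArgs.timestampNs.HasValue)
    ///         Debug.Log("Frame timestamp: " + eventArgs.timestampNs.Value + " ns");
    /// }
    /// </code>
    /// </example>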
    public struct ARCameraFrameEventArgs : IEquatable<ARCameraFrameEventArgs>
    {
        /// <summary>
        /// The <see cref="ARLightEstimationData" /> associated with this frame.
        /// </summary>
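        /// <remarks>
        /// The example below is an illustrative sketch of applying the estimate to a scene
        /// light; the <c>mainLight</c> parameter is a placeholder, and only the
        /// <c>averageBrightness</c> field of <see cref="ARLightEstimationData"/> is used here.
        /// </remarks>
        /// <example>
        /// <code>
        /// void ApplyLightEstimation(ARCameraFrameEventArgs eventArgs, Light mainLight)
        /// {
        ///     if (eventArgs.lightEstimation.averageBrightness.HasValue)
        ///         mainLight.intensity = eventArgs.lightEstimation.averageBrightness.Value;
        /// }
        /// </code>
        /// </example>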
        public ARLightEstimationData lightEstimation { get; set; }

        /// <summary>
        /// The time, in nanoseconds, associated with this frame.
        /// Use <c>timestampNs.HasValue</c> to determine if this data is available.
        /// </summary>
        public long? timestampNs { get; set; }

        /// <summary>
        /// Gets or sets the projection matrix for the AR Camera. Use
        /// <c>projectionMatrix.HasValue</c> to determine if this data is available.
        /// </summary>
        public Matrix4x4? projectionMatrix { get; set; }

        /// <summary>
        /// Gets or sets the display matrix for use in the shader used
        /// by the <see cref="ARFoundationBackgroundRenderer"/>.
        /// Use <c>displayMatrix.HasValue</c> to determine if this data is available.
        /// </summary>
        public Matrix4x4? displayMatrix { get; set; }

        /// <summary>
        /// The textures associated with this camera frame. These are generally
        /// external textures, which exist only on the GPU. To use them on the
        /// CPU, e.g., for computer vision processing, you will need to read
        /// them back from the GPU.
        /// </summary>
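        /// <remarks>
        /// The example below is an illustrative sketch of one way to copy such a texture into
        /// CPU-readable memory using <c>Graphics.Blit</c> and <c>Texture2D.ReadPixels</c>;
        /// it is not the only (nor necessarily the most efficient) approach.
        /// </remarks>
        /// <example>
        /// <code>
        /// Texture2D CopyToReadableTexture(Texture2D gpuTexture)
        /// {
        ///     // Blit the external texture into a temporary RenderTexture,
        ///     // then read the pixels back into a new CPU-accessible Texture2D.
        ///     var temp = RenderTexture.GetTemporary(gpuTexture.width, gpuTexture.height, 0);
        ///     Graphics.Blit(gpuTexture, temp);
        ///     var previous = RenderTexture.active;
        ///     RenderTexture.active = temp;
        ///     var readable = new Texture2D(gpuTexture.width, gpuTexture.height);
        ///     readable.ReadPixels(new Rect(0, 0, temp.width, temp.height), 0, 0);
        ///     readable.Apply();
        ///     RenderTexture.active = previous;
        ///     RenderTexture.ReleaseTemporary(temp);
        ///     return readable;
        /// }
        /// </code>
        /// </example>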
        public List<Texture2D> textures { get; set; }

        /// <summary>
        /// Ids of the property name associated with each texture. This is a
        /// parallel <c>List</c> to the <see cref="textures"/> list.
        /// </summary>
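        /// <remarks>
        /// The example below is an illustrative sketch of binding each texture to a material
        /// by its property name id via <c>Material.SetTexture(int, Texture)</c>; the
        /// <c>backgroundMaterial</c> parameter is a placeholder for whatever material you use.
        /// </remarks>
        /// <example>
        /// <code>
        /// void ApplyTextures(ARCameraFrameEventArgs eventArgs, Material backgroundMaterial)
        /// {
        ///     int count = Mathf.Min(eventArgs.textures.Count, eventArgs.propertyNameIds.Count);
        ///     for (int i = 0; i &lt; count; ++i)
        ///         backgroundMaterial.SetTexture(eventArgs.propertyNameIds[i], eventArgs.textures[i]);
        /// }
        /// </code>
        /// </example>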
        public List<int> propertyNameIds { get; set; }

        /// <summary>
        /// The exposure duration, in seconds with sub-millisecond precision. Used when calculating motion blur.
        /// </summary>
        /// <remarks>
        /// <see cref="exposureDuration"/> may be <c>null</c> if the platform does not support exposure duration.
        /// </remarks>
        public double? exposureDuration { get; set; }

        /// <summary>
        /// The offset of camera exposure. Used to scale scene lighting in the post-processed lighting stage.
        /// </summary>
        /// <remarks>
        /// <see cref="exposureOffset"/> may be <c>null</c> if the platform does not support exposure offset.
        /// </remarks>
        public float? exposureOffset { get; set; }

        /// <summary>
        /// The list of keywords to be enabled for the material.
        /// </summary>
        public List<string> enabledMaterialKeywords { get; internal set; }

        /// <summary>
        /// The list of keywords to be disabled for the material.
        /// </summary>
        public List<string> disabledMaterialKeywords { get; internal set; }
        /// <summary>
        /// Generates a hash code suitable for use in a <c>HashSet</c> or <c>Dictionary</c>.
        /// </summary>
        /// <returns>A hash code computed from all fields of this struct.</returns>
        public override int GetHashCode()
        {
            unchecked
            {
                var hash = lightEstimation.GetHashCode();
                hash = hash * 486187739 + timestampNs.GetHashCode();
                hash = hash * 486187739 + projectionMatrix.GetHashCode();
                hash = hash * 486187739 + displayMatrix.GetHashCode();
                hash = hash * 486187739 + (textures == null ? 0 : textures.GetHashCode());
                hash = hash * 486187739 + (propertyNameIds == null ? 0 : propertyNameIds.GetHashCode());
                hash = hash * 486187739 + exposureDuration.GetHashCode();
                hash = hash * 486187739 + exposureOffset.GetHashCode();
                hash = hash * 486187739 + (enabledMaterialKeywords == null ? 0 : enabledMaterialKeywords.GetHashCode());
                hash = hash * 486187739 + (disabledMaterialKeywords == null ? 0 : disabledMaterialKeywords.GetHashCode());
                return hash;
            }
        }
        /// <summary>
        /// Tests for equality.
        /// </summary>
        /// <returns><c>true</c> if <paramref name="obj"/> is an <see cref="ARCameraFrameEventArgs"/> and equals this one; otherwise <c>false</c>.</returns>
        public override bool Equals(object obj)
        {
            if (!(obj is ARCameraFrameEventArgs))
                return false;

            return Equals((ARCameraFrameEventArgs)obj);
        }
        /// <summary>
        /// Generates a string representation of this struct suitable for debug
        /// logging.
        /// </summary>
        /// <returns>A string representation of this struct suitable for debug
        /// logging.</returns>
        public override string ToString()
        {
            var stringBuilder = new StringBuilder();
            stringBuilder.Append("lightEstimation: " + lightEstimation.ToString());
            stringBuilder.Append("\ntimestamp: " + timestampNs);
            if (timestampNs.HasValue)
                stringBuilder.Append("ns");
            stringBuilder.Append("\nprojectionMatrix: " + projectionMatrix);
            stringBuilder.Append("\ndisplayMatrix: " + displayMatrix);
            stringBuilder.Append("\ntexture count: " + (textures == null ? 0 : textures.Count));
            stringBuilder.Append("\npropertyNameId count: " + (propertyNameIds == null ? 0 : propertyNameIds.Count));
            return stringBuilder.ToString();
        }
        /// <summary>
        /// Tests for equality against another <see cref="ARCameraFrameEventArgs"/>.
        /// Note that the list-valued fields are compared by reference, not element-wise.
        /// </summary>
        /// <returns><c>true</c> if all fields are equal; otherwise <c>false</c>.</returns>
        public bool Equals(ARCameraFrameEventArgs other)
        {
            return
                lightEstimation.Equals(other.lightEstimation)
                && timestampNs.Equals(other.timestampNs)
                && projectionMatrix.Equals(other.projectionMatrix)
                && displayMatrix.Equals(other.displayMatrix)
                && ((textures == null) ? (other.textures == null) : textures.Equals(other.textures))
                && ((propertyNameIds == null) ? (other.propertyNameIds == null)
                    : propertyNameIds.Equals(other.propertyNameIds))
                && exposureDuration.Equals(other.exposureDuration)
                && exposureOffset.Equals(other.exposureOffset)
                && ((enabledMaterialKeywords == null) ? (other.enabledMaterialKeywords == null)
                    : enabledMaterialKeywords.Equals(other.enabledMaterialKeywords))
                && ((disabledMaterialKeywords == null) ? (other.disabledMaterialKeywords == null)
                    : disabledMaterialKeywords.Equals(other.disabledMaterialKeywords));
        }
        /// <summary>
        /// Tests for equality. Equivalent to <see cref="Equals(ARCameraFrameEventArgs)"/>.
        /// </summary>
        public static bool operator ==(ARCameraFrameEventArgs lhs, ARCameraFrameEventArgs rhs)
        {
            return lhs.Equals(rhs);
        }

        /// <summary>
        /// Tests for inequality. Equivalent to the negation of <see cref="Equals(ARCameraFrameEventArgs)"/>.
        /// </summary>
        public static bool operator !=(ARCameraFrameEventArgs lhs, ARCameraFrameEventArgs rhs)
        {
            return !lhs.Equals(rhs);
        }
    }
}