/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@7.0;

import android.hardware.audio.common@7.0;

enum Result : int32_t {
    OK,
    NOT_INITIALIZED,
    INVALID_ARGUMENTS,
    INVALID_STATE,
    /**
     * Methods marked as "Optional method" must return this result value
     * if the operation is not supported by the HAL.
     */
    NOT_SUPPORTED
};
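
/*
 * Illustrative sketch (not part of this interface): how a HAL implementation
 * might report NOT_SUPPORTED from an optional method, assuming the standard
 * HIDL-generated C++ bindings for this package. The function name
 * setHypotheticalOption is invented for illustration; in a real HAL it would
 * be a method of the stream or device implementation class.
 *
 *     using ::android::hardware::Return;
 *     using ::android::hardware::audio::V7_0::Result;
 *
 *     Return<Result> setHypotheticalOption(bool enabled) {
 *         // Optional methods must report NOT_SUPPORTED rather than failing
 *         // with an implementation-specific error code.
 *         (void) enabled;
 *         return Result::NOT_SUPPORTED;
 *     }
 */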

enum AudioDrain : int32_t {
    /** drain() returns when all data has been played. */
    ALL,
    /**
     * drain() returns a short time before all data from the current track has
     * been played to give time for a gapless track switch.
     */
    EARLY_NOTIFY
};

/**
 * A substitute for POSIX timespec.
 */
struct TimeSpec {
    /** Seconds. */
    uint64_t tvSec;
    /** Nanoseconds. */
    uint64_t tvNSec;
};
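
/*
 * Illustrative sketch (not part of this interface): converting a POSIX
 * timespec obtained from clock_gettime() into a TimeSpec, assuming the
 * HIDL-generated C++ struct with the fields declared above.
 *
 *     #include <time.h>
 *
 *     using ::android::hardware::audio::V7_0::TimeSpec;
 *
 *     bool getMonotonicTimeSpec(TimeSpec* out) {
 *         struct timespec ts;
 *         if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) return false;
 *         // POSIX uses signed fields; this HAL type uses unsigned 64-bit.
 *         out->tvSec = static_cast<uint64_t>(ts.tv_sec);
 *         out->tvNSec = static_cast<uint64_t>(ts.tv_nsec);
 *         return true;
 *     }
 */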

struct ParameterValue {
    string key;
    string value;
};
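
/*
 * Illustrative sketch (not part of this interface): building a list of
 * key/value pairs such as the parameter get/set methods of this HAL accept,
 * assuming the HIDL-generated C++ bindings (hidl_vec, hidl_string). The key
 * names below are invented examples, not values defined by this file.
 *
 *     using ::android::hardware::hidl_vec;
 *     using ::android::hardware::audio::V7_0::ParameterValue;
 *
 *     hidl_vec<ParameterValue> parameters = {
 *         {"example_key", "example_value"},
 *         {"another_key", "42"},
 *     };
 */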

enum MmapBufferFlag : uint32_t {
    NONE    = 0x0,
    /**
     * Set when the buffer can be securely shared with untrusted applications
     * through the AAudio exclusive mode.
     * Only set this flag if applications are restricted from accessing the
     * memory surrounding the audio data buffer by a kernel mechanism.
     * See Linux kernel's dma_buf.
     */
    APPLICATION_SHAREABLE    = 0x1,
};

/**
 * Mmap buffer descriptor returned by IStream.createMmapBuffer().
 * Used by streams opened in mmap mode.
 */
struct MmapBufferInfo {
    /** Mmap memory buffer */
    memory  sharedMemory;
    /** Total buffer size in frames */
    uint32_t bufferSizeFrames;
    /** Transfer size granularity in frames */
    uint32_t burstSizeFrames;
    /** Attributes describing the buffer. */
    bitfield<MmapBufferFlag> flags;
};
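
/*
 * Illustrative sketch (not part of this interface): how a HAL implementation
 * might fill an MmapBufferInfo before returning it from
 * IStream.createMmapBuffer(), assuming the HIDL-generated C++ bindings.
 * Wrapping the actual shared buffer into a hidl_memory object is device
 * specific and only hinted at here; the sizes are invented examples.
 *
 *     using ::android::hardware::hidl_bitfield;
 *     using ::android::hardware::hidl_memory;
 *     using ::android::hardware::audio::V7_0::MmapBufferFlag;
 *     using ::android::hardware::audio::V7_0::MmapBufferInfo;
 *
 *     MmapBufferInfo info;
 *     info.sharedMemory = hidl_memory();  // wrap the real DMA/ashmem buffer here
 *     info.bufferSizeFrames = 2048;       // total capacity in frames
 *     info.burstSizeFrames = 256;         // transfer granularity in frames
 *     // Only advertise APPLICATION_SHAREABLE if the kernel isolates the
 *     // memory surrounding the audio data buffer (see the flag above).
 *     info.flags = static_cast<hidl_bitfield<MmapBufferFlag>>(MmapBufferFlag::NONE);
 */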

/**
 * Mmap buffer read/write position returned by IStream.getMmapPosition().
 * Used by streams opened in mmap mode.
 */
struct MmapPosition {
    /** Timestamp in ns, CLOCK_MONOTONIC. */
    int64_t  timeNanoseconds;
    /** Increasing 32-bit frame count, reset when IStream.stop() is called. */
    int32_t  positionFrames;
};
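
/*
 * Illustrative sketch (not part of this interface): filling an MmapPosition
 * from a hardware frame counter, assuming the HIDL-generated C++ bindings.
 * readHardwareFrameCounter() is a hypothetical, driver-specific helper.
 *
 *     #include <time.h>
 *
 *     using ::android::hardware::audio::V7_0::MmapPosition;
 *
 *     int32_t readHardwareFrameCounter();  // hypothetical helper, declaration only
 *
 *     MmapPosition getCurrentPosition() {
 *         MmapPosition pos;
 *         struct timespec ts;
 *         clock_gettime(CLOCK_MONOTONIC, &ts);  // the timestamp must be CLOCK_MONOTONIC
 *         pos.timeNanoseconds = ts.tv_sec * 1000000000LL + ts.tv_nsec;
 *         pos.positionFrames = readHardwareFrameCounter();
 *         return pos;
 *     }
 */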

/**
 * The message queue flags used to synchronize reads and writes from
 * message queues used by StreamIn and StreamOut.
 */
enum MessageQueueFlagBits : uint32_t {
    NOT_EMPTY = 1 << 0,
    NOT_FULL = 1 << 1
};
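
/*
 * Illustrative sketch (not part of this interface): signaling these bits with
 * the libfmq EventFlag helper after producing or consuming data, assuming the
 * ::android::hardware::EventFlag API. The efGroup pointer stands for an event
 * flag already created for the stream's data queue.
 *
 *     using ::android::hardware::EventFlag;
 *     using ::android::hardware::audio::V7_0::MessageQueueFlagBits;
 *
 *     void notifyDataAvailable(EventFlag* efGroup) {
 *         // Wake readers blocked on the queue becoming non-empty.
 *         efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
 *     }
 *
 *     void waitForSpace(EventFlag* efGroup) {
 *         uint32_t state = 0;
 *         // Block until a reader signals that space is available again.
 *         efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL), &state);
 *     }
 */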

/*
 * Microphone information.
 */

/**
 * A 3D point used to represent position or orientation of a microphone.
 *
 * Position: Coordinates of the microphone's capsule, in meters, from the
 * bottom-left-back corner of the bounding box of the Android device in its
 * natural orientation (PORTRAIT for phones, LANDSCAPE for tablets, TVs, etc.).
 * The orientation must match the one reported by the Display.getRotation() API.
 *
 * Orientation: Normalized vector indicating the main orientation of the
 * microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1.
 */
struct AudioMicrophoneCoordinate {
    float x;
    float y;
    float z;
};
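
/*
 * Illustrative sketch (not part of this interface): normalizing a raw
 * direction vector so that its magnitude is 1, as required for the
 * orientation use of this type.
 *
 *     #include <cmath>
 *
 *     using ::android::hardware::audio::V7_0::AudioMicrophoneCoordinate;
 *
 *     AudioMicrophoneCoordinate normalize(float x, float y, float z) {
 *         const float magnitude = std::sqrt(x * x + y * y + z * z);
 *         // A zero vector has no defined orientation; callers must avoid it.
 *         return AudioMicrophoneCoordinate{x / magnitude, y / magnitude, z / magnitude};
 *     }
 *
 *     // Example: a capsule facing straight out along the +z axis.
 *     const AudioMicrophoneCoordinate kFrontFacing = normalize(0.f, 0.f, 1.f);
 */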

/**
 * Enum to identify the type of channel mapping for active microphones.
 * Used channels further identify whether the microphone applies any significant
 * processing (e.g. high-pass filtering, dynamic compression).
 * Simple processing, such as constant gain adjustment, must be reported as DIRECT.
 */
@export(name="audio_microphone_channel_mapping_t", value_prefix="AUDIO_MICROPHONE_CHANNEL_MAPPING_")
enum AudioMicrophoneChannelMapping : uint32_t {
    /** Channel not used. */
    UNUSED      = 0,
    /** Channel used and signal not processed. */
    DIRECT      = 1,
    /** Channel used and signal has some processing. */
    PROCESSED   = 2,
};

/**
 * Enum to identify locations of microphones with regard to the body of the
 * Android device.
 */
@export(name="audio_microphone_location_t", value_prefix="AUDIO_MICROPHONE_LOCATION_")
enum AudioMicrophoneLocation : uint32_t {
    UNKNOWN             = 0,
    MAINBODY            = 1,
    MAINBODY_MOVABLE    = 2,
    PERIPHERAL          = 3,
};

/**
 * Identifier to help group related microphones together,
 * e.g. the microphones of an array should belong to the same group.
 */
typedef int32_t AudioMicrophoneGroup;

/**
 * Enum with standard polar patterns of microphones
 */
@export(name="audio_microphone_directionality_t", value_prefix="AUDIO_MICROPHONE_DIRECTIONALITY_")
enum AudioMicrophoneDirectionality : uint32_t {
    UNKNOWN         = 0,
    OMNI            = 1,
    BI_DIRECTIONAL  = 2,
    CARDIOID        = 3,
    HYPER_CARDIOID  = 4,
    SUPER_CARDIOID  = 5,
};

/**
 * A (frequency, level) pair. Used to represent frequency response.
 */
struct AudioFrequencyResponsePoint {
    /** In Hz */
    float frequency;
    /** In dB */
    float level;
};

/**
 * Structure used by the HAL to describe a microphone's characteristics.
 * Used by StreamIn and Device.
 */
struct MicrophoneInfo {
    /**
     * Unique alphanumeric id for the microphone. Guaranteed to be the same
     * even after rebooting.
     */
    string                                  deviceId;
    /**
     * Device specific information
     */
    DeviceAddress                           deviceAddress;
    /**
     * Each element of the vector must describe the channel with the same
     * index.
     */
    vec<AudioMicrophoneChannelMapping>      channelMapping;
    /** Location of the microphone in regard to the body of the device */
    AudioMicrophoneLocation                 location;
    /**
     * Identifier to help group related microphones together,
     * e.g. the microphones of an array should belong to the same group.
     */
    AudioMicrophoneGroup                    group;
    /**
     * Index of this microphone within the group.
     * (group, index) must be unique within the same device.
     */
    uint32_t                                indexInTheGroup;
    /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL */
    float                                   sensitivity;
    /** Level in dB of the max SPL supported at 1000 Hz */
    float                                   maxSpl;
    /** Level in dB of the min SPL supported at 1000 Hz */
    float                                   minSpl;
    /** Standard polar pattern of the microphone */
    AudioMicrophoneDirectionality           directionality;
    /**
     * Vector with the frequency response of the microphone, with points
     * ordered from low to high frequencies.
     * Levels are in dB, relative to the level at 1000 Hz.
     */
    vec<AudioFrequencyResponsePoint>        frequencyResponse;
    /**
     * Position of the microphone's capsule, in meters, from the
     * bottom-left-back corner of the bounding box of the device.
     */
    AudioMicrophoneCoordinate               position;
    /**
     * Normalized vector indicating the main orientation of the microphone's
     * capsule. sqrt(x^2 + y^2 + z^2) = 1.
     */
    AudioMicrophoneCoordinate               orientation;
};
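
/*
 * Illustrative sketch (not part of this interface): populating one
 * MicrophoneInfo entry such as a HAL might return from its microphone
 * enumeration method, assuming the HIDL-generated C++ bindings. All concrete
 * values below are invented for illustration only.
 *
 *     using namespace ::android::hardware::audio::V7_0;
 *
 *     MicrophoneInfo mic;
 *     mic.deviceId = "builtin_mic_bottom";    // must stay stable across reboots
 *     // mic.deviceAddress identifies the attached device; left default here.
 *     mic.channelMapping = {AudioMicrophoneChannelMapping::DIRECT,
 *                           AudioMicrophoneChannelMapping::UNUSED};
 *     mic.location = AudioMicrophoneLocation::MAINBODY;
 *     mic.group = 0;
 *     mic.indexInTheGroup = 0;
 *     mic.sensitivity = -37.0f;               // dBFS for a 1000 Hz tone at 94 dB SPL
 *     mic.maxSpl = 132.5f;
 *     mic.minSpl = 28.0f;
 *     mic.directionality = AudioMicrophoneDirectionality::OMNI;
 *     mic.frequencyResponse = {{100.0f, -0.5f}, {1000.0f, 0.0f}, {8000.0f, -1.2f}};
 *     mic.position = AudioMicrophoneCoordinate{0.010f, 0.005f, 0.0f};  // meters
 *     mic.orientation = AudioMicrophoneCoordinate{0.0f, 0.0f, 1.0f};   // unit vector
 */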

/**
 * Constants used by the HAL to determine how to select microphones and process those inputs in
 * order to optimize for capture in the specified direction.
 *
 * The MicrophoneDirection constants are defined in MicrophoneDirection.java.
 */
@export(name="audio_microphone_direction_t", value_prefix="MIC_DIRECTION_")
enum MicrophoneDirection : int32_t {
    /**
     * Don't do any directionality processing of the activated microphone(s).
     */
    UNSPECIFIED = 0,
    /**
     * Optimize capture for audio coming from the screen-side of the device.
     */
    FRONT = 1,
    /**
     * Optimize capture for audio coming from the side of the device opposite the screen.
     */
    BACK = 2,
    /**
     * Optimize capture for audio coming from an off-device microphone.
     */
    EXTERNAL = 3,
};


/**
 * Dual Mono handling is used when a stereo audio stream
 * contains separate audio content on the left and right channels.
 * Such information about the content of the stream may be found, for example,
 * in ITU T-REC-J.94-201610 A.6.2.3 Component descriptor.
 */
@export(name="audio_dual_mono_mode_t", value_prefix="AUDIO_DUAL_MONO_MODE_")
enum DualMonoMode : int32_t {
    // Need to be in sync with DUAL_MONO_MODE* constants in
    // frameworks/base/media/java/android/media/AudioTrack.java
    /**
     * Disable any Dual Mono presentation effect.
     */
    OFF = 0,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the left and right audio channels blended together
     * and delivered to both channels.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that the left-right stereo symmetric
     * channels are pairwise blended, the other channels such as center
     * are left alone.
     */
    LR = 1,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the left audio channel replicated into the right audio channel.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that all channels with left-right
     * stereo symmetry will have the left channel position replicated
     * into the right channel position. The center channels (with no
     * left/right symmetry) or unbalanced channels are left alone.
     */
    LL = 2,
    /**
     * This mode indicates that a stereo stream should be presented
     * with the right audio channel replicated into the left audio channel.
     *
     * Behavior for non-stereo streams is implementation defined.
     * A suggested guideline is that all channels with left-right
     * stereo symmetry will have the right channel position replicated
     * into the left channel position. The center channels (with no
     * left/right symmetry) or unbalanced channels are left alone.
     */
    RR = 3,
};
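
/*
 * Illustrative sketch (not part of this interface): the per-frame arithmetic
 * for a plain interleaved stereo float buffer under each mode, following the
 * descriptions above. Real implementations may apply this in hardware or in a
 * different sample format.
 *
 *     #include <cstddef>
 *
 *     using ::android::hardware::audio::V7_0::DualMonoMode;
 *
 *     // samples[2 * i] is the left sample, samples[2 * i + 1] the right one.
 *     void applyDualMono(DualMonoMode mode, float* samples, size_t frameCount) {
 *         for (size_t i = 0; i < frameCount; ++i) {
 *             float& l = samples[2 * i];
 *             float& r = samples[2 * i + 1];
 *             switch (mode) {
 *                 case DualMonoMode::LR: l = r = 0.5f * (l + r); break;  // blend both
 *                 case DualMonoMode::LL: r = l; break;  // replicate left into right
 *                 case DualMonoMode::RR: l = r; break;  // replicate right into left
 *                 case DualMonoMode::OFF:
 *                 default: break;  // leave the stream untouched
 *             }
 *         }
 *     }
 */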

/**
 * Algorithms used for timestretching (preserving pitch while playing audio
 * content at a different speed).
 */
@export(name="audio_timestretch_stretch_mode_t", value_prefix="AUDIO_TIMESTRETCH_STRETCH_")
enum TimestretchMode : int32_t {
    // Need to be in sync with AUDIO_STRETCH_MODE_* constants in
    // frameworks/base/media/java/android/media/PlaybackParams.java
    DEFAULT = 0,
    /** Selects the timestretch algorithm best suited for voice (speech) content. */
    VOICE = 1,
};

/**
 * Behavior when the values for speed and / or pitch are out of the
 * applicable range.
 */
@export(name="", value_prefix="HAL_AUDIO_TIMESTRETCH_FALLBACK_")
enum TimestretchFallbackMode : int32_t {
    // Need to be in sync with AUDIO_FALLBACK_MODE_* constants in
    // frameworks/base/media/java/android/media/PlaybackParams.java
    /** Play silence for parameter values that are out of range. */
    MUTE = 1,
    /** Return an error while trying to set the parameters. */
    FAIL = 2,
};

/**
 * Parameters determining playback behavior. They are used to speed up or
 * slow down playback and / or change the tonal frequency of the audio content
 * (pitch).
 */
struct PlaybackRate {
    /**
     * Speed factor (multiplier). Normal speed has the value of 1.0f.
     * Values less than 1.0f slow down playback, values greater than 1.0f
     * speed it up.
     */
    float speed;
    /**
     * Pitch factor (multiplier). Setting the pitch value to 1.0f while
     * changing the playback speed preserves the pitch; this is often
     * called "timestretching." Setting the pitch value equal to the speed
     * produces the same effect as playing the audio content at a different
     * sampling rate.
     */
    float pitch;
    /**
     * Selects the algorithm used for timestretching (preserving pitch while
     * playing audio at a different speed).
     */
    TimestretchMode timestretchMode;
    /**
     * Selects the behavior when the specified values for speed and / or pitch
     * are out of applicable range.
     */
    TimestretchFallbackMode fallbackMode;
};
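
/*
 * Illustrative sketch (not part of this interface): validating a requested
 * PlaybackRate against an implementation's supported range and honoring the
 * fallback mode. The speed and pitch limits below are invented; each
 * implementation defines its own applicable range.
 *
 *     using ::android::hardware::audio::V7_0::PlaybackRate;
 *     using ::android::hardware::audio::V7_0::Result;
 *     using ::android::hardware::audio::V7_0::TimestretchFallbackMode;
 *
 *     Result applyPlaybackRate(const PlaybackRate& rate, bool* muted) {
 *         const bool inRange = rate.speed >= 0.5f && rate.speed <= 2.0f &&
 *                              rate.pitch >= 0.5f && rate.pitch <= 2.0f;
 *         *muted = false;
 *         if (inRange) return Result::OK;
 *         if (rate.fallbackMode == TimestretchFallbackMode::MUTE) {
 *             *muted = true;                  // play silence instead of failing
 *             return Result::OK;
 *         }
 *         return Result::INVALID_ARGUMENTS;   // FAIL: reject the parameters
 *     }
 */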

/**
 * The audio flags serve two purposes:
 *
 *  - when a stream is created they indicate its attributes;
 *
 *  - when present in a profile descriptor listed for a particular audio
 *    hardware module, they indicate that a stream can be opened that
 *    supports the attributes indicated by the flags.
 *
 * See 'audioIoFlag' in audio_policy_configuration.xsd for the
 * list of allowed values.
 */
typedef string AudioInOutFlag;
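
/*
 * Illustrative sketch (not part of this interface): flags travel as a vector
 * of strings whose allowed values come from audio_policy_configuration.xsd.
 * The helper below only checks for membership; the flag name in the usage
 * comment follows that schema but is shown purely as an example.
 *
 *     using ::android::hardware::hidl_vec;
 *     using ::android::hardware::audio::V7_0::AudioInOutFlag;
 *
 *     bool hasFlag(const hidl_vec<AudioInOutFlag>& flags, const char* wanted) {
 *         for (const auto& flag : flags) {
 *             if (flag == wanted) return true;
 *         }
 *         return false;
 *     }
 *
 *     // Usage: bool isDirect = hasFlag(streamFlags, "AUDIO_OUTPUT_FLAG_DIRECT");
 */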