/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.automotive.evs@1.1;

import @1.0::CameraDesc;
import @1.0::DisplayDesc;
import @1.0::DisplayState;
import @1.0::EvsResult;
import android.hardware.graphics.common@1.2::HardwareBuffer;
import android.hardware.camera.device@3.2::CameraMetadata;

/**
 * Structure describing the basic properties of an EVS camera, extended from its
 * v1.0 declaration.
 *
 * The HAL is responsible for filling out this structure for each
 * EVS camera in the system.
 */
struct CameraDesc {
    @1.0::CameraDesc v1;

    /**
     * Camera metadata, such as lens characteristics.
     */
    CameraMetadata metadata;
};

/**
 * Structure representing an image buffer through our APIs.
 *
 * In addition to the handle to the graphics memory, we need to retain
 * the properties of the buffer for easy reference and reconstruction of
 * an ANativeWindowBuffer object on the remote side of API calls.
 * (Not least because OpenGL expects an ANativeWindowBuffer* when a buffer
 * is wrapped as a texture via eglCreateImageKHR().)
 */
struct BufferDesc {
    /**
     * HIDL counterpart of `AHardwareBuffer_Desc`. Please see
     * hardware/interfaces/graphics/common/1.2/types.hal for more details.
     */
    HardwareBuffer buffer;

    /**
     * The size of a pixel in bytes.
     */
    uint32_t pixelSize;

    /**
     * Opaque value from the driver.
     */
    uint32_t bufferId;

    /**
     * Unique identifier of the physical camera device that produces this buffer.
     */
    string deviceId;

    /**
     * Time that this buffer was filled.
     */
    int64_t timestamp;

    /**
     * Frame metadata. This is opaque to the EVS manager.
     */
    vec<uint8_t> metadata;
};

/**
 * Types of informative streaming events
 */
enum EvsEventType : uint32_t {
    /**
     * Video stream is started
     */
    STREAM_STARTED = 0,

    /**
     * Video stream is stopped
     */
    STREAM_STOPPED,

    /**
     * Video frame is dropped
     */
    FRAME_DROPPED,

    /**
     * A timeout happened
     */
    TIMEOUT,

    /**
     * Camera parameter is changed; payload contains a changed parameter ID and
     * its value
     */
    PARAMETER_CHANGED,

    /**
     * Master role has become available
     */
    MASTER_RELEASED,

    /**
     * Any other erroneous streaming events
     */
    STREAM_ERROR,
};

/**
 * Structure that describes informative events that occur while EVS is streaming.
 */
struct EvsEventDesc {
    /**
     * Type of the informative event.
     */
    EvsEventType aType;

    /**
     * Device identifier.
     */
    string deviceId;

    /**
     * Possible additional information.
     */
    uint32_t[4] payload;
};

/**
 * EVS Camera Parameter
 */
enum CameraParam : uint32_t {
    /**
     * The brightness of image frames
     */
    BRIGHTNESS,

    /**
     * The contrast of image frames
     */
    CONTRAST,

    /**
     * Automatic gain/exposure control
     */
    AUTOGAIN,

    /**
     * Gain control
     */
    GAIN,

    /**
     * Automatic white balance
     */
    AUTO_WHITE_BALANCE,

    /**
     * Manual white balance setting as a color temperature in Kelvin.
     */
    WHITE_BALANCE_TEMPERATURE,

    /**
     * Image sharpness adjustment
     */
    SHARPNESS,

    /**
     * Auto Exposure Control modes: auto, manual, shutter priority, or
     * aperture priority.
     */
    AUTO_EXPOSURE,

    /**
     * Manual exposure time of the camera
     */
    ABSOLUTE_EXPOSURE,

    /**
     * Set the focal point of the camera to the specified position. This
     * parameter may not be effective when auto focus is enabled.
     */
    ABSOLUTE_FOCUS,

    /**
     * Enables continuous automatic focus adjustments.
     */
    AUTO_FOCUS,

    /**
     * Specify the objective lens focal length as an absolute value.
     */
    ABSOLUTE_ZOOM,
};
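/*
 * A minimal client-side sketch of consuming the event types above; it is not
 * part of the HAL definition. The onParameterChanged() and onStreamStopped()
 * helpers and the payload indexing are assumptions for illustration (the
 * PARAMETER_CHANGED comment only promises that the payload carries the
 * changed parameter ID and its value).
 *
 *     // C++, using the types generated from this file.
 *     using namespace android::hardware::automotive::evs::V1_1;
 *
 *     void handleStreamEvent(const EvsEventDesc& event) {
 *         switch (event.aType) {
 *             case EvsEventType::PARAMETER_CHANGED:
 *                 // Assumed layout: payload[0] = parameter ID, payload[1] = new value.
 *                 onParameterChanged(static_cast<CameraParam>(event.payload[0]),
 *                                    static_cast<int32_t>(event.payload[1]));
 *                 break;
 *             case EvsEventType::STREAM_STOPPED:
 *                 onStreamStopped(event.deviceId);
 *                 break;
 *             default:
 *                 break;  // Remaining events are informational only.
 *         }
 *     }
 */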
/**
 * Structure that identifies and describes an ultrasonics array in the car.
 *
 * An ultrasonics array represents a group of ultrasonic sensors within the
 * car. These may be sensors that are physically connected to the same hardware
 * control unit or represent a logical group of sensors like front and back.
 * The HAL is responsible for filling out this structure for each ultrasonics
 * array.
 */
struct UltrasonicsArrayDesc {
    /**
     * Unique identifier for the ultrasonics array. This may be a path or name of the
     * physical control device or a string identifying a logical group of sensors forming an array
     * such as "front_array" and "back_array".
     */
    string ultrasonicsArrayId;

    /**
     * Maximum number of readings (points on waveform) provided per sensor in
     * each data frame. Used by the client to pre-allocate the memory buffer required
     * for incoming data.
     */
    uint32_t maxReadingsPerSensorCount;

    /**
     * Maximum number of receiver sensors in a data frame. Must be between 1
     * and sensorCount. Used by the client to pre-allocate the memory buffer required
     * for incoming data.
     */
    uint32_t maxReceiversCount;

    /**
     * The order of sensors specified should preferably be in clockwise order
     * around the car, starting from the front left-most sensor.
     */
    vec<UltrasonicSensor> sensors;
};

/**
 * Structure for rotation expressed as a quaternion.
 * Convention used: unit quaternion with Hamilton convention.
 */
struct RotationQuat {
    float x;

    float y;

    float z;

    float w;
};

/** Structure for translation with x, y and z components. */
struct Translation {
    float x;

    float y;

    float z;
};

/**
 * Provides the orientation and location of a car sensor relative to the Android Automotive
 * coordinate system:
 * https://source.android.com/devices/sensors/sensor-types#auto_axes
 * The sensor pose defines the transformation to be applied to the Android Automotive axes to
 * obtain the sensor's local axes.
 * The pose consists of rotation (specified as a quaternion) and translation
 * (a vector with x, y, z components).
 * This rotation and translation, applied to sensor data in the sensor's local coordinate
 * system, transform the data to the automotive coordinate system, i.e.
 *     Pcar = (Rot * Psensor) + Trans
 * Here Pcar is a point in the automotive coordinate system and Psensor is a point in the
 * sensor's coordinate system.
 *
 * Example:
 * For a sensor on the front bumper and on the left corner of the car with its X axis pointing
 * to the front, the sensor is located at (-2, 4, 0) meters w.r.t the Android Automotive axes
 * and the sensor's local axes have a rotation of 90 degrees counter-clockwise w.r.t the
 * Android Automotive axes when viewing the car from the top on the +Z axis side:
 *
 *      ↑X sensor
 *   Y←∘______
 *     |      | front
 *     | car  |
 *     |  ↑Y  |
 *     |  ∘→X | rear
 *     |______|
 *
 * For this example the rotation and translation will be:
 * Rotation = +90 degrees around the Z axis = (w, x, y, z) = (0.7071, 0, 0, 0.7071) as a unit
 * quaternion.
 * Translation = (-2, 4, 0) in meters = (-2000, 4000, 0) in millimeters.
 *
 * Note: Every sensor type must specify its own pose.
 */
struct SensorPose {
    /**
     * Rotation part of the sensor pose, expressed as a unit quaternion.
     */
    RotationQuat rotation;

    /**
     * Translation part of the sensor pose, in (x, y, z) format with millimeter units.
     */
    Translation translation;
};
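/*
 * A minimal sketch of applying a SensorPose as documented above, i.e.
 * Pcar = (Rot * Psensor) + Trans; it is not part of the HAL definition.
 * The rotation uses the standard unit-quaternion identity
 * v' = v + 2w(q x v) + 2(q x (q x v)), where q is the vector part (x, y, z)
 * and w the scalar part. Vec3, cross() and toCarCoordinates() are
 * hypothetical helper names.
 *
 *     // C++
 *     struct Vec3 { float x, y, z; };
 *
 *     static Vec3 cross(const Vec3& a, const Vec3& b) {
 *         return {a.y * b.z - a.z * b.y,
 *                 a.z * b.x - a.x * b.z,
 *                 a.x * b.y - a.y * b.x};
 *     }
 *
 *     // pSensor and the returned point are in millimeters, matching the
 *     // units of the pose translation.
 *     Vec3 toCarCoordinates(const SensorPose& pose, const Vec3& pSensor) {
 *         const Vec3 q{pose.rotation.x, pose.rotation.y, pose.rotation.z};
 *         const float w = pose.rotation.w;
 *         const Vec3 t = cross(q, pSensor);  // q x v
 *         const Vec3 tt = cross(q, t);       // q x (q x v)
 *         return {pSensor.x + 2 * (w * t.x + tt.x) + pose.translation.x,
 *                 pSensor.y + 2 * (w * t.y + tt.y) + pose.translation.y,
 *                 pSensor.z + 2 * (w * t.z + tt.z) + pose.translation.z};
 *     }
 */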
/**
 * Structure that contains all information of an ultrasonic sensor.
 */
struct UltrasonicSensor {
    /**
     * Pose provides the orientation and location of the ultrasonic sensor within the car.
     * The +Y axis points along the center of the beam spread, the X axis points to the
     * right, and the Z axis points up.
     */
    SensorPose pose;

    /**
     * Maximum range of the sensor in millimeters.
     */
    float maxRange;

    /**
     * Half-angle of the angle of measurement of the sensor, relative to the
     * sensor's x axis, in radians.
     */
    float angleOfMeasurement;
};

/**
 * Structure that describes the data frame received from an ultrasonics array.
 *
 * Each data frame returned consists of the received waveform signals from a subset
 * of sensors in an array, as indicated by the receiversIdList. The signal is
 * transmitted at a particular time instant, indicated by timestampNs, from a
 * subset of sensors in the array, as provided in the transmittersIdList.
 */
struct UltrasonicsDataFrameDesc {
    /**
     * Timestamp of the start of the transmit signal for this data frame.
     * The timestamp unit is nanoseconds and is obtained from the Android elapsed
     * realtime clock, which is the time since the system was booted and includes
     * deep sleep.
     * timeOfFlight readings are deltas into the future relative to this timestamp.
     */
    uint64_t timestampNs;

    /**
     * Identifier of the data frame. Used by the implementation for managing multiple
     * frames in flight.
     */
    uint32_t dataFrameId;

    /**
     * List of indexes of sensors in the range [0, sensorCount - 1] that
     * transmitted the signal for this data frame.
     */
    vec<uint8_t> transmittersIdList;

    /**
     * List of indexes of sensors in the range [0, sensorCount - 1] that received
     * the signal. The order of ids must match the order of the waveforms in
     * waveformsData.
     * The size of the list is upper-bounded by maxReceiversCount.
     */
    vec<uint8_t> receiversIdList;

    /**
     * List of the number of readings corresponding to each ultrasonic sensor in
     * the receiversIdList. The order of the readings counts must match the order
     * in receiversIdList.
     * Each count is upper-bounded by maxReadingsPerSensorCount.
     */
    vec<uint32_t> receiversReadingsCountList;

    /**
     * Shared memory object containing the waveforms data. Contains one waveform
     * for each sensor specified in receiversIdList, in order.
     * Each waveform is represented by a number of readings, which are sample
     * points on the waveform. The number of readings for each waveform is as
     * specified in the receiversReadingsCountList.
     * Each reading is a pair of time of flight and resonance.
     * Time of flight (float): Time between the transmit and receive signals, in nanoseconds.
     * Resonance (float): Resonance at that time on the waveform, in the range [0.0, 1.0].
     *
     * The structure of the shared memory (example with 2 waveforms, each with 2 readings):
     *
     * Byte: |   0    | 1-4  | 5-8  | 9-12 | 13-16 ||   17   | 18-21 | 22-25 | 26-29 | 30-33 |
     * Data: | RecId1 | TOF1 | RES1 | TOF2 | RES2  || RecId2 | TOF1  | RES1  | TOF2  | RES2  |
     *       |              Waveform1              ||              Waveform2                |
     * Here:
     * RecId : Receiver's Id. Order matches the receiversIdList, type uint8_t
     * TOF   : Time of flight, type float (4 bytes)
     * RES   : Resonance, type float (4 bytes)
     * Note: All readings and waveforms are contiguous with no padding.
     */
    memory waveformsData;
};
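/*
 * A minimal sketch of walking the waveformsData layout documented above: per
 * receiver, one uint8_t receiver ID followed by that receiver's packed
 * (timeOfFlight, resonance) float pairs, with no padding between readings or
 * waveforms. It is not part of the HAL definition; Reading and
 * parseWaveforms() are hypothetical names, and `data` is assumed to be the
 * mapped base address of the shared memory region.
 *
 *     // C++
 *     #include <cstdint>
 *     #include <cstring>
 *     #include <utility>
 *     #include <vector>
 *
 *     struct Reading {
 *         float timeOfFlightNs;  // Delta from timestampNs, in nanoseconds.
 *         float resonance;       // In the range [0.0, 1.0].
 *     };
 *     static_assert(sizeof(Reading) == 2 * sizeof(float), "readings are packed");
 *
 *     std::vector<std::pair<uint8_t, std::vector<Reading>>> parseWaveforms(
 *             const uint8_t* data,
 *             const std::vector<uint32_t>& receiversReadingsCountList) {
 *         std::vector<std::pair<uint8_t, std::vector<Reading>>> waveforms;
 *         size_t offset = 0;
 *         for (uint32_t count : receiversReadingsCountList) {
 *             const uint8_t receiverId = data[offset++];  // Matches receiversIdList order.
 *             std::vector<Reading> readings(count);
 *             // memcpy avoids unaligned float loads; readings start at odd offsets.
 *             std::memcpy(readings.data(), data + offset, count * sizeof(Reading));
 *             offset += count * sizeof(Reading);
 *             waveforms.emplace_back(receiverId, std::move(readings));
 *         }
 *         return waveforms;
 *     }
 */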