Merge changes Ibe771914,I5778d588
* changes:
leaudio: Improve configuration and reconfiguration
leaudio: Fix choosing inactive ase when it is in configured state
diff --git a/android/app/src/com/android/bluetooth/a2dp/A2dpService.java b/android/app/src/com/android/bluetooth/a2dp/A2dpService.java
index 9cefc03..27cd1ba 100644
--- a/android/app/src/com/android/bluetooth/a2dp/A2dpService.java
+++ b/android/app/src/com/android/bluetooth/a2dp/A2dpService.java
@@ -18,6 +18,7 @@
import static android.Manifest.permission.BLUETOOTH_CONNECT;
+import static com.android.bluetooth.Utils.checkCallerTargetSdk;
import static com.android.bluetooth.Utils.enforceBluetoothPrivilegedPermission;
import android.annotation.RequiresPermission;
@@ -38,6 +39,7 @@
import android.content.IntentFilter;
import android.media.AudioManager;
import android.media.BluetoothProfileConnectionInfo;
+import android.os.Build;
import android.os.HandlerThread;
import android.util.Log;
@@ -1428,6 +1430,7 @@
if (service == null) {
return;
}
+ enforceBluetoothPrivilegedPermission(service);
service.setAvrcpAbsoluteVolume(volume);
}
@@ -1453,6 +1456,10 @@
A2dpService service = getService(source);
BluetoothCodecStatus codecStatus = null;
if (service != null) {
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
codecStatus = service.getCodecStatus(device);
}
receiver.send(codecStatus);
@@ -1468,6 +1475,10 @@
if (service == null) {
return;
}
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
service.setCodecConfigPreference(device, codecConfig);
}
@@ -1477,6 +1488,10 @@
if (service == null) {
return;
}
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
service.enableOptionalCodecs(device);
}
@@ -1486,16 +1501,24 @@
if (service == null) {
return;
}
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
service.disableOptionalCodecs(device);
}
@Override
- public void supportsOptionalCodecs(BluetoothDevice device, AttributionSource source,
+ public void isOptionalCodecsSupported(BluetoothDevice device, AttributionSource source,
SynchronousResultReceiver receiver) {
try {
A2dpService service = getService(source);
int codecSupport = BluetoothA2dp.OPTIONAL_CODECS_SUPPORT_UNKNOWN;
if (service != null) {
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
codecSupport = service.getSupportsOptionalCodecs(device);
}
receiver.send(codecSupport);
@@ -1505,12 +1528,16 @@
}
@Override
- public void getOptionalCodecsEnabled(BluetoothDevice device, AttributionSource source,
+ public void isOptionalCodecsEnabled(BluetoothDevice device, AttributionSource source,
SynchronousResultReceiver receiver) {
try {
A2dpService service = getService(source);
int optionalCodecEnabled = BluetoothA2dp.OPTIONAL_CODECS_PREF_UNKNOWN;
if (service != null) {
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
optionalCodecEnabled = service.getOptionalCodecsEnabled(device);
}
receiver.send(optionalCodecEnabled);
@@ -1526,6 +1553,10 @@
if (service == null) {
return;
}
+ if (checkCallerTargetSdk(mService, source.getPackageName(),
+ Build.VERSION_CODES.TIRAMISU)) {
+ enforceBluetoothPrivilegedPermission(service);
+ }
service.setOptionalCodecsEnabled(device, value);
}
diff --git a/android/app/src/com/android/bluetooth/a2dp/A2dpStateMachine.java b/android/app/src/com/android/bluetooth/a2dp/A2dpStateMachine.java
index 49301f8..f14ccac 100644
--- a/android/app/src/com/android/bluetooth/a2dp/A2dpStateMachine.java
+++ b/android/app/src/com/android/bluetooth/a2dp/A2dpStateMachine.java
@@ -52,12 +52,14 @@
import android.bluetooth.BluetoothCodecStatus;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothProfile;
+import android.bluetooth.BluetoothProtoEnums;
import android.content.Intent;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import com.android.bluetooth.Utils;
+import com.android.bluetooth.btservice.MetricsLogger;
import com.android.bluetooth.btservice.ProfileService;
import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.util.State;
@@ -298,6 +300,8 @@
event.device = mDevice;
event.valueInt = A2dpStackEvent.CONNECTION_STATE_DISCONNECTED;
sendMessage(STACK_EVENT, event);
+ MetricsLogger.getInstance().count(
+ BluetoothProtoEnums.A2DP_CONNECTION_TIMEOUT, 1);
break;
}
case DISCONNECT:
diff --git a/framework/api/system-current.txt b/framework/api/system-current.txt
index 282b7ee..af44fc6 100644
--- a/framework/api/system-current.txt
+++ b/framework/api/system-current.txt
@@ -2,19 +2,19 @@
package android.bluetooth {
public final class BluetoothA2dp implements android.bluetooth.BluetoothProfile {
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public void disableOptionalCodecs(@NonNull android.bluetooth.BluetoothDevice);
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public void enableOptionalCodecs(@NonNull android.bluetooth.BluetoothDevice);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public void disableOptionalCodecs(@NonNull android.bluetooth.BluetoothDevice);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public void enableOptionalCodecs(@NonNull android.bluetooth.BluetoothDevice);
method @Nullable @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public android.bluetooth.BufferConstraints getBufferConstraints();
- method @Nullable @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public android.bluetooth.BluetoothCodecStatus getCodecStatus(@NonNull android.bluetooth.BluetoothDevice);
+ method @Nullable @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public android.bluetooth.BluetoothCodecStatus getCodecStatus(@NonNull android.bluetooth.BluetoothDevice);
method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public int getConnectionPolicy(@NonNull android.bluetooth.BluetoothDevice);
method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public int getDynamicBufferSupport();
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public int isOptionalCodecsEnabled(@NonNull android.bluetooth.BluetoothDevice);
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public int isOptionalCodecsSupported(@NonNull android.bluetooth.BluetoothDevice);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public int isOptionalCodecsEnabled(@NonNull android.bluetooth.BluetoothDevice);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public int isOptionalCodecsSupported(@NonNull android.bluetooth.BluetoothDevice);
method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public void setAvrcpAbsoluteVolume(int);
method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public boolean setBufferLengthMillis(int, int);
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public void setCodecConfigPreference(@NonNull android.bluetooth.BluetoothDevice, @NonNull android.bluetooth.BluetoothCodecConfig);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public void setCodecConfigPreference(@NonNull android.bluetooth.BluetoothDevice, @NonNull android.bluetooth.BluetoothCodecConfig);
method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public boolean setConnectionPolicy(@NonNull android.bluetooth.BluetoothDevice, int);
- method @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public void setOptionalCodecsEnabled(@NonNull android.bluetooth.BluetoothDevice, int);
+ method @RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT, android.Manifest.permission.BLUETOOTH_PRIVILEGED}) public void setOptionalCodecsEnabled(@NonNull android.bluetooth.BluetoothDevice, int);
field @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public static final String ACTION_ACTIVE_DEVICE_CHANGED = "android.bluetooth.a2dp.profile.action.ACTIVE_DEVICE_CHANGED";
field @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT) public static final String ACTION_CODEC_CONFIG_CHANGED = "android.bluetooth.a2dp.profile.action.CODEC_CONFIG_CHANGED";
field public static final int DYNAMIC_BUFFER_SUPPORT_A2DP_OFFLOAD = 1; // 0x1
diff --git a/framework/java/android/bluetooth/BluetoothA2dp.java b/framework/java/android/bluetooth/BluetoothA2dp.java
index 6c15925..6022058 100644
--- a/framework/java/android/bluetooth/BluetoothA2dp.java
+++ b/framework/java/android/bluetooth/BluetoothA2dp.java
@@ -780,7 +780,10 @@
@Nullable
@RequiresLegacyBluetoothPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public BluetoothCodecStatus getCodecStatus(@NonNull BluetoothDevice device) {
if (DBG) Log.d(TAG, "getCodecStatus(" + device + ")");
verifyDeviceNotNull(device, "getCodecStatus");
@@ -812,7 +815,10 @@
@SystemApi
@RequiresLegacyBluetoothPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public void setCodecConfigPreference(@NonNull BluetoothDevice device,
@NonNull BluetoothCodecConfig codecConfig) {
if (DBG) Log.d(TAG, "setCodecConfigPreference(" + device + ")");
@@ -848,7 +854,10 @@
@SystemApi
@RequiresLegacyBluetoothPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public void enableOptionalCodecs(@NonNull BluetoothDevice device) {
if (DBG) Log.d(TAG, "enableOptionalCodecs(" + device + ")");
verifyDeviceNotNull(device, "enableOptionalCodecs");
@@ -870,7 +879,10 @@
@SystemApi
@RequiresLegacyBluetoothPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public void disableOptionalCodecs(@NonNull BluetoothDevice device) {
if (DBG) Log.d(TAG, "disableOptionalCodecs(" + device + ")");
verifyDeviceNotNull(device, "disableOptionalCodecs");
@@ -883,7 +895,10 @@
* @param device the remote Bluetooth device.
* @param enable if true, enable the optional codecs, otherwise disable them
*/
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
private void enableDisableOptionalCodecs(BluetoothDevice device, boolean enable) {
final IBluetoothA2dp service = getService();
if (service == null) {
@@ -914,7 +929,10 @@
@SystemApi
@RequiresLegacyBluetoothAdminPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public @OptionalCodecsSupportStatus int isOptionalCodecsSupported(
@NonNull BluetoothDevice device) {
if (DBG) log("isOptionalCodecsSupported(" + device + ")");
@@ -927,7 +945,7 @@
} else if (isEnabled() && isValidDevice(device)) {
try {
final SynchronousResultReceiver<Integer> recv = new SynchronousResultReceiver();
- service.supportsOptionalCodecs(device, mAttributionSource, recv);
+ service.isOptionalCodecsSupported(device, mAttributionSource, recv);
return recv.awaitResultNoInterrupt(getSyncTimeout()).getValue(defaultValue);
} catch (RemoteException | TimeoutException e) {
Log.e(TAG, e.toString() + "\n" + Log.getStackTraceString(new Throwable()));
@@ -948,7 +966,10 @@
@SystemApi
@RequiresLegacyBluetoothAdminPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public @OptionalCodecsPreferenceStatus int isOptionalCodecsEnabled(
@NonNull BluetoothDevice device) {
if (DBG) log("isOptionalCodecsEnabled(" + device + ")");
@@ -961,7 +982,7 @@
} else if (isEnabled() && isValidDevice(device)) {
try {
final SynchronousResultReceiver<Integer> recv = new SynchronousResultReceiver();
- service.getOptionalCodecsEnabled(device, mAttributionSource, recv);
+ service.isOptionalCodecsEnabled(device, mAttributionSource, recv);
return recv.awaitResultNoInterrupt(getSyncTimeout()).getValue(defaultValue);
} catch (RemoteException | TimeoutException e) {
Log.e(TAG, e.toString() + "\n" + Log.getStackTraceString(new Throwable()));
@@ -983,7 +1004,10 @@
@SystemApi
@RequiresLegacyBluetoothAdminPermission
@RequiresBluetoothConnectPermission
- @RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)
+ @RequiresPermission(allOf = {
+ android.Manifest.permission.BLUETOOTH_CONNECT,
+ android.Manifest.permission.BLUETOOTH_PRIVILEGED,
+ })
public void setOptionalCodecsEnabled(@NonNull BluetoothDevice device,
@OptionalCodecsPreferenceStatus int value) {
if (DBG) log("setOptionalCodecsEnabled(" + device + ")");
@@ -992,6 +1016,9 @@
&& value != BluetoothA2dp.OPTIONAL_CODECS_PREF_DISABLED
&& value != BluetoothA2dp.OPTIONAL_CODECS_PREF_ENABLED) {
Log.e(TAG, "Invalid value passed to setOptionalCodecsEnabled: " + value);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
+ throw new IllegalArgumentException("Invalid codec preference");
+ }
return;
}
final IBluetoothA2dp service = getService();
diff --git a/framework/java/android/bluetooth/BluetoothLeAudio.java b/framework/java/android/bluetooth/BluetoothLeAudio.java
index 9d77125..a1015b4 100644
--- a/framework/java/android/bluetooth/BluetoothLeAudio.java
+++ b/framework/java/android/bluetooth/BluetoothLeAudio.java
@@ -924,6 +924,10 @@
* would have to call {@link #unregisterCallback(Callback)} with
* the same callback object before registering it again.
*
+ * <p> The {@link Callback} will be invoked only if there is codec status changed for the
+ * remote device or the device is connected/disconnected in a certain group or the group
+ * status is changed.
+ *
* @param executor an {@link Executor} to execute given callback
* @param callback user implementation of the {@link Callback}
* @throws NullPointerException if a null executor or callback is given
diff --git a/framework/java/android/bluetooth/le/ScanRecord.java b/framework/java/android/bluetooth/le/ScanRecord.java
index 2ede597..375df1d 100644
--- a/framework/java/android/bluetooth/le/ScanRecord.java
+++ b/framework/java/android/bluetooth/le/ScanRecord.java
@@ -96,54 +96,222 @@
@Retention(RetentionPolicy.SOURCE)
public @interface AdvertisingDataType {}
- // The following data type values are assigned by Bluetooth SIG.
- // For more details refer to Bluetooth Generic Access Profile.
+ /**
+ * Data type is not set for the filter. Will not filter advertising data type.
+ */
public static final int DATA_TYPE_NONE = -1;
+ /**
+ * Data type is Flags, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_FLAGS = 0x01;
+ /**
+ * Data type is Incomplete List of 16-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for the details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_16_BIT_PARTIAL = 0x02;
+ /**
+ * Data type is Complete List of 16-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_16_BIT_COMPLETE = 0x03;
+ /**
+ * Data type is Incomplete List of 32-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for the details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_32_BIT_PARTIAL = 0x04;
+ /**
+ * Data type is Complete List of 32-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_32_BIT_COMPLETE = 0x05;
+ /**
+ * Data type is Incomplete List of 128-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for the details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_128_BIT_PARTIAL = 0x06;
+ /**
+ * Data type is Complete List of 128-bit Service Class UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_UUIDS_128_BIT_COMPLETE = 0x07;
+ /**
+ * Data type is Shortened Local Name, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_LOCAL_NAME_SHORT = 0x08;
+ /**
+ * Data type is Complete Local Name, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_LOCAL_NAME_COMPLETE = 0x09;
+ /**
+ * Data type is Tx Power Level, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_TX_POWER_LEVEL = 0x0A;
+ /**
+ * Data type is Class of Device, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_CLASS_OF_DEVICE = 0x0D;
+ /**
+ * Data type is Simple Pairing Hash C, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SIMPLE_PAIRING_HASH_C = 0x0E;
+ /**
+ * Data type is Simple Pairing Randomizer R, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SIMPLE_PAIRING_RANDOMIZER_R = 0x0F;
+ /**
+ * Data type is Device ID, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_DEVICE_ID = 0x10;
+ /**
+ * Data type is Security Manager Out of Band Flags, see the Bluetooth Generic Access Profile for
+ * more details.
+ */
public static final int DATA_TYPE_SECURITY_MANAGER_OUT_OF_BAND_FLAGS = 0x11;
+ /**
+ * Data type is Slave Connection Interval Range, see the Bluetooth Generic Access Profile for
+ * more details.
+ */
public static final int DATA_TYPE_SLAVE_CONNECTION_INTERVAL_RANGE = 0x12;
+ /**
+ * Data type is List of 16-bit Service Solicitation UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_SOLICITATION_UUIDS_16_BIT = 0x14;
+ /**
+ * Data type is List of 128-bit Service Solicitation UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_SOLICITATION_UUIDS_128_BIT = 0x15;
+ /**
+ * Data type is Service Data - 16-bit UUID, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SERVICE_DATA_16_BIT = 0x16;
+ /**
+ * Data type is Public Target Address, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_PUBLIC_TARGET_ADDRESS = 0x17;
+ /**
+ * Data type is Random Target Address, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_RANDOM_TARGET_ADDRESS = 0x18;
+ /**
+ * Data type is Appearance, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_APPEARANCE = 0x19;
+ /**
+ * Data type is Advertising Interval, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_ADVERTISING_INTERVAL = 0x1A;
+ /**
+ * Data type is LE Bluetooth Device Address, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_LE_BLUETOOTH_DEVICE_ADDRESS = 0x1B;
+ /**
+ * Data type is LE Role, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_LE_ROLE = 0x1C;
+ /**
+ * Data type is Simple Pairing Hash C-256, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SIMPLE_PAIRING_HASH_C_256 = 0x1D;
+ /**
+ * Data type is Simple Pairing Randomizer R-256, see the Bluetooth Generic Access Profile for
+ * more details.
+ */
public static final int DATA_TYPE_SIMPLE_PAIRING_RANDOMIZER_R_256 = 0x1E;
+ /**
+ * Data type is List of 32-bit Service Solicitation UUIDs, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_SERVICE_SOLICITATION_UUIDS_32_BIT = 0x1F;
+ /**
+ * Data type is Service Data - 32-bit UUID, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SERVICE_DATA_32_BIT = 0x20;
+ /**
+ * Data type is Service Data - 128-bit UUID, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_SERVICE_DATA_128_BIT = 0x21;
+ /**
+ * Data type is LE Secure Connections Confirmation Value, see the Bluetooth Generic Access
+ * Profile for more details.
+ */
public static final int DATA_TYPE_LE_SECURE_CONNECTIONS_CONFIRMATION_VALUE = 0x22;
+ /**
+ * Data type is LE Secure Connections Random Value, see the Bluetooth Generic Access Profile for
+ * more details.
+ */
public static final int DATA_TYPE_LE_SECURE_CONNECTIONS_RANDOM_VALUE = 0x23;
+ /**
+ * Data type is URI, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_URI = 0x24;
+ /**
+ * Data type is Indoor Positioning, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_INDOOR_POSITIONING = 0x25;
+ /**
+ * Data type is Transport Discovery Data, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_TRANSPORT_DISCOVERY_DATA = 0x26;
+ /**
+ * Data type is LE Supported Features, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_LE_SUPPORTED_FEATURES = 0x27;
+ /**
+ * Data type is Channel Map Update Indication, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_CHANNEL_MAP_UPDATE_INDICATION = 0x28;
+ /**
+ * Data type is PB-ADV, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_PB_ADV = 0x29;
+ /**
+ * Data type is Mesh Message, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_MESH_MESSAGE = 0x2A;
+ /**
+ * Data type is Mesh Beacon, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_MESH_BEACON = 0x2B;
+ /**
+ * Data type is BIGInfo, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_BIG_INFO = 0x2C;
+ /**
+ * Data type is Broadcast_Code, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_BROADCAST_CODE = 0x2D;
+ /**
+ * Data type is Resolvable Set Identifier, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_RESOLVABLE_SET_IDENTIFIER = 0x2E;
+ /**
+ * Data type is Advertising Interval - long, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_ADVERTISING_INTERVAL_LONG = 0x2F;
+ /**
+ * Data type is 3D Information Data, see the Bluetooth Generic Access Profile for more details.
+ */
public static final int DATA_TYPE_3D_INFORMATION_DATA = 0x3D;
+ /**
+ * Data type is Manufacturer Specific Data, see the Bluetooth Generic Access Profile for more
+ * details.
+ */
public static final int DATA_TYPE_MANUFACTURER_SPECIFIC_DATA = 0xFF;
// Flags of the advertising data.
diff --git a/system/binder/android/bluetooth/IBluetoothA2dp.aidl b/system/binder/android/bluetooth/IBluetoothA2dp.aidl
index 8745407..3e38a29 100644
--- a/system/binder/android/bluetooth/IBluetoothA2dp.aidl
+++ b/system/binder/android/bluetooth/IBluetoothA2dp.aidl
@@ -66,23 +66,23 @@
void getConnectionPolicy(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
@JavaPassthrough(annotation="@android.annotation.RequiresNoPermission")
void isAvrcpAbsoluteVolumeSupported(in SynchronousResultReceiver receiver);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
oneway void setAvrcpAbsoluteVolume(int volume, in AttributionSource attributionSource);
@JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
void isA2dpPlaying(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
void getCodecStatus(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
oneway void setCodecConfigPreference(in BluetoothDevice device, in BluetoothCodecConfig codecConfig, in AttributionSource attributionSource);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
oneway void enableOptionalCodecs(in BluetoothDevice device, in AttributionSource attributionSource);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
oneway void disableOptionalCodecs(in BluetoothDevice device, in AttributionSource attributionSource);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
- void supportsOptionalCodecs(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
- void getOptionalCodecsEnabled(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
- @JavaPassthrough(annotation="@android.annotation.RequiresPermission(android.Manifest.permission.BLUETOOTH_CONNECT)")
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
+ void isOptionalCodecsSupported(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
+ void isOptionalCodecsEnabled(in BluetoothDevice device, in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
+ @JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
oneway void setOptionalCodecsEnabled(in BluetoothDevice device, int value, in AttributionSource attributionSource);
@JavaPassthrough(annotation="@android.annotation.RequiresPermission(allOf={android.Manifest.permission.BLUETOOTH_CONNECT,android.Manifest.permission.BLUETOOTH_PRIVILEGED})")
void getDynamicBufferSupport(in AttributionSource attributionSource, in SynchronousResultReceiver receiver);
diff --git a/system/blueberry/tests/gd/cert/gd_device.py b/system/blueberry/tests/gd/cert/gd_device.py
index 8074c52..7fbc93c 100644
--- a/system/blueberry/tests/gd/cert/gd_device.py
+++ b/system/blueberry/tests/gd/cert/gd_device.py
@@ -653,7 +653,7 @@
self.adb.shell("setprop persist.sys.timezone %s" % target_timezone)
self.reboot()
self.adb.remount()
- device_tz = self.adb.shell("date +%z")
+ device_tz = self.adb.shell("date +%z").decode(UTF_8).rstrip()
asserts.assert_equal(
host_tz, device_tz, "Device timezone %s still does not match host "
"timezone %s after reset" % (device_tz, host_tz))
diff --git a/system/bta/hh/bta_hh_act.cc b/system/bta/hh/bta_hh_act.cc
index 1af7b8f..dd9decd 100644
--- a/system/bta/hh/bta_hh_act.cc
+++ b/system/bta/hh/bta_hh_act.cc
@@ -1017,33 +1017,56 @@
}
void bta_hh_write_dev_act(tBTA_HH_DEV_CB* p_cb, const tBTA_HH_DATA* p_data) {
- tBTA_HH_CBDATA cbdata = {BTA_HH_OK, 0};
uint16_t event =
(p_data->api_sndcmd.t_type - HID_TRANS_GET_REPORT) + BTA_HH_GET_RPT_EVT;
if (p_cb->is_le_device)
bta_hh_le_write_dev_act(p_cb, p_data);
- else
- {
-
- cbdata.handle = p_cb->hid_handle;
-
+ else {
/* match up BTE/BTA report/boot mode def */
const uint8_t api_sndcmd_param =
convert_api_sndcmd_param(p_data->api_sndcmd);
- if (HID_HostWriteDev(p_cb->hid_handle, p_data->api_sndcmd.t_type,
- api_sndcmd_param, p_data->api_sndcmd.data,
- p_data->api_sndcmd.rpt_id,
- p_data->api_sndcmd.p_data) != HID_SUCCESS) {
- APPL_TRACE_ERROR("HID_HostWriteDev Error ");
- cbdata.status = BTA_HH_ERR;
+ tHID_STATUS status = HID_HostWriteDev(p_cb->hid_handle,
+ p_data->api_sndcmd.t_type,
+ api_sndcmd_param,
+ p_data->api_sndcmd.data,
+ p_data->api_sndcmd.rpt_id,
+ p_data->api_sndcmd.p_data);
+ if (status != HID_SUCCESS) {
+ LOG_ERROR("HID_HostWriteDev Error, status: %d", status);
if (p_data->api_sndcmd.t_type != HID_TRANS_CONTROL &&
- p_data->api_sndcmd.t_type != HID_TRANS_DATA)
- (*bta_hh_cb.p_cback)(event, (tBTA_HH*)&cbdata);
- else if (api_sndcmd_param == BTA_HH_CTRL_VIRTUAL_CABLE_UNPLUG)
- (*bta_hh_cb.p_cback)(BTA_HH_VC_UNPLUG_EVT, (tBTA_HH*)&cbdata);
+ p_data->api_sndcmd.t_type != HID_TRANS_DATA) {
+ BT_HDR cbhdr = {
+ .event = BTA_HH_GET_RPT_EVT,
+ .len = 0,
+ .offset = 0,
+ .layer_specific = 0,
+ };
+ tBTA_HH cbdata = {
+ .hs_data = {
+ .status = BTA_HH_ERR,
+ .handle = p_cb->hid_handle,
+ .rsp_data = {
+ .p_rpt_data = &cbhdr,
+ },
+ },
+ };
+ (*bta_hh_cb.p_cback)(event, &cbdata);
+ } else if (api_sndcmd_param == BTA_HH_CTRL_VIRTUAL_CABLE_UNPLUG) {
+ tBTA_HH cbdata = {
+ .dev_status = {
+ .status = BTA_HH_ERR,
+ .handle = p_cb->hid_handle,
+ },
+ };
+ (*bta_hh_cb.p_cback)(BTA_HH_VC_UNPLUG_EVT, &cbdata);
+ } else {
+ LOG_ERROR("skipped executing callback in hid host error handling. "
+ "command type: %d, param: %d", p_data->api_sndcmd.t_type,
+ p_data->api_sndcmd.param);
+ }
} else {
switch (p_data->api_sndcmd.t_type) {
case HID_TRANS_SET_PROTOCOL:
diff --git a/system/bta/le_audio/broadcaster/state_machine.cc b/system/bta/le_audio/broadcaster/state_machine.cc
index bbf7a6c..af15c8c 100644
--- a/system/bta/le_audio/broadcaster/state_machine.cc
+++ b/system/bta/le_audio/broadcaster/state_machine.cc
@@ -488,7 +488,8 @@
SetMuted(true);
IsoManager::GetInstance()->RemoveIsoDataPath(
- conn_handle, bluetooth::hci::iso_manager::kIsoDataPathDirectionIn);
+ conn_handle,
+ bluetooth::hci::iso_manager::kRemoveIsoDataPathDirectionInput);
}
void HandleHciEvent(uint16_t event, void* data) override {
diff --git a/system/bta/le_audio/client.cc b/system/bta/le_audio/client.cc
index fb86745..b005037 100644
--- a/system/bta/le_audio/client.cc
+++ b/system/bta/le_audio/client.cc
@@ -2316,8 +2316,8 @@
std::vector<uint16_t> mixed(left->size() * 2);
for (size_t i = 0; i < left->size(); i++) {
- mixed[2 * i] = (*left)[i];
- mixed[2 * i + 1] = (*right)[i];
+ mixed[2 * i] = (*right)[i];
+ mixed[2 * i + 1] = (*left)[i];
}
to_write = sizeof(int16_t) * mixed.size();
written =
@@ -2338,8 +2338,8 @@
std::vector<uint16_t> mixed(mono_size * 2);
for (size_t i = 0; i < mono_size; i++) {
- mixed[2 * i] = left ? (*left)[i] : 0;
- mixed[2 * i + 1] = right ? (*right)[i] : 0;
+ mixed[2 * i] = right ? (*right)[i] : 0;
+ mixed[2 * i + 1] = left ? (*left)[i] : 0;
}
to_write = sizeof(int16_t) * mixed.size();
written =
diff --git a/system/bta/le_audio/state_machine.cc b/system/bta/le_audio/state_machine.cc
index d8b783a..4232bd9 100644
--- a/system/bta/le_audio/state_machine.cc
+++ b/system/bta/le_audio/state_machine.cc
@@ -832,11 +832,11 @@
IsoManager::GetInstance()->RemoveIsoDataPath(
ase->cis_conn_hdl,
(ases_pair.sink
- ? bluetooth::hci::iso_manager::kIsoDataPathDirectionOut
+ ? bluetooth::hci::iso_manager::kRemoveIsoDataPathDirectionOutput
: 0x00) |
- (ases_pair.source
- ? bluetooth::hci::iso_manager::kIsoDataPathDirectionIn
- : 0x00));
+ (ases_pair.source ? bluetooth::hci::iso_manager::
+ kRemoveIsoDataPathDirectionInput
+ : 0x00));
}
}
@@ -1070,10 +1070,11 @@
IsoManager::GetInstance()->RemoveIsoDataPath(
ase->cis_conn_hdl,
- (ases_pair.sink ? bluetooth::hci::iso_manager::kIsoDataPathDirectionOut
- : 0x00) |
+ (ases_pair.sink
+ ? bluetooth::hci::iso_manager::kRemoveIsoDataPathDirectionOutput
+ : 0x00) |
(ases_pair.source
- ? bluetooth::hci::iso_manager::kIsoDataPathDirectionIn
+ ? bluetooth::hci::iso_manager::kRemoveIsoDataPathDirectionInput
: 0x00));
}
@@ -1902,12 +1903,12 @@
leAudioDevice->GetAsesByCisConnHdl(ase->cis_conn_hdl);
IsoManager::GetInstance()->RemoveIsoDataPath(
ase->cis_conn_hdl,
- (ases_pair.sink
- ? bluetooth::hci::iso_manager::kIsoDataPathDirectionOut
- : 0x00) |
- (ases_pair.source
- ? bluetooth::hci::iso_manager::kIsoDataPathDirectionIn
- : 0x00));
+ (ases_pair.sink ? bluetooth::hci::iso_manager::
+ kRemoveIsoDataPathDirectionOutput
+ : 0x00) |
+ (ases_pair.source ? bluetooth::hci::iso_manager::
+ kRemoveIsoDataPathDirectionInput
+ : 0x00));
} else if (ase->data_path_state ==
AudioStreamDataPathState::CIS_ESTABLISHED ||
ase->data_path_state ==
diff --git a/system/btif/include/btif_metrics_logging.h b/system/btif/include/btif_metrics_logging.h
index c24c907..632062e 100644
--- a/system/btif/include/btif_metrics_logging.h
+++ b/system/btif/include/btif_metrics_logging.h
@@ -47,6 +47,9 @@
uint32_t cmd_status,
int32_t transmit_power_level);
+void log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum key,
+ int64_t value);
+
void log_socket_connection_state(
const RawAddress& address, int port, int type,
android::bluetooth::SocketConnectionstateEnum connection_state,
diff --git a/system/btif/src/btif_av.cc b/system/btif/src/btif_av.cc
index e677221..ef5dd46 100644
--- a/system/btif/src/btif_av.cc
+++ b/system/btif/src/btif_av.cc
@@ -24,6 +24,7 @@
#include <base/logging.h>
#include <base/strings/stringprintf.h>
#include <frameworks/proto_logging/stats/enums/bluetooth/a2dp/enums.pb.h>
+#include <frameworks/proto_logging/stats/enums/bluetooth/enums.pb.h>
#include <cstdint>
#include <future>
@@ -39,6 +40,7 @@
#include "btif/include/btif_a2dp_source.h"
#include "btif/include/btif_av_co.h"
#include "btif/include/btif_common.h"
+#include "btif/include/btif_metrics_logging.h"
#include "btif/include/btif_profile_queue.h"
#include "btif/include/btif_rc.h"
#include "btif/include/btif_util.h"
@@ -1676,6 +1678,9 @@
"%s: Peer %s : event=%s: transitioning to Idle due to ACL Disconnect",
__PRETTY_FUNCTION__, peer_.PeerAddress().ToString().c_str(),
BtifAvEvent::EventName(event).c_str());
+ log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_CONNECTION_ACL_DISCONNECTED,
+ 1);
btif_report_connection_state(peer_.PeerAddress(),
BTAV_CONNECTION_STATE_DISCONNECTED);
peer_.StateMachine().TransitionTo(BtifAvStateMachine::kStateIdle);
@@ -1688,6 +1693,9 @@
peer_.PeerAddress().ToString().c_str(),
BtifAvEvent::EventName(event).c_str(),
peer_.FlagsToString().c_str());
+ log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_CONNECTION_REJECT_EVT,
+ 1);
btif_report_connection_state(peer_.PeerAddress(),
BTAV_CONNECTION_STATE_DISCONNECTED);
peer_.StateMachine().TransitionTo(BtifAvStateMachine::kStateIdle);
@@ -1714,6 +1722,9 @@
av_state = BtifAvStateMachine::kStateOpened;
peer_.SetEdr(p_bta_data->open.edr);
CHECK(peer_.PeerSep() == p_bta_data->open.sep);
+ log_counter_metrics_btif(
+ android::bluetooth::CodePathCounterKeyEnum::A2DP_CONNECTION_SUCCESS,
+ 1);
} else {
if (btif_rc_is_connected_peer(peer_.PeerAddress())) {
// Disconnect the AVRCP connection, in case the A2DP connectiton
@@ -1721,6 +1732,10 @@
BTIF_TRACE_WARNING("%s: Peer %s : Disconnecting AVRCP",
__PRETTY_FUNCTION__,
peer_.PeerAddress().ToString().c_str());
+ log_counter_metrics_btif(
+ android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_CONNECTION_FAILURE,
+ 1);
uint8_t peer_handle =
btif_rc_get_connected_peer_handle(peer_.PeerAddress());
if (peer_handle != BTRC_HANDLE_NONE) {
@@ -1768,6 +1783,9 @@
"ignore Connect request",
__PRETTY_FUNCTION__, peer_.PeerAddress().ToString().c_str(),
BtifAvEvent::EventName(event).c_str());
+ log_counter_metrics_btif(
+ android::bluetooth::CodePathCounterKeyEnum::A2DP_ALREADY_CONNECTING,
+ 1);
btif_queue_advance();
} break;
@@ -1779,6 +1797,9 @@
"ignore incoming request",
__PRETTY_FUNCTION__, peer_.PeerAddress().ToString().c_str(),
BtifAvEvent::EventName(event).c_str());
+ log_counter_metrics_btif(
+ android::bluetooth::CodePathCounterKeyEnum::A2DP_ALREADY_CONNECTING,
+ 1);
} break;
case BTIF_AV_OFFLOAD_START_REQ_EVT:
@@ -1787,6 +1808,9 @@
peer_.PeerAddress().ToString().c_str(),
BtifAvEvent::EventName(event).c_str());
btif_a2dp_on_offload_started(peer_.PeerAddress(), BTA_AV_FAIL);
+ log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_OFFLOAD_START_REQ_FAILURE,
+ 1);
break;
case BTA_AV_CLOSE_EVT:
@@ -1794,6 +1818,8 @@
btif_report_connection_state(peer_.PeerAddress(),
BTAV_CONNECTION_STATE_DISCONNECTED);
peer_.StateMachine().TransitionTo(BtifAvStateMachine::kStateIdle);
+ log_counter_metrics_btif(
+ android::bluetooth::CodePathCounterKeyEnum::A2DP_CONNECTION_CLOSE, 1);
if (peer_.SelfInitiatedConnection()) {
btif_queue_advance();
}
@@ -1804,6 +1830,9 @@
btif_report_connection_state(peer_.PeerAddress(),
BTAV_CONNECTION_STATE_DISCONNECTED);
peer_.StateMachine().TransitionTo(BtifAvStateMachine::kStateIdle);
+ log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_CONNECTION_DISCONNECTED,
+ 1);
if (peer_.SelfInitiatedConnection()) {
btif_queue_advance();
}
@@ -1812,6 +1841,9 @@
CHECK_RC_EVENT(event, (tBTA_AV*)p_data);
default:
+ log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum::
+ A2DP_CONNECTION_UNKNOWN_EVENT,
+ 1);
BTIF_TRACE_WARNING("%s: Peer %s : Unhandled event=%s",
__PRETTY_FUNCTION__,
peer_.PeerAddress().ToString().c_str(),
diff --git a/system/btif/src/btif_metrics_logging.cc b/system/btif/src/btif_metrics_logging.cc
index 541e145..8af5033 100644
--- a/system/btif/src/btif_metrics_logging.cc
+++ b/system/btif/src/btif_metrics_logging.cc
@@ -77,6 +77,11 @@
server_port, socket_role);
}
+void log_counter_metrics_btif(android::bluetooth::CodePathCounterKeyEnum key,
+ int64_t value) {
+ bluetooth::shim::CountCounterMetrics(key, value);
+}
+
bool init_metric_id_allocator(
const std::unordered_map<RawAddress, int>& paired_device_map,
bluetooth::shim::CallbackLegacy save_device_callback,
diff --git a/system/gd/hci/acl_manager/le_impl.h b/system/gd/hci/acl_manager/le_impl.h
index a9c2ac9..c723810 100644
--- a/system/gd/hci/acl_manager/le_impl.h
+++ b/system/gd/hci/acl_manager/le_impl.h
@@ -591,9 +591,9 @@
}
void disarm_connectability() {
- if (connectability_state_ != ConnectabilityState::ARMED) {
+ if (connectability_state_ != ConnectabilityState::ARMED && connectability_state_ != ConnectabilityState::ARMING) {
LOG_ERROR(
- "Attempting to re-arm le connection state machine in unexpected state:%s",
+ "Attempting to disarm le connection state machine in unexpected state:%s",
connectability_state_machine_text(connectability_state_).c_str());
return;
}
@@ -669,6 +669,7 @@
ASSERT(check_connection_parameters(conn_interval_min, conn_interval_max, conn_latency, supervision_timeout));
connecting_le_.insert(address_with_type);
+ connectability_state_ = ConnectabilityState::ARMING;
if (initiator_filter_policy == InitiatorFilterPolicy::USE_CONNECT_LIST) {
address_with_type = AddressWithType();
diff --git a/system/gd/hci/hci_packets.pdl b/system/gd/hci/hci_packets.pdl
index fd17960..2051261 100644
--- a/system/gd/hci/hci_packets.pdl
+++ b/system/gd/hci/hci_packets.pdl
@@ -4370,10 +4370,16 @@
_reserved_ : 4,
}
+enum RemoveDataPathDirection : 8 {
+ INPUT = 1,
+ OUTPUT = 2,
+ INPUT_AND_OUTPUT = 3,
+}
+
packet LeRemoveIsoDataPath : LeIsoCommand (op_code = LE_REMOVE_ISO_DATA_PATH) {
connection_handle : 12,
_reserved_ : 4,
- data_path_direction : DataPathDirection,
+ remove_data_path_direction : RemoveDataPathDirection,
}
packet LeRemoveIsoDataPathComplete : CommandComplete (command_op_code = LE_REMOVE_ISO_DATA_PATH) {
diff --git a/system/main/shim/btm.cc b/system/main/shim/btm.cc
index 422c1a2..3ffd99a 100644
--- a/system/main/shim/btm.cc
+++ b/system/main/shim/btm.cc
@@ -120,7 +120,7 @@
uint8_t primary_phy, uint8_t secondary_phy, uint8_t advertising_sid,
int8_t tx_power, int8_t rssi, uint16_t periodic_advertising_interval,
std::vector<uint8_t> advertising_data) {
- tBLE_ADDR_TYPE ble_address_type = static_cast<tBLE_ADDR_TYPE>(address_type);
+ tBLE_ADDR_TYPE ble_address_type = to_ble_addr_type(address_type);
uint16_t extended_event_type = 0;
RawAddress raw_address;
diff --git a/system/main/shim/l2c_api.cc b/system/main/shim/l2c_api.cc
index ecbe502..3490425 100644
--- a/system/main/shim/l2c_api.cc
+++ b/system/main/shim/l2c_api.cc
@@ -1211,7 +1211,7 @@
}
auto local = channel->second->GetLinkOptions()->GetLocalAddress();
conn_addr = ToRawAddress(local.GetAddress());
- *p_addr_type = static_cast<tBLE_ADDR_TYPE>(local.GetAddressType());
+ *p_addr_type = to_ble_addr_type(static_cast<uint8_t>(local.GetAddressType()));
}
bool L2CA_ReadRemoteConnectionAddr(const RawAddress& pseudo_addr,
@@ -1224,7 +1224,7 @@
}
auto info = le_link_property_listener_shim_.info_[remote].address_with_type;
conn_addr = ToRawAddress(info.GetAddress());
- *p_addr_type = static_cast<tBLE_ADDR_TYPE>(info.GetAddressType());
+ *p_addr_type = to_ble_addr_type(static_cast<uint8_t>(info.GetAddressType()));
return true;
}
diff --git a/system/main/shim/le_scanning_manager.cc b/system/main/shim/le_scanning_manager.cc
index c20629e..dc776e0 100644
--- a/system/main/shim/le_scanning_manager.cc
+++ b/system/main/shim/le_scanning_manager.cc
@@ -338,28 +338,29 @@
int8_t tx_power, int8_t rssi, uint16_t periodic_advertising_interval,
std::vector<uint8_t> advertising_data) {
RawAddress raw_address = ToRawAddress(address);
+ tBLE_ADDR_TYPE ble_addr_type = to_ble_addr_type(address_type);
- if (address_type != BLE_ADDR_ANONYMOUS) {
- btm_ble_process_adv_addr(raw_address, &address_type);
+ if (ble_addr_type != BLE_ADDR_ANONYMOUS) {
+ btm_ble_process_adv_addr(raw_address, &ble_addr_type);
}
do_in_jni_thread(
FROM_HERE,
base::BindOnce(&BleScannerInterfaceImpl::handle_remote_properties,
- base::Unretained(this), raw_address, address_type,
+ base::Unretained(this), raw_address, ble_addr_type,
advertising_data));
do_in_jni_thread(
FROM_HERE,
base::BindOnce(&ScanningCallbacks::OnScanResult,
base::Unretained(scanning_callbacks_), event_type,
- address_type, raw_address, primary_phy, secondary_phy,
- advertising_sid, tx_power, rssi,
- periodic_advertising_interval, advertising_data));
+ static_cast<uint8_t>(address_type), raw_address,
+ primary_phy, secondary_phy, advertising_sid, tx_power,
+ rssi, periodic_advertising_interval, advertising_data));
// TODO: Remove when StartInquiry in GD part implemented
btm_ble_process_adv_pkt_cont_for_inquiry(
- event_type, address_type, raw_address, primary_phy, secondary_phy,
+ event_type, ble_addr_type, raw_address, primary_phy, secondary_phy,
advertising_sid, tx_power, rssi, periodic_advertising_interval,
advertising_data);
}
diff --git a/system/stack/btm/btm_ble_adv_filter.cc b/system/stack/btm/btm_ble_adv_filter.cc
index 2a7f3c3..ec4388a 100644
--- a/system/stack/btm/btm_ble_adv_filter.cc
+++ b/system/stack/btm/btm_ble_adv_filter.cc
@@ -649,7 +649,7 @@
case BTM_BLE_PF_ADDR_FILTER: {
tBLE_BD_ADDR target_addr;
target_addr.bda = cmd.address;
- target_addr.type = cmd.addr_type;
+ target_addr.type = to_ble_addr_type(cmd.addr_type);
BTM_LE_PF_addr_filter(action, filt_index, target_addr,
base::DoNothing());
@@ -688,7 +688,7 @@
// Set the IRK
tBTM_LE_PID_KEYS pid_keys;
pid_keys.irk = cmd.irk;
- pid_keys.identity_addr_type = cmd.addr_type;
+ pid_keys.identity_addr_type = to_ble_addr_type(cmd.addr_type);
pid_keys.identity_addr = cmd.address;
// Add it to the union to pass to SecAddBleKey
tBTM_LE_KEY_VALUE le_key;
diff --git a/system/stack/btm/btm_ble_batchscan.cc b/system/stack/btm/btm_ble_batchscan.cc
index 6c9c749..2278880 100644
--- a/system/stack/btm/btm_ble_batchscan.cc
+++ b/system/stack/btm/btm_ble_batchscan.cc
@@ -126,7 +126,7 @@
// Make sure the device is known
BTM_SecAddBleDevice(adv_data.bd_addr, BT_DEVICE_TYPE_BLE,
- adv_data.addr_type);
+ to_ble_addr_type(adv_data.addr_type));
ble_advtrack_cb.p_track_cback(&adv_data);
return;
diff --git a/system/stack/btm/btm_ble_bgconn.cc b/system/stack/btm/btm_ble_bgconn.cc
index 249f9ed..965e0d0 100644
--- a/system/stack/btm/btm_ble_bgconn.cc
+++ b/system/stack/btm/btm_ble_bgconn.cc
@@ -70,22 +70,6 @@
static std::unordered_map<RawAddress, BackgroundConnection, BgConnHash>
background_connections;
-/** This function is to stop auto connection procedure */
-static bool btm_ble_stop_auto_conn() {
- BTM_TRACE_EVENT("%s", __func__);
-
- if (!btm_cb.ble_ctr_cb.is_connection_state_connecting()) {
- LOG_DEBUG(
- "No need to stop auto connection procedure that is not connecting");
- return false;
- }
-
- btm_ble_create_conn_cancel();
-
- btm_cb.ble_ctr_cb.reset_acceptlist_process_in_progress();
- return true;
-}
-
const tBLE_BD_ADDR convert_to_address_with_type(
const RawAddress& bd_addr, const tBTM_SEC_DEV_REC* p_dev_rec) {
if (p_dev_rec == nullptr || !p_dev_rec->is_device_type_has_ble()) {
@@ -133,31 +117,6 @@
/*******************************************************************************
*
- * Function btm_ble_bgconn_cancel_if_disconnected
- *
- * Description If a device has been disconnected, it must be re-added to
- * the acceptlist. If needed, this function cancels a pending
- * initiate command in order to trigger restart of the initiate
- * command which in turn updates the acceptlist.
- *
- * Parameters bd_addr: updated device
- *
- ******************************************************************************/
-void btm_ble_bgconn_cancel_if_disconnected(const RawAddress& bd_addr) {
- if (!btm_cb.ble_ctr_cb.is_connection_state_connecting()) return;
-
- auto map_it = background_connections.find(bd_addr);
- if (map_it != background_connections.end()) {
- BackgroundConnection* connection = &map_it->second;
- if (!connection->in_controller_wl && !connection->pending_removal &&
- !BTM_IsAclConnectionUp(bd_addr, BT_TRANSPORT_LE)) {
- btm_ble_stop_auto_conn();
- }
- }
-}
-
-/*******************************************************************************
- *
* Function btm_ble_suspend_bg_conn
*
* Description This function is to suspend an active background connection
diff --git a/system/stack/btm/btm_ble_gap.cc b/system/stack/btm/btm_ble_gap.cc
index 34492d4..25fcddd 100644
--- a/system/stack/btm/btm_ble_gap.cc
+++ b/system/stack/btm/btm_ble_gap.cc
@@ -1107,8 +1107,10 @@
RawAddress bda = addr;
alarm_cancel(sync_timeout_alarm);
- if (address_type & BLE_ADDR_TYPE_ID_BIT) {
- btm_identity_addr_to_random_pseudo(&bda, &address_type, true);
+
+ tBLE_ADDR_TYPE ble_addr_type = to_ble_addr_type(address_type);
+ if (ble_addr_type & BLE_ADDR_TYPE_ID_BIT) {
+ btm_identity_addr_to_random_pseudo(&bda, &ble_addr_type, true);
#if (BLE_PRIVACY_SPT == TRUE)
btm_ble_disable_resolving_list(BTM_BLE_RL_SCAN, true);
#endif
@@ -1129,8 +1131,8 @@
tBTM_BLE_PERIODIC_SYNC* ps = &btm_ble_pa_sync_cb.p_sync[index];
ps->sync_handle = sync_handle;
ps->sync_state = PERIODIC_SYNC_ESTABLISHED;
- ps->sync_start_cb.Run(status, sync_handle, adv_sid, address_type, bda, phy,
- interval);
+ ps->sync_start_cb.Run(status, sync_handle, adv_sid,
+ from_ble_addr_type(ble_addr_type), bda, phy, interval);
btm_sync_queue_advance();
}
diff --git a/system/stack/btm/btm_ble_int.h b/system/stack/btm/btm_ble_int.h
index d92b953..8ca11f7 100644
--- a/system/stack/btm/btm_ble_int.h
+++ b/system/stack/btm/btm_ble_int.h
@@ -99,8 +99,6 @@
extern void btm_ble_update_mode_operation(uint8_t link_role,
const RawAddress* bda,
tHCI_STATUS status);
-extern void btm_ble_bgconn_cancel_if_disconnected(const RawAddress& bd_addr);
-
/* BLE address management */
extern void btm_gen_resolvable_private_addr(
base::Callback<void(const RawAddress& rpa)> cb);
diff --git a/system/stack/include/btm_iso_api_types.h b/system/stack/include/btm_iso_api_types.h
index 05a4449..63ca700 100644
--- a/system/stack/include/btm_iso_api_types.h
+++ b/system/stack/include/btm_iso_api_types.h
@@ -44,6 +44,9 @@
constexpr uint8_t kIsoDataPathDirectionIn = 0x00;
constexpr uint8_t kIsoDataPathDirectionOut = 0x01;
+constexpr uint8_t kRemoveIsoDataPathDirectionInput = 0x01;
+constexpr uint8_t kRemoveIsoDataPathDirectionOutput = 0x02;
+
constexpr uint8_t kIsoDataPathHci = 0x00;
constexpr uint8_t kIsoDataPathPlatformDefault = 0x01;
constexpr uint8_t kIsoDataPathDisabled = 0xFF;
diff --git a/system/stack/test/btm_iso_test.cc b/system/stack/test/btm_iso_test.cc
index f498429..0505a7e 100644
--- a/system/stack/test/btm_iso_test.cc
+++ b/system/stack/test/btm_iso_test.cc
@@ -1674,7 +1674,7 @@
// Setup and remove data paths for all CISes
path_params.data_path_dir =
- bluetooth::hci::iso_manager::kIsoDataPathDirectionIn;
+ bluetooth::hci::iso_manager::kRemoveIsoDataPathDirectionInput;
for (auto& handle : volatile_test_cig_create_cmpl_evt_.conn_handles) {
IsoManager::GetInstance()->SetupIsoDataPath(handle, path_params);
diff --git a/system/test/mock/mock_stack_btm_ble_bgconn.cc b/system/test/mock/mock_stack_btm_ble_bgconn.cc
index f98b614..4658cfc 100644
--- a/system/test/mock/mock_stack_btm_ble_bgconn.cc
+++ b/system/test/mock/mock_stack_btm_ble_bgconn.cc
@@ -48,8 +48,6 @@
// Function state capture and return values, if needed
struct convert_to_address_with_type convert_to_address_with_type;
struct btm_update_scanner_filter_policy btm_update_scanner_filter_policy;
-struct btm_ble_bgconn_cancel_if_disconnected
- btm_ble_bgconn_cancel_if_disconnected;
struct btm_ble_suspend_bg_conn btm_ble_suspend_bg_conn;
struct btm_ble_resume_bg_conn btm_ble_resume_bg_conn;
struct BTM_BackgroundConnectAddressKnown BTM_BackgroundConnectAddressKnown;
@@ -75,11 +73,6 @@
test::mock::stack_btm_ble_bgconn::btm_update_scanner_filter_policy(
scan_policy);
}
-void btm_ble_bgconn_cancel_if_disconnected(const RawAddress& bd_addr) {
- mock_function_count_map[__func__]++;
- test::mock::stack_btm_ble_bgconn::btm_ble_bgconn_cancel_if_disconnected(
- bd_addr);
-}
bool btm_ble_suspend_bg_conn(void) {
mock_function_count_map[__func__]++;
return test::mock::stack_btm_ble_bgconn::btm_ble_suspend_bg_conn();
diff --git a/system/test/mock/mock_stack_btm_ble_bgconn.h b/system/test/mock/mock_stack_btm_ble_bgconn.h
index 68e308d..f88cfb1 100644
--- a/system/test/mock/mock_stack_btm_ble_bgconn.h
+++ b/system/test/mock/mock_stack_btm_ble_bgconn.h
@@ -79,16 +79,6 @@
void operator()(tBTM_BLE_SFP scan_policy) { body(scan_policy); };
};
extern struct btm_update_scanner_filter_policy btm_update_scanner_filter_policy;
-// Name: btm_ble_bgconn_cancel_if_disconnected
-// Params: const RawAddress& bd_addr
-// Returns: void
-struct btm_ble_bgconn_cancel_if_disconnected {
- std::function<void(const RawAddress& bd_addr)> body{
- [](const RawAddress& bd_addr) {}};
- void operator()(const RawAddress& bd_addr) { body(bd_addr); };
-};
-extern struct btm_ble_bgconn_cancel_if_disconnected
- btm_ble_bgconn_cancel_if_disconnected;
// Name: btm_ble_suspend_bg_conn
// Params: void
// Returns: bool
diff --git a/tools/pdl/Android.bp b/tools/pdl/Android.bp
new file mode 100644
index 0000000..cbf8e53
--- /dev/null
+++ b/tools/pdl/Android.bp
@@ -0,0 +1,17 @@
+
+rust_binary_host {
+ name: "pdl",
+ srcs: [
+ "src/main.rs",
+ ],
+ rustlibs: [
+ "libpest",
+ "libserde",
+ "libserde_json",
+ "libstructopt",
+ "libcodespan_reporting",
+ ],
+ proc_macros: [
+ "libpest_derive",
+ ],
+}
diff --git a/tools/pdl/src/ast.rs b/tools/pdl/src/ast.rs
new file mode 100644
index 0000000..e25b01a
--- /dev/null
+++ b/tools/pdl/src/ast.rs
@@ -0,0 +1,301 @@
+use codespan_reporting::diagnostic;
+use codespan_reporting::files;
+use serde::Serialize;
+use std::fmt;
+use std::ops;
+
+/// File identifier.
+/// References a source file in the source database.
+pub type FileId = usize;
+
+/// Source database.
+/// Stores the source file contents for reference.
+pub type SourceDatabase = files::SimpleFiles<String, String>;
+
+#[derive(Debug, Copy, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
+pub struct SourceLocation {
+ pub offset: usize,
+ pub line: usize,
+ pub column: usize,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct SourceRange {
+ pub file: FileId,
+ pub start: SourceLocation,
+ pub end: SourceLocation,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "comment")]
+pub struct Comment {
+ pub loc: SourceRange,
+ pub text: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum EndiannessValue {
+ LittleEndian,
+ BigEndian,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "endianness_declaration")]
+pub struct Endianness {
+ pub loc: SourceRange,
+ pub value: EndiannessValue,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Expr {
+ #[serde(rename = "identifier")]
+ Identifier { loc: SourceRange, name: String },
+ #[serde(rename = "integer")]
+ Integer { loc: SourceRange, value: usize },
+ #[serde(rename = "unary_expr")]
+ Unary { loc: SourceRange, op: String, operand: Box<Expr> },
+ #[serde(rename = "binary_expr")]
+ Binary { loc: SourceRange, op: String, operands: Box<(Expr, Expr)> },
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "tag")]
+pub struct Tag {
+ pub id: String,
+ pub loc: SourceRange,
+ pub value: usize,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "constraint")]
+pub struct Constraint {
+ pub id: String,
+ pub loc: SourceRange,
+ pub value: Expr,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Field {
+ #[serde(rename = "checksum_field")]
+ Checksum { loc: SourceRange, field_id: String },
+ #[serde(rename = "padding_field")]
+ Padding { loc: SourceRange, width: usize },
+ #[serde(rename = "size_field")]
+ Size { loc: SourceRange, field_id: String, width: usize },
+ #[serde(rename = "count_field")]
+ Count { loc: SourceRange, field_id: String, width: usize },
+ #[serde(rename = "body_field")]
+ Body { loc: SourceRange },
+ #[serde(rename = "payload_field")]
+ Payload { loc: SourceRange, size_modifier: Option<String> },
+ #[serde(rename = "fixed_field")]
+ Fixed {
+ loc: SourceRange,
+ width: Option<usize>,
+ value: Option<usize>,
+ enum_id: Option<String>,
+ tag_id: Option<String>,
+ },
+ #[serde(rename = "reserved_field")]
+ Reserved { loc: SourceRange, width: usize },
+ #[serde(rename = "array_field")]
+ Array {
+ loc: SourceRange,
+ id: String,
+ width: Option<usize>,
+ type_id: Option<String>,
+ size_modifier: Option<String>,
+ size: Option<usize>,
+ },
+ #[serde(rename = "scalar_field")]
+ Scalar { loc: SourceRange, id: String, width: usize },
+ #[serde(rename = "typedef_field")]
+ Typedef { loc: SourceRange, id: String, type_id: String },
+ #[serde(rename = "group_field")]
+ Group { loc: SourceRange, group_id: String, constraints: Vec<Constraint> },
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "test_case")]
+pub struct TestCase {
+ pub loc: SourceRange,
+ pub input: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Decl {
+ #[serde(rename = "checksum_declaration")]
+ Checksum { id: String, loc: SourceRange, function: String, width: usize },
+ #[serde(rename = "custom_field_declaration")]
+ CustomField { id: String, loc: SourceRange, width: Option<usize>, function: String },
+ #[serde(rename = "enum_declaration")]
+ Enum { id: String, loc: SourceRange, tags: Vec<Tag>, width: usize },
+ #[serde(rename = "packet_declaration")]
+ Packet {
+ id: String,
+ loc: SourceRange,
+ constraints: Vec<Constraint>,
+ fields: Vec<Field>,
+ parent_id: Option<String>,
+ },
+ #[serde(rename = "struct_declaration")]
+ Struct {
+ id: String,
+ loc: SourceRange,
+ constraints: Vec<Constraint>,
+ fields: Vec<Field>,
+ parent_id: Option<String>,
+ },
+ #[serde(rename = "group_declaration")]
+ Group { id: String, loc: SourceRange, fields: Vec<Field> },
+ #[serde(rename = "test_declaration")]
+ Test { loc: SourceRange, type_id: String, test_cases: Vec<TestCase> },
+}
+
+#[derive(Debug, Serialize)]
+pub struct Grammar {
+ pub version: String,
+ pub file: FileId,
+ pub comments: Vec<Comment>,
+ pub endianness: Option<Endianness>,
+ pub declarations: Vec<Decl>,
+}
+
+/// Implemented for all AST elements.
+pub trait Located<'d> {
+ fn loc(&'d self) -> &'d SourceRange;
+}
+
+/// Implemented for named AST elements.
+pub trait Named<'d> {
+ fn id(&'d self) -> Option<&'d String>;
+}
+
+impl SourceLocation {
+ pub fn new(offset: usize, line_starts: &[usize]) -> SourceLocation {
+ for (line, start) in line_starts.iter().enumerate().rev() {
+ if *start <= offset {
+ return SourceLocation { offset, line, column: offset - start };
+ }
+ }
+ unreachable!()
+ }
+}
+
+impl SourceRange {
+ pub fn primary(&self) -> diagnostic::Label<FileId> {
+ diagnostic::Label::primary(self.file, self.start.offset..self.end.offset)
+ }
+ pub fn secondary(&self) -> diagnostic::Label<FileId> {
+ diagnostic::Label::secondary(self.file, self.start.offset..self.end.offset)
+ }
+}
+
+impl fmt::Display for SourceRange {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.start.line == self.end.line {
+ write!(f, "{}:{}-{}", self.start.line, self.start.column, self.end.column)
+ } else {
+ write!(
+ f,
+ "{}:{}-{}:{}",
+ self.start.line, self.start.column, self.end.line, self.end.column
+ )
+ }
+ }
+}
+
+impl ops::Add<SourceRange> for SourceRange {
+ type Output = SourceRange;
+
+ fn add(self, rhs: SourceRange) -> SourceRange {
+ assert!(self.file == rhs.file);
+ SourceRange {
+ file: self.file,
+ start: self.start.min(rhs.start),
+ end: self.end.max(rhs.end),
+ }
+ }
+}
+
+impl Grammar {
+ pub fn new(file: FileId) -> Grammar {
+ Grammar {
+ version: "1.0".to_owned(),
+ comments: vec![],
+ endianness: None,
+ declarations: vec![],
+ file,
+ }
+ }
+}
+
+impl<'d> Located<'d> for Field {
+ fn loc(&'d self) -> &'d SourceRange {
+ match self {
+ Field::Checksum { loc, .. }
+ | Field::Padding { loc, .. }
+ | Field::Size { loc, .. }
+ | Field::Count { loc, .. }
+ | Field::Body { loc, .. }
+ | Field::Payload { loc, .. }
+ | Field::Fixed { loc, .. }
+ | Field::Reserved { loc, .. }
+ | Field::Array { loc, .. }
+ | Field::Scalar { loc, .. }
+ | Field::Typedef { loc, .. }
+ | Field::Group { loc, .. } => loc,
+ }
+ }
+}
+
+impl<'d> Located<'d> for Decl {
+ fn loc(&'d self) -> &'d SourceRange {
+ match self {
+ Decl::Checksum { loc, .. }
+ | Decl::CustomField { loc, .. }
+ | Decl::Enum { loc, .. }
+ | Decl::Packet { loc, .. }
+ | Decl::Struct { loc, .. }
+ | Decl::Group { loc, .. }
+ | Decl::Test { loc, .. } => loc,
+ }
+ }
+}
+
+impl<'d> Named<'d> for Field {
+ fn id(&'d self) -> Option<&'d String> {
+ match self {
+ Field::Checksum { .. }
+ | Field::Padding { .. }
+ | Field::Size { .. }
+ | Field::Count { .. }
+ | Field::Body { .. }
+ | Field::Payload { .. }
+ | Field::Fixed { .. }
+ | Field::Reserved { .. }
+ | Field::Group { .. } => None,
+ Field::Array { id, .. } | Field::Scalar { id, .. } | Field::Typedef { id, .. } => {
+ Some(id)
+ }
+ }
+ }
+}
+
+impl<'d> Named<'d> for Decl {
+ fn id(&'d self) -> Option<&'d String> {
+ match self {
+ Decl::Test { .. } => None,
+ Decl::Checksum { id, .. }
+ | Decl::CustomField { id, .. }
+ | Decl::Enum { id, .. }
+ | Decl::Packet { id, .. }
+ | Decl::Struct { id, .. }
+ | Decl::Group { id, .. } => Some(id),
+ }
+ }
+}
diff --git a/tools/pdl/src/lint.rs b/tools/pdl/src/lint.rs
new file mode 100644
index 0000000..0b2d3bc
--- /dev/null
+++ b/tools/pdl/src/lint.rs
@@ -0,0 +1,1265 @@
+use codespan_reporting::diagnostic::Diagnostic;
+use codespan_reporting::files;
+use codespan_reporting::term;
+use codespan_reporting::term::termcolor;
+use std::collections::HashMap;
+
+use crate::ast::*;
+
/// Aggregate linter diagnostics.
pub struct LintDiagnostics {
    // All warnings and errors accumulated while linting, in emission order.
    pub diagnostics: Vec<Diagnostic<FileId>>,
}
+
/// Implement lint checks for an AST element.
pub trait Lintable {
    /// Generate lint warnings and errors for the
    /// input element, returning them aggregated in a
    /// `LintDiagnostics` value.
    fn lint(&self) -> LintDiagnostics;
}
+
/// Represents a chain of group expansion.
/// Each field but the last in the chain is a typedef field of a group.
/// The last field can also be a typedef field of a group if the chain is
/// not fully expanded.
/// The location of a path is the location of its last (innermost) field.
type FieldPath<'d> = Vec<&'d Field>;
+
/// Gather information about the full grammar declaration.
struct Scope<'d> {
    // Collection of Group declarations, indexed by group id.
    groups: HashMap<String, &'d Decl>,

    // Collection of Packet declarations, indexed by packet id.
    packets: HashMap<String, &'d Decl>,

    // Collection of Enum, Struct, Checksum, and CustomField declarations.
    // Packet and Group can not be referenced in a Typedef field and thus
    // do not share the same namespace.
    typedef: HashMap<String, &'d Decl>,

    // Collection of Packet, Struct, and Group scope declarations,
    // keyed by declaration pointer identity.
    scopes: HashMap<&'d Decl, PacketScope<'d>>,
}
+
/// Gather information about a Packet, Struct, or Group declaration.
struct PacketScope<'d> {
    // Checksum starts, indexed by the checksum field id.
    checksums: HashMap<String, FieldPath<'d>>,

    // Size or count fields, indexed by the field id.
    sizes: HashMap<String, FieldPath<'d>>,

    // Payload or body field (at most one per packet scope).
    payload: Option<FieldPath<'d>>,

    // Typedef, scalar, array fields, indexed by the field id.
    named: HashMap<String, FieldPath<'d>>,

    // Group fields, indexed by the group id.
    groups: HashMap<String, &'d Field>,

    // Flattened field declarations.
    // Contains field declarations from the original Packet, Struct, or Group,
    // where Group fields have been substituted by their body.
    // Constrained Scalar or Typedef Group fields are substituted by a Fixed
    // field.
    fields: Vec<FieldPath<'d>>,

    // Constraint declarations gathered from Group inlining.
    constraints: HashMap<String, &'d Constraint>,

    // Local and inherited field declarations. Only named fields are preserved.
    // Saved here for reference for parent constraint resolving.
    all_fields: HashMap<String, &'d Field>,

    // Local and inherited constraint declarations.
    // Saved here for constraint conflict checks.
    all_constraints: HashMap<String, &'d Constraint>,
}
+
+impl std::cmp::Eq for &Decl {}
+impl<'d> std::cmp::PartialEq for &'d Decl {
+ fn eq(&self, other: &Self) -> bool {
+ std::ptr::eq(*self, *other)
+ }
+}
+
impl<'d> std::hash::Hash for &'d Decl {
    /// Hash by pointer value, consistent with the pointer-identity
    /// equality above: equal references always hash identically.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::ptr::hash(*self, state);
    }
}
+
impl<'d> Located<'d> for FieldPath<'d> {
    /// The location of an expansion chain is the location of its last
    /// (innermost) field. Panics if the path is empty.
    fn loc(&'d self) -> &'d SourceRange {
        self.last().unwrap().loc()
    }
}
+
impl LintDiagnostics {
    /// Create an empty diagnostics accumulator.
    fn new() -> LintDiagnostics {
        LintDiagnostics { diagnostics: vec![] }
    }

    /// Pretty-print all accumulated diagnostics to stderr with the
    /// requested color mode, resolving locations against `sources`.
    pub fn print(
        &self,
        sources: &SourceDatabase,
        color: termcolor::ColorChoice,
    ) -> Result<(), files::Error> {
        let writer = termcolor::StandardStream::stderr(color);
        let config = term::Config::default();
        for d in self.diagnostics.iter() {
            term::emit(&mut writer.lock(), &config, sources, d)?;
        }
        Ok(())
    }

    /// Append one diagnostic to the accumulator.
    fn push(&mut self, diagnostic: Diagnostic<FileId>) {
        self.diagnostics.push(diagnostic)
    }

    /// Report the use of an undeclared identifier `id` at `loc`.
    fn err_undeclared(&mut self, id: &str, loc: &SourceRange) {
        self.diagnostics.push(
            Diagnostic::error()
                .with_message(format!("undeclared identifier `{}`", id))
                .with_labels(vec![loc.primary()]),
        )
    }

    /// Report a redeclaration of identifier `id` (of declaration kind
    /// `kind`), pointing back at the first declaration site `prev`.
    fn err_redeclared(&mut self, id: &str, kind: &str, loc: &SourceRange, prev: &SourceRange) {
        self.diagnostics.push(
            Diagnostic::error()
                .with_message(format!("redeclaration of {} identifier `{}`", kind, id))
                .with_labels(vec![
                    loc.primary(),
                    prev.secondary().with_message(format!("`{}` is first declared here", id)),
                ]),
        )
    }
}
+
/// Return the minimum number of bits required to represent `val`,
/// i.e. the position of its highest set bit (0 for `val == 0`).
fn bit_width(val: usize) -> usize {
    (usize::BITS - val.leading_zeros()) as usize
}
+
impl<'d> PacketScope<'d> {
    /// Insert a field declaration into a packet scope.
    /// Reports an error for any field that redeclares an id already
    /// present in the matching namespace (checksum starts, size/count
    /// fields, payload/body, named fields, inserted groups).
    fn insert(&mut self, field: &'d Field, result: &mut LintDiagnostics) {
        match field {
            Field::Checksum { loc, field_id, .. } => {
                self.checksums.insert(field_id.clone(), vec![field]).map(|prev| {
                    result.push(
                        Diagnostic::error()
                            .with_message(format!(
                                "redeclaration of checksum start for `{}`",
                                field_id
                            ))
                            .with_labels(vec![
                                loc.primary(),
                                prev.loc()
                                    .secondary()
                                    .with_message("checksum start is first declared here"),
                            ]),
                    )
                })
            }

            // Anonymous fields that never conflict by id.
            Field::Padding { .. } | Field::Reserved { .. } | Field::Fixed { .. } => None,

            Field::Size { loc, field_id, .. } | Field::Count { loc, field_id, .. } => {
                self.sizes.insert(field_id.clone(), vec![field]).map(|prev| {
                    result.push(
                        Diagnostic::error()
                            .with_message(format!(
                                "redeclaration of size or count for `{}`",
                                field_id
                            ))
                            .with_labels(vec![
                                loc.primary(),
                                prev.loc().secondary().with_message("size is first declared here"),
                            ]),
                    )
                })
            }

            Field::Body { loc, .. } | Field::Payload { loc, .. } => {
                // At most one payload or body field per scope.
                if let Some(prev) = self.payload.as_ref() {
                    result.push(
                        Diagnostic::error()
                            .with_message("redeclaration of payload or body field")
                            .with_labels(vec![
                                loc.primary(),
                                prev.loc()
                                    .secondary()
                                    .with_message("payload is first declared here"),
                            ]),
                    )
                }
                self.payload = Some(vec![field]);
                None
            }

            Field::Array { loc, id, .. }
            | Field::Scalar { loc, id, .. }
            | Field::Typedef { loc, id, .. } => self
                .named
                .insert(id.clone(), vec![field])
                .map(|prev| result.err_redeclared(id, "field", loc, prev.loc())),

            Field::Group { loc, group_id, .. } => {
                self.groups.insert(group_id.clone(), field).map(|prev| {
                    result.push(
                        Diagnostic::error()
                            .with_message(format!("duplicate group `{}` insertion", group_id))
                            .with_labels(vec![
                                loc.primary(),
                                prev.loc()
                                    .secondary()
                                    .with_message(format!("`{}` is first used here", group_id)),
                            ]),
                    )
                })
            }
        };
    }

    /// Add parent fields and constraints to the scope.
    /// Only named fields are imported.
    fn inherit(
        &mut self,
        scope: &Scope,
        parent: &PacketScope<'d>,
        constraints: impl Iterator<Item = &'d Constraint>,
        result: &mut LintDiagnostics,
    ) {
        // Check constraints.
        // inherit() is called once per declaration, so the constraint
        // set must still be empty at this point.
        assert!(self.all_constraints.is_empty());
        self.all_constraints = parent.all_constraints.clone();
        for constraint in constraints {
            lint_constraint(scope, parent, constraint, result);
            let id = constraint.id.clone();
            if let Some(prev) = self.all_constraints.insert(id, constraint) {
                result.push(
                    Diagnostic::error()
                        .with_message(format!("duplicate constraint on field `{}`", constraint.id))
                        .with_labels(vec![
                            constraint.loc.primary(),
                            prev.loc.secondary().with_message("the constraint is first set here"),
                        ]),
                )
            }
        }

        // Merge group constraints into parent constraints,
        // but generate no duplication warnings, the constraints
        // do not apply to the same field set.
        for (id, constraint) in self.constraints.iter() {
            self.all_constraints.insert(id.clone(), constraint);
        }

        // Save parent fields.
        self.all_fields = parent.all_fields.clone();
    }

    /// Insert group field declarations into a packet scope.
    /// `packet_scope` is the scope of the inserted group; `group` is the
    /// Group field at the insertion site, prepended to every inlined path.
    fn inline(
        &mut self,
        scope: &Scope,
        packet_scope: &PacketScope<'d>,
        group: &'d Field,
        constraints: impl Iterator<Item = &'d Constraint>,
        result: &mut LintDiagnostics,
    ) {
        // Local helper to report a conflict between a group-provided
        // declaration and a pre-existing one.
        fn err_redeclared_by_group(
            result: &mut LintDiagnostics,
            message: impl Into<String>,
            loc: &SourceRange,
            prev: &SourceRange,
        ) {
            result.push(Diagnostic::error().with_message(message).with_labels(vec![
                loc.primary(),
                prev.secondary().with_message("first declared here"),
            ]))
        }

        for (id, field) in packet_scope.checksums.iter() {
            if let Some(prev) = self.checksums.insert(id.clone(), field.clone()) {
                err_redeclared_by_group(
                    result,
                    format!("inserted group redeclares checksum start for `{}`", id),
                    group.loc(),
                    prev.loc(),
                )
            }
        }
        for (id, field) in packet_scope.sizes.iter() {
            if let Some(prev) = self.sizes.insert(id.clone(), field.clone()) {
                err_redeclared_by_group(
                    result,
                    format!("inserted group redeclares size or count for `{}`", id),
                    group.loc(),
                    prev.loc(),
                )
            }
        }
        // The payload may come from either side, but not both.
        match (&self.payload, &packet_scope.payload) {
            (Some(prev), Some(next)) => err_redeclared_by_group(
                result,
                "inserted group redeclares payload or body field",
                next.loc(),
                prev.loc(),
            ),
            (None, Some(payload)) => self.payload = Some(payload.clone()),
            _ => (),
        }
        for (id, field) in packet_scope.named.iter() {
            // Record the expansion chain: group field first, then the
            // path inside the group.
            let mut path = vec![group];
            path.extend(field.clone());
            if let Some(prev) = self.named.insert(id.clone(), path) {
                err_redeclared_by_group(
                    result,
                    format!("inserted group redeclares field `{}`", id),
                    group.loc(),
                    prev.loc(),
                )
            }
        }

        // Append group fields to the flattened fields.
        for field in packet_scope.fields.iter() {
            let mut path = vec![group];
            path.extend(field.clone());
            self.fields.push(path);
        }

        // Append group constraints to the caller packet_scope.
        for (id, constraint) in packet_scope.constraints.iter() {
            self.constraints.insert(id.clone(), constraint);
        }

        // Add constraints to the packet_scope, checking for duplicate constraints.
        for constraint in constraints {
            lint_constraint(scope, packet_scope, constraint, result);
            let id = constraint.id.clone();
            if let Some(prev) = self.constraints.insert(id, constraint) {
                result.push(
                    Diagnostic::error()
                        .with_message(format!("duplicate constraint on field `{}`", constraint.id))
                        .with_labels(vec![
                            constraint.loc.primary(),
                            prev.loc.secondary().with_message("the constraint is first set here"),
                        ]),
                )
            }
        }
    }

    /// Cleanup scope after processing all fields.
    /// Registers all named local fields into `all_fields`, warning when a
    /// local declaration shadows an inherited field of the same name.
    fn finalize(&mut self, result: &mut LintDiagnostics) {
        // Check field shadowing.
        for f in self.fields.iter().map(|f| f.last().unwrap()) {
            if let Some(id) = f.id() {
                if let Some(prev) = self.all_fields.insert(id.clone(), f) {
                    result.push(
                        Diagnostic::warning()
                            .with_message(format!("declaration of `{}` shadows parent field", id))
                            .with_labels(vec![
                                f.loc().primary(),
                                prev.loc()
                                    .secondary()
                                    .with_message(format!("`{}` is first declared here", id)),
                            ]),
                    )
                }
            }
        }
    }
}
+
/// Helper for linting value constraints over packet fields.
/// Checks that the constrained field exists and that the constraint
/// value has a type compatible with the field declaration:
/// - scalar fields take integer literals that fit the field width,
/// - typedef fields of enum type take declared tag identifiers.
fn lint_constraint(
    scope: &Scope,
    packet_scope: &PacketScope,
    constraint: &Constraint,
    result: &mut LintDiagnostics,
) {
    // Validate constraint value types.
    match (packet_scope.all_fields.get(&constraint.id), &constraint.value) {
        (
            Some(Field::Scalar { loc: field_loc, width, .. }),
            Expr::Integer { value, loc: value_loc, .. },
        ) => {
            // The literal must fit in `width` bits.
            if bit_width(*value) > *width {
                result.push(
                    Diagnostic::error().with_message("invalid integer literal").with_labels(vec![
                        value_loc.primary().with_message(format!(
                            "expected maximum value of `{}`",
                            (1 << *width) - 1
                        )),
                        field_loc.secondary().with_message("the value is used here"),
                    ]),
                )
            }
        }

        (Some(Field::Typedef { type_id, loc: field_loc, .. }), _) => {
            match (scope.typedef.get(type_id), &constraint.value) {
                (Some(Decl::Enum { tags, .. }), Expr::Identifier { name, loc: name_loc, .. }) => {
                    // The identifier must name a tag of the enum type.
                    if !tags.iter().any(|t| &t.id == name) {
                        result.push(
                            Diagnostic::error()
                                .with_message(format!("undeclared enum tag `{}`", name))
                                .with_labels(vec![
                                    name_loc.primary(),
                                    field_loc.secondary().with_message("the value is used here"),
                                ]),
                        )
                    }
                }
                (Some(Decl::Enum { .. }), _) => result.push(
                    Diagnostic::error().with_message("invalid literal type").with_labels(vec![
                        constraint
                            .loc
                            .primary()
                            .with_message(format!("expected `{}` tag identifier", type_id)),
                        field_loc.secondary().with_message("the value is used here"),
                    ]),
                ),
                (Some(decl), _) => result.push(
                    Diagnostic::error().with_message("invalid constraint").with_labels(vec![
                        constraint.loc.primary(),
                        field_loc.secondary().with_message(format!(
                            "`{}` has type {}, expected enum field",
                            constraint.id,
                            decl.kind()
                        )),
                    ]),
                ),
                // This error will be reported during field linting
                (None, _) => (),
            }
        }

        // Scalar field constrained with a non-integer value.
        (Some(Field::Scalar { loc: field_loc, .. }), _) => {
            result.push(Diagnostic::error().with_message("invalid literal type").with_labels(vec![
                constraint.loc.primary().with_message("expected integer literal"),
                field_loc.secondary().with_message("the value is used here"),
            ]))
        }
        // all_fields only contains named (Scalar/Typedef/Array) fields;
        // constraining an array is not reachable from the grammar.
        (Some(_), _) => unreachable!(),
        (None, _) => result.push(
            Diagnostic::error()
                .with_message(format!("undeclared identifier `{}`", constraint.id))
                .with_labels(vec![constraint.loc.primary()]),
        ),
    }
}
+
impl<'d> Scope<'d> {
    // Sort Packet, Struct, and Group declarations by reverse topological
    // order, and inline Group fields.
    // Raises errors and warnings for:
    //  - undeclared included Groups,
    //  - undeclared Typedef fields,
    //  - undeclared Packet or Struct parents,
    //  - recursive Group insertion,
    //  - recursive Packet or Struct inheritance.
    fn finalize(&mut self, result: &mut LintDiagnostics) -> Vec<&'d Decl> {
        // Auxiliary function implementing a depth-first traversal of the
        // Packet tree with temporary/permanent marks (classic DFS
        // topological sort); a Temporary mark found on re-entry signals a
        // declaration cycle.
        enum Mark {
            Temporary,
            Permanent,
        }
        struct Context<'d> {
            // Declarations in reverse topological order.
            list: Vec<&'d Decl>,
            visited: HashMap<&'d Decl, Mark>,
            // Flattened scope for each visited declaration.
            scopes: HashMap<&'d Decl, PacketScope<'d>>,
        }

        fn bfs<'s, 'd>(
            decl: &'d Decl,
            context: &'s mut Context<'d>,
            scope: &Scope<'d>,
            result: &mut LintDiagnostics,
        ) -> Option<&'s PacketScope<'d>> {
            match context.visited.get(&decl) {
                Some(Mark::Permanent) => return context.scopes.get(&decl),
                Some(Mark::Temporary) => {
                    // Re-entered a declaration currently being expanded:
                    // recursive inheritance or group insertion.
                    result.push(
                        Diagnostic::error()
                            .with_message(format!(
                                "recursive declaration of {} `{}`",
                                decl.kind(),
                                decl.id().unwrap()
                            ))
                            .with_labels(vec![decl.loc().primary()]),
                    );
                    return None;
                }
                _ => (),
            }

            // Parents of Packets live in the packet namespace, parents of
            // Structs in the typedef namespace; Groups have no parent.
            let (parent_id, parent_namespace, fields) = match decl {
                Decl::Packet { parent_id, fields, .. } => (parent_id, &scope.packets, fields),
                Decl::Struct { parent_id, fields, .. } => (parent_id, &scope.typedef, fields),
                Decl::Group { fields, .. } => (&None, &scope.groups, fields),
                _ => return None,
            };

            context.visited.insert(decl, Mark::Temporary);
            let mut lscope = decl.scope(result).unwrap();

            // Iterate over Struct and Group fields.
            for f in fields {
                match f {
                    Field::Group { group_id, constraints, .. } => {
                        match scope.groups.get(group_id) {
                            None => result.push(
                                Diagnostic::error()
                                    .with_message(format!(
                                        "undeclared group identifier `{}`",
                                        group_id
                                    ))
                                    .with_labels(vec![f.loc().primary()]),
                            ),
                            Some(group_decl) => {
                                // Recurse to flatten the inserted group.
                                if let Some(rscope) = bfs(group_decl, context, scope, result) {
                                    // Inline the group fields and constraints into
                                    // the current scope.
                                    lscope.inline(scope, rscope, f, constraints.iter(), result)
                                }
                            }
                        }
                    }
                    Field::Typedef { type_id, .. } => {
                        lscope.fields.push(vec![f]);
                        match scope.typedef.get(type_id) {
                            None => result.push(
                                Diagnostic::error()
                                    .with_message(format!(
                                        "undeclared typedef identifier `{}`",
                                        type_id
                                    ))
                                    .with_labels(vec![f.loc().primary()]),
                            ),
                            Some(struct_decl @ Decl::Struct { .. }) => {
                                // Visit referenced structs so they are
                                // ordered before their users.
                                bfs(struct_decl, context, scope, result);
                            }
                            Some(_) => (),
                        }
                    }
                    _ => lscope.fields.push(vec![f]),
                }
            }

            // Iterate over parent declaration.
            for id in parent_id {
                match parent_namespace.get(id) {
                    None => result.push(
                        Diagnostic::error()
                            .with_message(format!("undeclared parent identifier `{}`", id))
                            .with_labels(vec![decl.loc().primary()])
                            .with_notes(vec![format!("hint: expected {} parent", decl.kind())]),
                    ),
                    Some(parent_decl) => {
                        if let Some(rscope) = bfs(parent_decl, context, scope, result) {
                            // Import the parent fields and constraints into the current scope.
                            lscope.inherit(scope, rscope, decl.constraints(), result)
                        }
                    }
                }
            }

            lscope.finalize(result);
            context.list.push(decl);
            context.visited.insert(decl, Mark::Permanent);
            context.scopes.insert(decl, lscope);
            context.scopes.get(&decl)
        }

        let mut context =
            Context::<'d> { list: vec![], visited: HashMap::new(), scopes: HashMap::new() };

        for decl in self.packets.values().chain(self.typedef.values()).chain(self.groups.values()) {
            bfs(decl, &mut context, self, result);
        }

        self.scopes = context.scopes;
        context.list
    }
}
+
+impl Field {
+ fn kind(&self) -> &str {
+ match self {
+ Field::Checksum { .. } => "payload",
+ Field::Padding { .. } => "padding",
+ Field::Size { .. } => "size",
+ Field::Count { .. } => "count",
+ Field::Body { .. } => "body",
+ Field::Payload { .. } => "payload",
+ Field::Fixed { .. } => "fixed",
+ Field::Reserved { .. } => "reserved",
+ Field::Group { .. } => "group",
+ Field::Array { .. } => "array",
+ Field::Scalar { .. } => "scalar",
+ Field::Typedef { .. } => "typedef",
+ }
+ }
+}
+
// Helper for linting an enum declaration.
// Checks tag id uniqueness and that every tag value fits in the
// declared enum bit width.
fn lint_enum(tags: &[Tag], width: usize, result: &mut LintDiagnostics) {
    let mut local_scope = HashMap::new();
    for tag in tags {
        // Tags must be unique within the scope of the
        // enum declaration.
        if let Some(prev) = local_scope.insert(tag.id.clone(), tag) {
            result.push(
                Diagnostic::error()
                    .with_message(format!("redeclaration of tag identifier `{}`", &tag.id))
                    .with_labels(vec![
                        tag.loc.primary(),
                        prev.loc.secondary().with_message("first declared here"),
                    ]),
            )
        }

        // Tag values must fit the enum declared width.
        if bit_width(tag.value) > width {
            result.push(Diagnostic::error().with_message("invalid literal value").with_labels(
                vec![tag.loc.primary().with_message(format!(
                    "expected maximum value of `{}`",
                    (1 << width) - 1
                ))],
            ))
        }
    }
}
+
// Helper for linting checksum fields.
fn lint_checksum(
    scope: &Scope,
    packet_scope: &PacketScope,
    path: &FieldPath,
    field_id: &str,
    result: &mut LintDiagnostics,
) {
    // Checksum field must be declared before
    // the checksum start. The field must be a typedef with
    // a valid checksum type.
    let checksum_loc = path.loc();
    let field_decl = packet_scope.named.get(field_id);

    match field_decl.and_then(|f| f.last()) {
        Some(Field::Typedef { loc: field_loc, type_id, .. }) => {
            // Check declaration type of checksum field.
            match scope.typedef.get(type_id) {
                Some(Decl::Checksum { .. }) => (),
                Some(decl) => result.push(
                    Diagnostic::error()
                        .with_message(format!("checksum start uses invalid field `{}`", field_id))
                        .with_labels(vec![
                            checksum_loc.primary(),
                            field_loc.secondary().with_message(format!(
                                "`{}` is declared with {} type `{}`, expected checksum_field",
                                field_id,
                                decl.kind(),
                                type_id
                            )),
                        ]),
                ),
                // This error case will be reported when the field itself
                // is checked.
                None => (),
            };
            // Check declaration order of checksum field:
            // the referenced field must lexically precede the start.
            match field_decl.and_then(|f| f.first()) {
                Some(decl) if decl.loc().start > checksum_loc.start => result.push(
                    Diagnostic::error()
                        .with_message("invalid checksum start declaration")
                        .with_labels(vec![
                            checksum_loc
                                .primary()
                                .with_message("checksum start precedes checksum field"),
                            decl.loc().secondary().with_message("checksum field is declared here"),
                        ]),
                ),
                _ => (),
            }
        }
        Some(field) => result.push(
            Diagnostic::error()
                .with_message(format!("checksum start uses invalid field `{}`", field_id))
                .with_labels(vec![
                    checksum_loc.primary(),
                    field.loc().secondary().with_message(format!(
                        "`{}` is declared as {} field, expected typedef",
                        field_id,
                        field.kind()
                    )),
                ]),
        ),
        None => result.err_undeclared(field_id, checksum_loc),
    }
}
+
// Helper for linting size fields.
fn lint_size(
    _scope: &Scope,
    packet_scope: &PacketScope,
    path: &FieldPath,
    field_id: &str,
    _width: usize,
    result: &mut LintDiagnostics,
) {
    // Size fields should be declared before
    // the sized field (body, payload, or array).
    // The field must reference a valid body, payload or array
    // field.

    let size_loc = path.loc();

    // `_payload_` and `_body_` are the reserved ids for the payload and
    // body fields; each is checked against the scope's payload slot.
    if field_id == "_payload_" {
        return match packet_scope.payload.as_ref().and_then(|f| f.last()) {
            Some(Field::Body { .. }) => result.push(
                Diagnostic::error()
                    .with_message("size field uses undeclared payload field, did you mean _body_ ?")
                    .with_labels(vec![size_loc.primary()]),
            ),
            Some(Field::Payload { .. }) => {
                // Payload exists; check declaration order.
                match packet_scope.payload.as_ref().and_then(|f| f.first()) {
                    Some(field) if field.loc().start < size_loc.start => result.push(
                        Diagnostic::error().with_message("invalid size field").with_labels(vec![
                            size_loc
                                .primary()
                                .with_message("size field is declared after payload field"),
                            field.loc().secondary().with_message("payload field is declared here"),
                        ]),
                    ),
                    _ => (),
                }
            }
            // The payload slot only ever holds Body or Payload fields.
            Some(_) => unreachable!(),
            None => result.push(
                Diagnostic::error()
                    .with_message("size field uses undeclared payload field")
                    .with_labels(vec![size_loc.primary()]),
            ),
        };
    }
    if field_id == "_body_" {
        return match packet_scope.payload.as_ref().and_then(|f| f.last()) {
            Some(Field::Payload { .. }) => result.push(
                Diagnostic::error()
                    .with_message("size field uses undeclared body field, did you mean _payload_ ?")
                    .with_labels(vec![size_loc.primary()]),
            ),
            Some(Field::Body { .. }) => {
                // Body exists; check declaration order.
                match packet_scope.payload.as_ref().and_then(|f| f.first()) {
                    Some(field) if field.loc().start < size_loc.start => result.push(
                        Diagnostic::error().with_message("invalid size field").with_labels(vec![
                            size_loc
                                .primary()
                                .with_message("size field is declared after body field"),
                            field.loc().secondary().with_message("body field is declared here"),
                        ]),
                    ),
                    _ => (),
                }
            }
            Some(_) => unreachable!(),
            None => result.push(
                Diagnostic::error()
                    .with_message("size field uses undeclared body field")
                    .with_labels(vec![size_loc.primary()]),
            ),
        };
    }

    // Otherwise the size must reference a named array field.
    let field = packet_scope.named.get(field_id);

    match field.and_then(|f| f.last()) {
        Some(Field::Array { size: Some(_), loc: array_loc, .. }) => result.push(
            Diagnostic::warning()
                .with_message(format!("size field uses array `{}` with static size", field_id))
                .with_labels(vec![
                    size_loc.primary(),
                    array_loc.secondary().with_message(format!("`{}` is declared here", field_id)),
                ]),
        ),
        Some(Field::Array { .. }) => (),
        Some(field) => result.push(
            Diagnostic::error()
                .with_message(format!("invalid `{}` field type", field_id))
                .with_labels(vec![
                    field.loc().primary().with_message(format!(
                        "`{}` is declared as {}",
                        field_id,
                        field.kind()
                    )),
                    size_loc
                        .secondary()
                        .with_message(format!("`{}` is used here as array", field_id)),
                ]),
        ),

        None => result.err_undeclared(field_id, size_loc),
    };
    // Check declaration order: size must precede the sized field.
    match field.and_then(|f| f.first()) {
        Some(field) if field.loc().start < size_loc.start => {
            result.push(Diagnostic::error().with_message("invalid size field").with_labels(vec![
                size_loc
                    .primary()
                    .with_message(format!("size field is declared after field `{}`", field_id)),
                field
                    .loc()
                    .secondary()
                    .with_message(format!("`{}` is declared here", field_id)),
            ]))
        }
        _ => (),
    }
}
+
// Helper for linting count fields.
fn lint_count(
    _scope: &Scope,
    packet_scope: &PacketScope,
    path: &FieldPath,
    field_id: &str,
    _width: usize,
    result: &mut LintDiagnostics,
) {
    // Count fields should be declared before the sized field.
    // The field must reference a valid array field.
    // Warning if the array already has a known size.

    let count_loc = path.loc();
    let field = packet_scope.named.get(field_id);

    match field.and_then(|f| f.last()) {
        Some(Field::Array { size: Some(_), loc: array_loc, .. }) => result.push(
            Diagnostic::warning()
                .with_message(format!("count field uses array `{}` with static size", field_id))
                .with_labels(vec![
                    count_loc.primary(),
                    array_loc.secondary().with_message(format!("`{}` is declared here", field_id)),
                ]),
        ),

        Some(Field::Array { .. }) => (),
        Some(field) => result.push(
            Diagnostic::error()
                .with_message(format!("invalid `{}` field type", field_id))
                .with_labels(vec![
                    field.loc().primary().with_message(format!(
                        "`{}` is declared as {}",
                        field_id,
                        field.kind()
                    )),
                    count_loc
                        .secondary()
                        .with_message(format!("`{}` is used here as array", field_id)),
                ]),
        ),

        None => result.err_undeclared(field_id, count_loc),
    };
    // Check declaration order: count must precede the counted field.
    match field.and_then(|f| f.first()) {
        Some(field) if field.loc().start < count_loc.start => {
            result.push(Diagnostic::error().with_message("invalid count field").with_labels(vec![
                count_loc.primary().with_message(format!(
                    "count field is declared after field `{}`",
                    field_id
                )),
                field
                    .loc()
                    .secondary()
                    .with_message(format!("`{}` is declared here", field_id)),
            ]))
        }
        _ => (),
    }
}
+
// Helper for linting fixed fields.
#[allow(clippy::too_many_arguments)]
fn lint_fixed(
    scope: &Scope,
    _packet_scope: &PacketScope,
    path: &FieldPath,
    width: &Option<usize>,
    value: &Option<usize>,
    enum_id: &Option<String>,
    tag_id: &Option<String>,
    result: &mut LintDiagnostics,
) {
    // By parsing constraint, we already have that either
    // (width and value) or (enum_id and tag_id) are Some.

    let fixed_loc = path.loc();

    if width.is_some() {
        // The fixed value must fit in the declared bit width.
        if bit_width(value.unwrap()) > width.unwrap() {
            result.push(Diagnostic::error().with_message("invalid integer literal").with_labels(
                vec![fixed_loc.primary().with_message(format!(
                    "expected maximum value of `{}`",
                    (1 << width.unwrap()) - 1
                ))],
            ))
        }
    } else {
        // The fixed field should reference a valid enum id and tag id
        // association.
        match scope.typedef.get(enum_id.as_ref().unwrap()) {
            Some(Decl::Enum { tags, .. }) => {
                match tags.iter().find(|t| &t.id == tag_id.as_ref().unwrap()) {
                    Some(_) => (),
                    None => result.push(
                        Diagnostic::error()
                            .with_message(format!(
                                "undeclared enum tag `{}`",
                                tag_id.as_ref().unwrap()
                            ))
                            .with_labels(vec![fixed_loc.primary()]),
                    ),
                }
            }
            Some(decl) => result.push(
                Diagnostic::error()
                    .with_message(format!(
                        "fixed field uses invalid typedef `{}`",
                        decl.id().unwrap()
                    ))
                    .with_labels(vec![fixed_loc.primary().with_message(format!(
                        "{} has kind {}, expected enum",
                        decl.id().unwrap(),
                        decl.kind(),
                    ))]),
            ),
            None => result.push(
                Diagnostic::error()
                    .with_message(format!("undeclared enum type `{}`", enum_id.as_ref().unwrap()))
                    .with_labels(vec![fixed_loc.primary()]),
            ),
        }
    }
}
+
// Helper for linting array fields.
#[allow(clippy::too_many_arguments)]
fn lint_array(
    scope: &Scope,
    _packet_scope: &PacketScope,
    path: &FieldPath,
    _width: &Option<usize>,
    type_id: &Option<String>,
    _size_modifier: &Option<String>,
    _size: &Option<usize>,
    result: &mut LintDiagnostics,
) {
    // By parsing constraint, we have that width and type_id are mutually
    // exclusive, as well as size_modifier and size.
    // type_id must reference a valid enum or packet type.
    // TODO(hchataing) unbounded arrays should have a matching size
    // or count field

    let array_loc = path.loc();

    if type_id.is_some() {
        // Element type must resolve to an enum, struct, or custom field.
        match scope.typedef.get(type_id.as_ref().unwrap()) {
            Some(Decl::Enum { .. })
            | Some(Decl::Struct { .. })
            | Some(Decl::CustomField { .. }) => (),
            Some(decl) => result.push(
                Diagnostic::error()
                    .with_message(format!(
                        "array field uses invalid {} element type `{}`",
                        decl.kind(),
                        type_id.as_ref().unwrap()
                    ))
                    .with_labels(vec![array_loc.primary()])
                    .with_notes(vec!["hint: expected enum, struct, custom_field".to_owned()]),
            ),
            None => result.push(
                Diagnostic::error()
                    .with_message(format!(
                        "array field uses undeclared element type `{}`",
                        type_id.as_ref().unwrap()
                    ))
                    .with_labels(vec![array_loc.primary()])
                    .with_notes(vec!["hint: expected enum, struct, custom_field".to_owned()]),
            ),
        }
    }
}
+
// Helper for linting typedef fields.
fn lint_typedef(
    scope: &Scope,
    _packet_scope: &PacketScope,
    path: &FieldPath,
    type_id: &str,
    result: &mut LintDiagnostics,
) {
    // The typedef field must reference a valid struct, enum,
    // custom_field, or checksum type.
    // TODO(hchataing) checksum fields should have a matching checksum start

    let typedef_loc = path.loc();

    match scope.typedef.get(type_id) {
        Some(Decl::Enum { .. })
        | Some(Decl::Struct { .. })
        | Some(Decl::CustomField { .. })
        | Some(Decl::Checksum { .. }) => (),

        Some(decl) => result.push(
            Diagnostic::error()
                .with_message(format!(
                    "typedef field uses invalid {} element type `{}`",
                    decl.kind(),
                    type_id
                ))
                .with_labels(vec![typedef_loc.primary()])
                .with_notes(vec!["hint: expected enum, struct, custom_field, checksum".to_owned()]),
        ),
        None => result.push(
            Diagnostic::error()
                .with_message(format!("typedef field uses undeclared element type `{}`", type_id))
                .with_labels(vec![typedef_loc.primary()])
                .with_notes(vec!["hint: expected enum, struct, custom_field, checksum".to_owned()]),
        ),
    }
}
+
// Helper for linting a field declaration.
// Dispatches to the kind-specific helper; fields with no cross-reference
// to other declarations require no checks here.
fn lint_field(
    scope: &Scope,
    packet_scope: &PacketScope,
    field: &FieldPath,
    result: &mut LintDiagnostics,
) {
    match field.last().unwrap() {
        Field::Checksum { field_id, .. } => {
            lint_checksum(scope, packet_scope, field, field_id, result)
        }
        Field::Size { field_id, width, .. } => {
            lint_size(scope, packet_scope, field, field_id, *width, result)
        }
        Field::Count { field_id, width, .. } => {
            lint_count(scope, packet_scope, field, field_id, *width, result)
        }
        Field::Fixed { width, value, enum_id, tag_id, .. } => {
            lint_fixed(scope, packet_scope, field, width, value, enum_id, tag_id, result)
        }
        Field::Array { width, type_id, size_modifier, size, .. } => {
            lint_array(scope, packet_scope, field, width, type_id, size_modifier, size, result)
        }
        Field::Typedef { type_id, .. } => lint_typedef(scope, packet_scope, field, type_id, result),
        Field::Padding { .. }
        | Field::Reserved { .. }
        | Field::Scalar { .. }
        | Field::Body { .. }
        | Field::Payload { .. } => (),
        // Group fields are inlined during Scope::finalize and never
        // appear in flattened field paths.
        Field::Group { .. } => unreachable!(),
    }
}
+
// Helper for linting a packet declaration.
fn lint_packet(
    scope: &Scope,
    decl: &Decl,
    id: &str,
    loc: &SourceRange,
    constraints: &[Constraint],
    parent_id: &Option<String>,
    result: &mut LintDiagnostics,
) {
    // The parent declaration is checked by Scope::finalize.
    // The local scope is also generated by Scope::finalize.
    // TODO(hchataing) check parent payload size constraint: compute an upper
    // bound of the payload size and check against the encoded maximum size.

    if parent_id.is_none() && !constraints.is_empty() {
        // Constraint list should be empty when there is
        // no inheritance.
        result.push(
            Diagnostic::warning()
                .with_message(format!(
                    "packet `{}` has field constraints, but no parent declaration",
                    id
                ))
                .with_labels(vec![loc.primary()])
                .with_notes(vec!["hint: expected parent declaration".to_owned()]),
        )
    }

    // Retrieve pre-computed packet scope.
    // Scope validation was done before, so it must exist.
    let packet_scope = &scope.scopes.get(&decl).unwrap();

    // Lint every flattened field of the packet.
    for field in packet_scope.fields.iter() {
        lint_field(scope, packet_scope, field, result)
    }
}
+
// Helper for linting a struct declaration.
// Mirrors lint_packet, with struct-specific wording.
fn lint_struct(
    scope: &Scope,
    decl: &Decl,
    id: &str,
    loc: &SourceRange,
    constraints: &[Constraint],
    parent_id: &Option<String>,
    result: &mut LintDiagnostics,
) {
    // The parent declaration is checked by Scope::finalize.
    // The local scope is also generated by Scope::finalize.
    // TODO(hchataing) check parent payload size constraint: compute an upper
    // bound of the payload size and check against the encoded maximum size.

    if parent_id.is_none() && !constraints.is_empty() {
        // Constraint list should be empty when there is
        // no inheritance.
        result.push(
            Diagnostic::warning()
                .with_message(format!(
                    "struct `{}` has field constraints, but no parent declaration",
                    id
                ))
                .with_labels(vec![loc.primary()])
                .with_notes(vec!["hint: expected parent declaration".to_owned()]),
        )
    }

    // Retrieve pre-computed packet scope.
    // Scope validation was done before, so it must exist.
    let packet_scope = &scope.scopes.get(&decl).unwrap();

    // Lint every flattened field of the struct.
    for field in packet_scope.fields.iter() {
        lint_field(scope, packet_scope, field, result)
    }
}
+
impl Decl {
    /// Iterate over the declaration's constraints.
    /// Only Packet and Struct declarations carry constraints; the
    /// iterator is empty for all other kinds.
    fn constraints(&self) -> impl Iterator<Item = &Constraint> {
        match self {
            Decl::Packet { constraints, .. } | Decl::Struct { constraints, .. } => {
                Some(constraints.iter())
            }
            _ => None,
        }
        .into_iter()
        .flatten()
    }

    /// Build the local packet scope for a Packet, Struct, or Group
    /// declaration, inserting each field and reporting local
    /// redeclarations into `result`. Returns None for other kinds.
    fn scope<'d>(&'d self, result: &mut LintDiagnostics) -> Option<PacketScope<'d>> {
        match self {
            Decl::Packet { fields, .. }
            | Decl::Struct { fields, .. }
            | Decl::Group { fields, .. } => {
                let mut scope = PacketScope {
                    checksums: HashMap::new(),
                    sizes: HashMap::new(),
                    payload: None,
                    named: HashMap::new(),
                    groups: HashMap::new(),

                    fields: Vec::new(),
                    constraints: HashMap::new(),
                    all_fields: HashMap::new(),
                    all_constraints: HashMap::new(),
                };
                for field in fields {
                    scope.insert(field, result)
                }
                Some(scope)
            }
            _ => None,
        }
    }

    /// Run kind-specific lint checks for this declaration.
    fn lint<'d>(&'d self, scope: &Scope<'d>, result: &mut LintDiagnostics) {
        match self {
            Decl::Checksum { .. } | Decl::CustomField { .. } => (),
            Decl::Enum { tags, width, .. } => lint_enum(tags, *width, result),
            Decl::Packet { id, loc, constraints, parent_id, .. } => {
                lint_packet(scope, self, id, loc, constraints, parent_id, result)
            }
            Decl::Struct { id, loc, constraints, parent_id, .. } => {
                lint_struct(scope, self, id, loc, constraints, parent_id, result)
            }
            // Groups are flattened before linting, to make sure
            // potential errors are raised only once.
            Decl::Group { .. } => (),
            Decl::Test { .. } => (),
        }
    }

    /// Return a short human-readable name for the declaration kind,
    /// used in diagnostic messages.
    fn kind(&self) -> &str {
        match self {
            Decl::Checksum { .. } => "checksum",
            Decl::CustomField { .. } => "custom field",
            Decl::Enum { .. } => "enum",
            Decl::Packet { .. } => "packet",
            Decl::Struct { .. } => "struct",
            Decl::Group { .. } => "group",
            Decl::Test { .. } => "test",
        }
    }
}
+
impl Grammar {
    /// Build the full grammar scope: gather and validate top-level
    /// declarations, then flatten group insertions and inheritance via
    /// Scope::finalize. All problems are reported into `result`.
    fn scope<'d>(&'d self, result: &mut LintDiagnostics) -> Scope<'d> {
        let mut scope = Scope {
            groups: HashMap::new(),
            packets: HashMap::new(),
            typedef: HashMap::new(),
            scopes: HashMap::new(),
        };

        // Gather top-level declarations.
        // Validate the top-level scopes (Group, Packet, Typedef).
        //
        // TODO: switch to try_insert when stable
        for decl in &self.declarations {
            // Pick the namespace matching the declaration kind;
            // Test declarations live in no namespace.
            if let Some((id, namespace)) = match decl {
                Decl::Checksum { id, .. }
                | Decl::CustomField { id, .. }
                | Decl::Struct { id, .. }
                | Decl::Enum { id, .. } => Some((id, &mut scope.typedef)),
                Decl::Group { id, .. } => Some((id, &mut scope.groups)),
                Decl::Packet { id, .. } => Some((id, &mut scope.packets)),
                _ => None,
            } {
                if let Some(prev) = namespace.insert(id.clone(), decl) {
                    result.err_redeclared(id, decl.kind(), decl.loc(), prev.loc())
                }
            }
            if let Some(lscope) = decl.scope(result) {
                scope.scopes.insert(decl, lscope);
            }
        }

        scope.finalize(result);
        scope
    }
}
+
impl Lintable for Grammar {
    /// Lint the whole grammar: scope construction first, then
    /// per-declaration checks. Declaration linting is skipped when
    /// scope construction already produced diagnostics, since later
    /// checks rely on a valid scope.
    fn lint(&self) -> LintDiagnostics {
        let mut result = LintDiagnostics::new();
        let scope = self.scope(&mut result);
        if !result.diagnostics.is_empty() {
            return result;
        }
        for decl in &self.declarations {
            decl.lint(&scope, &mut result)
        }
        result
    }
}
diff --git a/tools/pdl/src/main.rs b/tools/pdl/src/main.rs
new file mode 100644
index 0000000..5f488fd
--- /dev/null
+++ b/tools/pdl/src/main.rs
@@ -0,0 +1,51 @@
+//! PDL parser and linter.
+
+extern crate codespan_reporting;
+extern crate pest;
+#[macro_use]
+extern crate pest_derive;
+extern crate serde;
+extern crate serde_json;
+extern crate structopt;
+
+use codespan_reporting::term;
+use codespan_reporting::term::termcolor;
+use structopt::StructOpt;
+
+mod ast;
+mod lint;
+mod parser;
+
+use crate::lint::Lintable;
+
+#[derive(Debug, StructOpt)]
+#[structopt(name = "pdl-parser", about = "Packet Description Language parser tool.")]
+struct Opt {
+ #[structopt(short, long = "--version", help = "Print tool version and exit.")]
+ version: bool,
+
+ #[structopt(name = "FILE", help = "Input file.")]
+ input_file: String,
+}
+
+fn main() {
+ let opt = Opt::from_args();
+
+ if opt.version {
+ println!("Packet Description Language parser version 1.0");
+ return;
+ }
+
+ let mut sources = ast::SourceDatabase::new();
+ match parser::parse_file(&mut sources, opt.input_file) {
+ Ok(grammar) => {
+ let _ = grammar.lint().print(&sources, termcolor::ColorChoice::Always);
+ println!("{}", serde_json::to_string_pretty(&grammar).unwrap())
+ }
+ Err(err) => {
+ let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Always);
+ let config = term::Config::default();
+ _ = term::emit(&mut writer.lock(), &config, &sources, &err);
+ }
+ }
+}
diff --git a/tools/pdl/src/parser.rs b/tools/pdl/src/parser.rs
new file mode 100644
index 0000000..bea80e0
--- /dev/null
+++ b/tools/pdl/src/parser.rs
@@ -0,0 +1,530 @@
+use super::ast;
+use codespan_reporting::diagnostic::Diagnostic;
+use codespan_reporting::files;
+use pest::iterators::{Pair, Pairs};
+use pest::{Parser, Token};
+use std::iter::{Filter, Peekable};
+
+// Generate the PDL parser.
+// TODO: use #[grammar = "pdl.pest"]
+// currently not possible because CARGO_MANIFEST_DIR is not set
+// in soong environment.
+#[derive(Parser)]
+#[grammar_inline = r#"
+WHITESPACE = _{ " " | "\n" }
+COMMENT = { block_comment | line_comment }
+
+block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
+line_comment = { "//" ~ (!"\n" ~ ANY)* }
+
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+hexdigit = { digit | 'a'..'f' | 'A'..'F' }
+alphanum = { alpha | digit | "_" }
+
+identifier = @{ alpha ~ alphanum* }
+payload_identifier = @{ "_payload_" }
+body_identifier = @{ "_body_" }
+intvalue = @{ digit+ }
+hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
+integer = @{ hexvalue | intvalue }
+string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
+size_modifier = @{
+ ("+"|"-"|"*"|"/") ~ (digit|"+"|"-"|"*"|"/")+
+}
+
+endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
+
+enum_tag = { identifier ~ "=" ~ integer }
+enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
+enum_declaration = {
+ "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
+ enum_tag_list ~
+ "}"
+}
+
+constraint = { identifier ~ "=" ~ (identifier|integer) }
+constraint_list = { constraint ~ ("," ~ constraint)* }
+
+checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
+padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
+size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier) ~ ")" ~ ":" ~ integer }
+count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
+body_field = @{ "_body_" }
+payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
+fixed_field = { "_fixed_" ~ "=" ~ (
+ (integer ~ ":" ~ integer) |
+ (identifier ~ ":" ~ identifier)
+)}
+reserved_field = { "_reserved_" ~ ":" ~ integer }
+array_field = { identifier ~ ":" ~ (integer|identifier) ~
+ "[" ~ (size_modifier|integer)? ~ "]"
+}
+scalar_field = { identifier ~ ":" ~ integer }
+typedef_field = { identifier ~ ":" ~ identifier }
+group_field = { identifier ~ ("{" ~ constraint_list ~ "}")? }
+
+field = _{
+ checksum_field |
+ padding_field |
+ size_field |
+ count_field |
+ body_field |
+ payload_field |
+ fixed_field |
+ reserved_field |
+ array_field |
+ scalar_field |
+ typedef_field |
+ group_field
+}
+field_list = { field ~ ("," ~ field)* ~ ","? }
+
+packet_declaration = {
+ "packet" ~ identifier ~
+ (":" ~ identifier)? ~
+ ("(" ~ constraint_list ~ ")")? ~
+ "{" ~
+ field_list? ~
+ "}"
+}
+
+struct_declaration = {
+ "struct" ~ identifier ~
+ (":" ~ identifier)? ~
+ ("(" ~ constraint_list ~ ")")? ~
+ "{" ~
+ field_list? ~
+ "}"
+}
+
+group_declaration = {
+ "group" ~ identifier ~ "{" ~ field_list ~ "}"
+}
+
+checksum_declaration = {
+ "checksum" ~ identifier ~ ":" ~ integer ~ string
+}
+
+custom_field_declaration = {
+ "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
+}
+
+test_case = { string }
+test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
+test_declaration = {
+ "test" ~ identifier ~ "{" ~
+ test_case_list ~
+ "}"
+}
+
+declaration = _{
+ enum_declaration |
+ packet_declaration |
+ struct_declaration |
+ group_declaration |
+ checksum_declaration |
+ custom_field_declaration |
+ test_declaration
+}
+
+grammar = {
+ SOI ~
+ endianness_declaration? ~
+ declaration* ~
+ EOI
+}
+"#]
+pub struct PDLParser;
+
+type Node<'i> = Pair<'i, Rule>;
+type NodeIterator<'i> = Peekable<Filter<Pairs<'i, Rule>, fn(&Node<'i>) -> bool>>;
+type Context<'a> = (ast::FileId, &'a Vec<usize>);
+
+trait Helpers<'i> {
+ fn children(self) -> NodeIterator<'i>;
+ fn as_loc(&self, context: &Context) -> ast::SourceRange;
+ fn as_string(&self) -> String;
+ fn as_usize(&self) -> Result<usize, String>;
+}
+
+impl<'i> Helpers<'i> for Node<'i> {
+ fn children(self) -> NodeIterator<'i> {
+ self.into_inner().filter((|n| n.as_rule() != Rule::COMMENT) as fn(&Self) -> bool).peekable()
+ }
+
+ fn as_loc(&self, context: &Context) -> ast::SourceRange {
+ let span = self.as_span();
+ ast::SourceRange {
+ file: context.0,
+ start: ast::SourceLocation::new(span.start_pos().pos(), context.1),
+ end: ast::SourceLocation::new(span.end_pos().pos(), context.1),
+ }
+ }
+
+ fn as_string(&self) -> String {
+ self.as_str().to_owned()
+ }
+
+ fn as_usize(&self) -> Result<usize, String> {
+ let text = self.as_str();
+ if let Some(num) = text.strip_prefix("0x") {
+ usize::from_str_radix(num, 16)
+ .map_err(|_| format!("cannot convert '{}' to usize", self.as_str()))
+ } else {
+ #[allow(clippy::from_str_radix_10)]
+ usize::from_str_radix(text, 10)
+ .map_err(|_| format!("cannot convert '{}' to usize", self.as_str()))
+ }
+ }
+}
+
+fn err_unexpected_rule<T>(expected: Rule, found: Rule) -> Result<T, String> {
+ Err(format!("expected rule {:?}, got {:?}", expected, found))
+}
+
+fn err_missing_rule<T>(expected: Rule) -> Result<T, String> {
+ Err(format!("expected rule {:?}, got nothing", expected))
+}
+
+fn expect<'i>(iter: &mut NodeIterator<'i>, rule: Rule) -> Result<Node<'i>, String> {
+ match iter.next() {
+ Some(node) if node.as_rule() == rule => Ok(node),
+ Some(node) => err_unexpected_rule(rule, node.as_rule()),
+ None => err_missing_rule(rule),
+ }
+}
+
+fn maybe<'i>(iter: &mut NodeIterator<'i>, rule: Rule) -> Option<Node<'i>> {
+ iter.next_if(|n| n.as_rule() == rule)
+}
+
+fn parse_identifier(iter: &mut NodeIterator<'_>) -> Result<String, String> {
+ expect(iter, Rule::identifier).map(|n| n.as_string())
+}
+
+fn parse_integer(iter: &mut NodeIterator<'_>) -> Result<usize, String> {
+ expect(iter, Rule::integer).and_then(|n| n.as_usize())
+}
+
+fn parse_identifier_opt(iter: &mut NodeIterator<'_>) -> Result<Option<String>, String> {
+ Ok(maybe(iter, Rule::identifier).map(|n| n.as_string()))
+}
+
+fn parse_integer_opt(iter: &mut NodeIterator<'_>) -> Result<Option<usize>, String> {
+ maybe(iter, Rule::integer).map(|n| n.as_usize()).transpose()
+}
+
+fn parse_identifier_or_integer(
+ iter: &mut NodeIterator<'_>,
+) -> Result<(Option<String>, Option<usize>), String> {
+ match iter.next() {
+ Some(n) if n.as_rule() == Rule::identifier => Ok((Some(n.as_string()), None)),
+ Some(n) if n.as_rule() == Rule::integer => Ok((None, Some(n.as_usize()?))),
+ Some(n) => Err(format!(
+ "expected rule {:?} or {:?}, got {:?}",
+ Rule::identifier,
+ Rule::integer,
+ n.as_rule()
+ )),
+ None => {
+ Err(format!("expected rule {:?} or {:?}, got nothing", Rule::identifier, Rule::integer))
+ }
+ }
+}
+
+fn parse_string(iter: &mut NodeIterator<'_>) -> Result<String, String> {
+ expect(iter, Rule::string).map(|n| n.as_string())
+}
+
+fn parse_atomic_expr(iter: &mut NodeIterator<'_>, context: &Context) -> Result<ast::Expr, String> {
+ match iter.next() {
+ Some(n) if n.as_rule() == Rule::identifier => {
+ Ok(ast::Expr::Identifier { loc: n.as_loc(context), name: n.as_string() })
+ }
+ Some(n) if n.as_rule() == Rule::integer => {
+ Ok(ast::Expr::Integer { loc: n.as_loc(context), value: n.as_usize()? })
+ }
+ Some(n) => Err(format!(
+ "expected rule {:?} or {:?}, got {:?}",
+ Rule::identifier,
+ Rule::integer,
+ n.as_rule()
+ )),
+ None => {
+ Err(format!("expected rule {:?} or {:?}, got nothing", Rule::identifier, Rule::integer))
+ }
+ }
+}
+
+fn parse_size_modifier_opt(iter: &mut NodeIterator<'_>) -> Option<String> {
+ maybe(iter, Rule::size_modifier).map(|n| n.as_string())
+}
+
+fn parse_endianness(node: Node<'_>, context: &Context) -> Result<ast::Endianness, String> {
+ if node.as_rule() != Rule::endianness_declaration {
+ err_unexpected_rule(Rule::endianness_declaration, node.as_rule())
+ } else {
+ Ok(ast::Endianness {
+ loc: node.as_loc(context),
+ value: match node.as_str() {
+ "little_endian_packets" => ast::EndiannessValue::LittleEndian,
+ "big_endian_packets" => ast::EndiannessValue::BigEndian,
+ _ => unreachable!(),
+ },
+ })
+ }
+}
+
+fn parse_constraint(node: Node<'_>, context: &Context) -> Result<ast::Constraint, String> {
+ if node.as_rule() != Rule::constraint {
+ err_unexpected_rule(Rule::constraint, node.as_rule())
+ } else {
+ let loc = node.as_loc(context);
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let value = parse_atomic_expr(&mut children, context)?;
+ Ok(ast::Constraint { id, loc, value })
+ }
+}
+
+fn parse_constraint_list_opt(
+ iter: &mut NodeIterator<'_>,
+ context: &Context,
+) -> Result<Vec<ast::Constraint>, String> {
+ maybe(iter, Rule::constraint_list)
+ .map_or(Ok(vec![]), |n| n.children().map(|n| parse_constraint(n, context)).collect())
+}
+
+fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<ast::Tag, String> {
+ if node.as_rule() != Rule::enum_tag {
+ err_unexpected_rule(Rule::enum_tag, node.as_rule())
+ } else {
+ let loc = node.as_loc(context);
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let value = parse_integer(&mut children)?;
+ Ok(ast::Tag { id, loc, value })
+ }
+}
+
+fn parse_enum_tag_list(
+ iter: &mut NodeIterator<'_>,
+ context: &Context,
+) -> Result<Vec<ast::Tag>, String> {
+ expect(iter, Rule::enum_tag_list)
+ .and_then(|n| n.children().map(|n| parse_enum_tag(n, context)).collect())
+}
+
+fn parse_field(node: Node<'_>, context: &Context) -> Result<ast::Field, String> {
+ let loc = node.as_loc(context);
+ let rule = node.as_rule();
+ let mut children = node.children();
+ Ok(match rule {
+ Rule::checksum_field => {
+ let field_id = parse_identifier(&mut children)?;
+ ast::Field::Checksum { loc, field_id }
+ }
+ Rule::padding_field => {
+ let width = parse_integer(&mut children)?;
+ ast::Field::Padding { loc, width }
+ }
+ Rule::size_field => {
+ let field_id = match children.next() {
+ Some(n) if n.as_rule() == Rule::identifier => n.as_string(),
+ Some(n) if n.as_rule() == Rule::payload_identifier => n.as_string(),
+ Some(n) if n.as_rule() == Rule::body_identifier => n.as_string(),
+ Some(n) => err_unexpected_rule(Rule::identifier, n.as_rule())?,
+ None => err_missing_rule(Rule::identifier)?,
+ };
+ let width = parse_integer(&mut children)?;
+ ast::Field::Size { loc, field_id, width }
+ }
+ Rule::count_field => {
+ let field_id = parse_identifier(&mut children)?;
+ let width = parse_integer(&mut children)?;
+ ast::Field::Count { loc, field_id, width }
+ }
+ Rule::body_field => ast::Field::Body { loc },
+ Rule::payload_field => {
+ let size_modifier = parse_size_modifier_opt(&mut children);
+ ast::Field::Payload { loc, size_modifier }
+ }
+ Rule::fixed_field => {
+ let (tag_id, value) = parse_identifier_or_integer(&mut children)?;
+ let (enum_id, width) = parse_identifier_or_integer(&mut children)?;
+ ast::Field::Fixed { loc, enum_id, tag_id, width, value }
+ }
+ Rule::reserved_field => {
+ let width = parse_integer(&mut children)?;
+ ast::Field::Reserved { loc, width }
+ }
+ Rule::array_field => {
+ let id = parse_identifier(&mut children)?;
+ let (type_id, width) = parse_identifier_or_integer(&mut children)?;
+ let (size, size_modifier) = match children.next() {
+ Some(n) if n.as_rule() == Rule::integer => (Some(n.as_usize()?), None),
+ Some(n) if n.as_rule() == Rule::size_modifier => (None, Some(n.as_string())),
+ Some(n) => {
+ return Err(format!(
+ "expected rule {:?} or {:?}, got {:?}",
+ Rule::integer,
+ Rule::size_modifier,
+ n.as_rule()
+ ))
+ }
+ None => (None, None),
+ };
+ ast::Field::Array { loc, id, type_id, width, size, size_modifier }
+ }
+ Rule::scalar_field => {
+ let id = parse_identifier(&mut children)?;
+ let width = parse_integer(&mut children)?;
+ ast::Field::Scalar { loc, id, width }
+ }
+ Rule::typedef_field => {
+ let id = parse_identifier(&mut children)?;
+ let type_id = parse_identifier(&mut children)?;
+ ast::Field::Typedef { loc, id, type_id }
+ }
+ Rule::group_field => {
+ let group_id = parse_identifier(&mut children)?;
+ let constraints = parse_constraint_list_opt(&mut children, context)?;
+ ast::Field::Group { loc, group_id, constraints }
+ }
+ _ => return Err(format!("expected rule *_field, got {:?}", rule)),
+ })
+}
+
+fn parse_field_list<'i>(
+ iter: &mut NodeIterator<'i>,
+ context: &Context,
+) -> Result<Vec<ast::Field>, String> {
+ expect(iter, Rule::field_list)
+ .and_then(|n| n.children().map(|n| parse_field(n, context)).collect())
+}
+
+fn parse_field_list_opt<'i>(
+ iter: &mut NodeIterator<'i>,
+ context: &Context,
+) -> Result<Vec<ast::Field>, String> {
+ maybe(iter, Rule::field_list)
+ .map_or(Ok(vec![]), |n| n.children().map(|n| parse_field(n, context)).collect())
+}
+
+fn parse_grammar(root: Node<'_>, context: &Context) -> Result<ast::Grammar, String> {
+ let mut toplevel_comments = vec![];
+ let mut grammar = ast::Grammar::new(context.0);
+
+ let mut comment_start = vec![];
+ for token in root.clone().tokens() {
+ match token {
+ Token::Start { rule: Rule::COMMENT, pos } => comment_start.push(pos),
+ Token::End { rule: Rule::COMMENT, pos } => {
+ let start_pos = comment_start.pop().unwrap();
+ grammar.comments.push(ast::Comment {
+ loc: ast::SourceRange {
+ file: context.0,
+ start: ast::SourceLocation::new(start_pos.pos(), context.1),
+ end: ast::SourceLocation::new(pos.pos(), context.1),
+ },
+ text: start_pos.span(&pos).as_str().to_owned(),
+ })
+ }
+ _ => (),
+ }
+ }
+
+ for node in root.children() {
+ let loc = node.as_loc(context);
+ let rule = node.as_rule();
+ match rule {
+ Rule::endianness_declaration => {
+ grammar.endianness = Some(parse_endianness(node, context)?)
+ }
+ Rule::checksum_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let width = parse_integer(&mut children)?;
+ let function = parse_string(&mut children)?;
+ grammar.declarations.push(ast::Decl::Checksum { id, loc, function, width })
+ }
+ Rule::custom_field_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let width = parse_integer_opt(&mut children)?;
+ let function = parse_string(&mut children)?;
+ grammar.declarations.push(ast::Decl::CustomField { id, loc, function, width })
+ }
+ Rule::enum_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let width = parse_integer(&mut children)?;
+ let tags = parse_enum_tag_list(&mut children, context)?;
+ grammar.declarations.push(ast::Decl::Enum { id, loc, width, tags })
+ }
+ Rule::packet_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let parent_id = parse_identifier_opt(&mut children)?;
+ let constraints = parse_constraint_list_opt(&mut children, context)?;
+ let fields = parse_field_list_opt(&mut children, context)?;
+ grammar.declarations.push(ast::Decl::Packet {
+ id,
+ loc,
+ parent_id,
+ constraints,
+ fields,
+ })
+ }
+ Rule::struct_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let parent_id = parse_identifier_opt(&mut children)?;
+ let constraints = parse_constraint_list_opt(&mut children, context)?;
+ let fields = parse_field_list_opt(&mut children, context)?;
+ grammar.declarations.push(ast::Decl::Struct {
+ id,
+ loc,
+ parent_id,
+ constraints,
+ fields,
+ })
+ }
+ Rule::group_declaration => {
+ let mut children = node.children();
+ let id = parse_identifier(&mut children)?;
+ let fields = parse_field_list(&mut children, context)?;
+ grammar.declarations.push(ast::Decl::Group { id, loc, fields })
+ }
+ Rule::test_declaration => {}
+ Rule::EOI => (),
+ _ => unreachable!(),
+ }
+ }
+ grammar.comments.append(&mut toplevel_comments);
+ Ok(grammar)
+}
+
+/// Parse a new source file.
+/// The source file is fully read and added to the compilation database.
+/// Returns the constructed AST, or a descriptive error message in case
+/// of syntax error.
+pub fn parse_file(
+ sources: &mut ast::SourceDatabase,
+ name: String,
+) -> Result<ast::Grammar, Diagnostic<ast::FileId>> {
+ let source = std::fs::read_to_string(&name).map_err(|e| {
+ Diagnostic::error().with_message(format!("failed to read input file '{}': {}", &name, e))
+ })?;
+ let root = PDLParser::parse(Rule::grammar, &source)
+ .map_err(|e| {
+ Diagnostic::error()
+ .with_message(format!("failed to parse input file '{}': {}", &name, e))
+ })?
+ .next()
+ .unwrap();
+ let line_starts: Vec<_> = files::line_starts(&source).collect();
+ let file = sources.add(name, source.clone());
+ parse_grammar(root, &(file, &line_starts)).map_err(|e| Diagnostic::error().with_message(e))
+}
diff --git a/tools/pdl/src/pdl.pest b/tools/pdl/src/pdl.pest
new file mode 100644
index 0000000..43b5095
--- /dev/null
+++ b/tools/pdl/src/pdl.pest
@@ -0,0 +1,123 @@
+WHITESPACE = _{ " " | "\n" }
+COMMENT = { block_comment | line_comment }
+
+block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
+line_comment = { "//" ~ (!"\n" ~ ANY)* }
+
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+hexdigit = { digit | 'a'..'f' | 'A'..'F' }
+alphanum = { alpha | digit | "_" }
+
+identifier = @{ alpha ~ alphanum* }
+payload_identifier = @{ "_payload_" }
+body_identifier = @{ "_body_" }
+intvalue = @{ digit+ }
+hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
+integer = @{ hexvalue | intvalue }
+string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
+size_modifier = @{
+ ("+"|"-"|"*"|"/") ~ (digit|"+"|"-"|"*"|"/")+
+}
+
+endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
+
+enum_tag = { identifier ~ "=" ~ integer }
+enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
+enum_declaration = {
+ "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
+ enum_tag_list ~
+ "}"
+}
+
+constraint = { identifier ~ "=" ~ (identifier|integer) }
+constraint_list = { constraint ~ ("," ~ constraint)* }
+
+checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
+padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
+size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier) ~ ")" ~ ":" ~ integer }
+count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
+body_field = @{ "_body_" }
+payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
+fixed_field = { "_fixed_" ~ "=" ~ (
+ (integer ~ ":" ~ integer) |
+ (identifier ~ ":" ~ identifier)
+)}
+reserved_field = { "_reserved_" ~ ":" ~ integer }
+array_field = { identifier ~ ":" ~ (integer|identifier) ~
+ "[" ~ (size_modifier|integer)? ~ "]"
+}
+scalar_field = { identifier ~ ":" ~ integer }
+typedef_field = { identifier ~ ":" ~ identifier }
+group_field = { identifier ~ ("{" ~ constraint_list ~ "}")? }
+
+field = _{
+ checksum_field |
+ padding_field |
+ size_field |
+ count_field |
+ body_field |
+ payload_field |
+ fixed_field |
+ reserved_field |
+ array_field |
+ scalar_field |
+ typedef_field |
+ group_field
+}
+field_list = { field ~ ("," ~ field)* ~ ","? }
+
+packet_declaration = {
+ "packet" ~ identifier ~
+ (":" ~ identifier)? ~
+ ("(" ~ constraint_list ~ ")")? ~
+ "{" ~
+ field_list? ~
+ "}"
+}
+
+struct_declaration = {
+ "struct" ~ identifier ~
+ (":" ~ identifier)? ~
+ ("(" ~ constraint_list ~ ")")? ~
+ "{" ~
+ field_list? ~
+ "}"
+}
+
+group_declaration = {
+ "group" ~ identifier ~ "{" ~ field_list ~ "}"
+}
+
+checksum_declaration = {
+ "checksum" ~ identifier ~ ":" ~ integer ~ string
+}
+
+custom_field_declaration = {
+ "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
+}
+
+test_case = { string }
+test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
+test_declaration = {
+ "test" ~ identifier ~ "{" ~
+ test_case_list ~
+ "}"
+}
+
+declaration = _{
+ enum_declaration |
+ packet_declaration |
+ struct_declaration |
+ group_declaration |
+ checksum_declaration |
+ custom_field_declaration |
+ test_declaration
+}
+
+grammar = {
+ SOI ~
+ endianness_declaration? ~
+ declaration* ~
+ EOI
+}
diff --git a/tools/pdl/test/array-field.pdl b/tools/pdl/test/array-field.pdl
new file mode 100644
index 0000000..070a6cc
--- /dev/null
+++ b/tools/pdl/test/array-field.pdl
@@ -0,0 +1,39 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+struct Struct {
+ a: 1,
+}
+
+packet Packet {
+ a: 1,
+}
+
+group Group {
+ a: 1,
+}
+
+packet InvalidKind {
+ array_0: Group[],
+ array_1: Packet[],
+ array_2: checksum[],
+}
+
+packet UndeclaredType {
+ array: Unknown[],
+}
+
+packet Correct {
+ array_0: custom[],
+ array_1: Enum[],
+ array_2: Struct[],
+ array_3: 1[],
+ array_4: 1[42],
+ array_5: 1[+2],
+}
diff --git a/tools/pdl/test/checksum-field.pdl b/tools/pdl/test/checksum-field.pdl
new file mode 100644
index 0000000..0e1a98b
--- /dev/null
+++ b/tools/pdl/test/checksum-field.pdl
@@ -0,0 +1,22 @@
+little_endian_packets
+
+checksum crc16: 16 "crc16"
+
+packet Undefined {
+ _checksum_start_ (crc16),
+}
+
+packet InvalidType {
+ crc16: 16,
+ _checksum_start_ (crc16),
+}
+
+packet InvalidOrder {
+ _checksum_start_ (crc16),
+ crc16: crc16,
+}
+
+packet Correct {
+ crc16: crc16,
+ _checksum_start_ (crc16),
+}
diff --git a/tools/pdl/test/count-field.pdl b/tools/pdl/test/count-field.pdl
new file mode 100644
index 0000000..a88cccd
--- /dev/null
+++ b/tools/pdl/test/count-field.pdl
@@ -0,0 +1,25 @@
+little_endian_packets
+
+packet Undefined {
+ _count_ (array): 8,
+}
+
+packet InvalidType {
+ _count_ (array): 8,
+ array: 16,
+}
+
+packet InvalidOrder {
+ array: 16[],
+ _count_ (array): 8,
+}
+
+packet InvalidSize {
+ _count_ (array): 8,
+ array: 16[32],
+}
+
+packet Correct {
+ _count_ (array): 8,
+ array: 16[],
+}
diff --git a/tools/pdl/test/decl-scope.pdl b/tools/pdl/test/decl-scope.pdl
new file mode 100644
index 0000000..c1391ab
--- /dev/null
+++ b/tools/pdl/test/decl-scope.pdl
@@ -0,0 +1,26 @@
+
+// Clashes with custom_field, struct, enum
+checksum decl_name: 16 "crc16"
+
+// Clashes with checksum, struct, enum
+custom_field decl_name: 1 "custom"
+
+// Clashes with checksum, custom_field, struct
+enum decl_name : 1 {
+ A = 1,
+}
+
+// Clashes with checksum, custom_field, enum
+struct decl_name {
+ a: 1,
+}
+
+// OK
+group decl_name {
+ a: 1,
+}
+
+// OK
+packet decl_name {
+ a: 1,
+}
diff --git a/tools/pdl/test/example.pdl b/tools/pdl/test/example.pdl
new file mode 100644
index 0000000..b34d140
--- /dev/null
+++ b/tools/pdl/test/example.pdl
@@ -0,0 +1,78 @@
+// line comment
+/* block comment */
+
+little_endian_packets
+
+/* stuff */
+enum FourBits : 4 {
+ ONE = 1,
+ TWO = 2,
+ THREE = 3,
+ FIVE = 5,
+ TEN = 10,
+ LAZY_ME = 15,
+}
+
+/* other stuff */
+enum FourBits : 4 {
+ ONE = 1,
+ TWO = 2,
+ THREE = 3,
+ FIVE = 5,
+ TEN = 10,
+ LAZY_ME = 15
+}
+
+packet Test {
+ /* Checksum */
+ _checksum_start_ (crc16),
+ /* Padding */
+ _padding_ [1],
+ /* Size */
+ _size_ (_payload_) : 1,
+ _size_ (_body_) : 1,
+ _size_ (id) : 1,
+ /* Body */
+ _body_,
+ /* Payload */
+ _payload_,
+ _payload_ : [+1],
+ /* Fixed */
+ _fixed_ = 1:1,
+ _fixed_ = id:id,
+ /* Reserved */
+ _reserved_ : 1,
+ /* Array */
+ id: 1[+1],
+ id: id[+1],
+ id: 1[1],
+ id: id[1],
+ id: 1[],
+ id: id[],
+ /* Scalar */
+ id: 1,
+ /* Typedef */
+ id : id,
+ /* Group */
+ id { a=1, b=2 },
+ id,
+}
+
+packet TestChild : Test {
+}
+
+packet TestChild (a=1, b=2) {
+}
+
+packet TestChild : Test (a=1, b=2) {
+}
+
+checksum id: 1 "id"
+
+custom_field id : 1 "id"
+custom_field id "id"
+
+test Test {
+ "1111",
+ "2222",
+}
diff --git a/tools/pdl/test/fixed-field.pdl b/tools/pdl/test/fixed-field.pdl
new file mode 100644
index 0000000..e69fc7e
--- /dev/null
+++ b/tools/pdl/test/fixed-field.pdl
@@ -0,0 +1,22 @@
+little_endian_packets
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+packet InvalidValue {
+ _fixed_ = 1: 256,
+}
+
+packet UndeclaredEnum {
+ _fixed_ = tag : InvalidEnum,
+}
+
+packet UndeclaredTag {
+ _fixed_ = invalid_tag : Enum,
+}
+
+packet Correct {
+ _fixed_ = 1: 256,
+ _fixed_ = tag: Enum,
+}
diff --git a/tools/pdl/test/group-constraint.pdl b/tools/pdl/test/group-constraint.pdl
new file mode 100644
index 0000000..1f4e10d
--- /dev/null
+++ b/tools/pdl/test/group-constraint.pdl
@@ -0,0 +1,39 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+group Group {
+ a: 4,
+ b: Enum,
+ c: custom_field,
+ d: checksum,
+}
+
+struct Undeclared {
+ Group { e=1 },
+}
+
+struct Redeclared {
+ Group { a=1, a=2 },
+}
+
+struct TypeMismatch {
+ Group { a=tag, b=1, c=1, d=1 },
+}
+
+struct InvalidLiteral {
+ Group { a=42 },
+}
+
+struct UndeclaredTag {
+ Group { b=undeclared_tag },
+}
+
+struct Correct {
+ Group { a=1, b=tag },
+}
diff --git a/tools/pdl/test/packet.pdl b/tools/pdl/test/packet.pdl
new file mode 100644
index 0000000..9b9ca20
--- /dev/null
+++ b/tools/pdl/test/packet.pdl
@@ -0,0 +1,52 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+packet Packet {
+ a: 4,
+ b: Enum,
+ c: custom,
+ d: checksum,
+}
+
+struct Struct {
+ a: 4,
+}
+
+packet RecursivePacket_0 : RecursivePacket_1 {
+}
+
+packet RecursivePacket_1 : RecursivePacket_0 {
+}
+
+packet InvalidParent : Struct {
+}
+
+packet UndeclaredParent : FooBar {
+}
+
+packet UnnecessaryConstraints (a=1) {
+}
+
+packet Undeclared : Packet (c=1) {
+}
+
+packet Redeclared : Packet (a=1, a=2) {
+}
+
+packet TypeMismatch : Packet (a=tag, b=1, c=1, d=1) {
+}
+
+packet InvalidLiteral : Packet (a=42) {
+}
+
+packet UndeclaredTag : Packet (b=undeclared_tag) {
+}
+
+packet Correct : Packet (a=1, b=tag) {
+}
diff --git a/tools/pdl/test/recurse.pdl b/tools/pdl/test/recurse.pdl
new file mode 100644
index 0000000..ad3a200
--- /dev/null
+++ b/tools/pdl/test/recurse.pdl
@@ -0,0 +1,38 @@
+
+struct Struct_0: Struct_1 {
+}
+
+struct Struct_1: Struct_0 {
+}
+
+
+struct Packet_0: Packet_1 {
+}
+
+struct Packet_1: Packet_0 {
+}
+
+
+group Group_0 {
+ Group_1
+}
+
+struct Struct_2 {
+ Group_0
+}
+
+group Group_1 {
+ a: Struct_2
+}
+
+
+struct Struct_3: Struct_4 {
+}
+
+struct Struct_4 {
+ Group_2
+}
+
+group Group_2 {
+ a: Struct_3
+}
diff --git a/tools/pdl/test/size-field.pdl b/tools/pdl/test/size-field.pdl
new file mode 100644
index 0000000..dfa9ad7
--- /dev/null
+++ b/tools/pdl/test/size-field.pdl
@@ -0,0 +1,58 @@
+little_endian_packets
+
+packet Undefined {
+ _size_ (array): 8,
+}
+
+packet UndefinedPayloadWithBody {
+ _size_ (_payload_): 8,
+ _body_,
+}
+
+packet UndefinedPayload {
+ _size_ (_payload_): 8,
+}
+
+packet UndefinedBodyWithPayload {
+ _size_ (_body_): 8,
+ _payload_,
+}
+
+packet UndefinedBody {
+ _size_ (_body_): 8,
+}
+
+packet InvalidType {
+ _size_ (array): 8,
+ array: 16,
+}
+
+packet InvalidArrayOrder {
+ array: 16[],
+ _size_ (array): 8,
+}
+
+packet InvalidPayloadOrder {
+ _payload_,
+ _size_ (_payload_): 8,
+}
+
+packet InvalidBodyOrder {
+ _body_,
+ _size_ (_body_): 8,
+}
+
+packet CorrectArray {
+ _size_ (array): 8,
+ array: 16[],
+}
+
+packet CorrectPayload {
+ _size_ (_payload_): 8,
+ _payload_,
+}
+
+packet CorrectBody {
+ _size_ (_body_): 8,
+ _body_,
+}
diff --git a/tools/pdl/test/struct.pdl b/tools/pdl/test/struct.pdl
new file mode 100644
index 0000000..d8ed439
--- /dev/null
+++ b/tools/pdl/test/struct.pdl
@@ -0,0 +1,52 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+struct Struct {
+ a: 4,
+ b: Enum,
+ c: custom,
+ d: checksum,
+}
+
+packet Packet {
+ a: 4,
+}
+
+struct RecursiveStruct_0 : RecursiveStruct_1 {
+}
+
+struct RecursiveStruct_1 : RecursiveStruct_0 {
+}
+
+struct InvalidParent : Packet {
+}
+
+struct UndeclaredParent : FooBar {
+}
+
+struct UnnecessaryConstraints (a=1) {
+}
+
+struct Undeclared : Struct (c=1) {
+}
+
+struct Redeclared : Struct (a=1, a=2) {
+}
+
+struct TypeMismatch : Struct (a=tag, b=1, c=1, d=1) {
+}
+
+struct InvalidLiteral : Struct (a=42) {
+}
+
+struct UndeclaredTag : Struct (b=undeclared_tag) {
+}
+
+struct Correct : Struct (a=1, b=tag) {
+}
diff --git a/tools/pdl/test/typedef-field.pdl b/tools/pdl/test/typedef-field.pdl
new file mode 100644
index 0000000..2e56676
--- /dev/null
+++ b/tools/pdl/test/typedef-field.pdl
@@ -0,0 +1,36 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+ tag = 0,
+}
+
+struct Struct {
+ a: 1,
+}
+
+packet Packet {
+ a: 1,
+}
+
+group Group {
+ a: 1,
+}
+
+packet InvalidKind {
+ typedef_0: Group,
+ typedef_1: Packet,
+}
+
+packet UndeclaredType {
+ typedef: Unknown,
+}
+
+packet Correct {
+ typedef_0: custom,
+ typedef_1: checksum,
+ typedef_2: Enum,
+ typedef_3: Struct,
+}
diff --git a/tools/rootcanal/model/controller/dual_mode_controller.cc b/tools/rootcanal/model/controller/dual_mode_controller.cc
index acd413b..3770fcf 100644
--- a/tools/rootcanal/model/controller/dual_mode_controller.cc
+++ b/tools/rootcanal/model/controller/dual_mode_controller.cc
@@ -2299,7 +2299,8 @@
gd_hci::LeRemoveIsoDataPathView::Create(std::move(iso_command_view));
ASSERT(command_view.IsValid());
link_layer_controller_.LeRemoveIsoDataPath(
- command_view.GetConnectionHandle(), command_view.GetDataPathDirection());
+ command_view.GetConnectionHandle(),
+ command_view.GetRemoveDataPathDirection());
}
void DualModeController::LeReadRemoteFeatures(CommandView command) {
diff --git a/tools/rootcanal/model/controller/link_layer_controller.cc b/tools/rootcanal/model/controller/link_layer_controller.cc
index 871a595..b4764cb 100644
--- a/tools/rootcanal/model/controller/link_layer_controller.cc
+++ b/tools/rootcanal/model/controller/link_layer_controller.cc
@@ -3395,7 +3395,7 @@
void LinkLayerController::LeRemoveIsoDataPath(
uint16_t /* connection_handle */,
- bluetooth::hci::DataPathDirection /* data_path_direction */) {}
+ bluetooth::hci::RemoveDataPathDirection /* remove_data_path_direction */) {}
void LinkLayerController::HandleLeEnableEncryption(
uint16_t handle, std::array<uint8_t, 8> rand, uint16_t ediv,
diff --git a/tools/rootcanal/model/controller/link_layer_controller.h b/tools/rootcanal/model/controller/link_layer_controller.h
index 6147fb4..0066c69 100644
--- a/tools/rootcanal/model/controller/link_layer_controller.h
+++ b/tools/rootcanal/model/controller/link_layer_controller.h
@@ -239,7 +239,7 @@
std::vector<uint8_t> codec_configuration);
void LeRemoveIsoDataPath(
uint16_t connection_handle,
- bluetooth::hci::DataPathDirection data_path_direction);
+ bluetooth::hci::RemoveDataPathDirection remove_data_path_direction);
void HandleLeEnableEncryption(uint16_t handle, std::array<uint8_t, 8> rand,
uint16_t ediv, std::array<uint8_t, 16> ltk);