JNI: Inline fast-path for `JniMethodStart()`.
Golem results for art-opt-cc (higher is better):
linux-ia32 before after
NativeDowncallStaticNormal 35.306 47.382 (+34.20%)
NativeDowncallStaticNormal6 32.951 42.247 (+28.21%)
NativeDowncallStaticNormalRefs6 17.866 41.355 (+131.5%)
NativeDowncallVirtualNormal 35.341 46.836 (+32.53%)
NativeDowncallVirtualNormal6 32.403 41.791 (+28.97%)
NativeDowncallVirtualNormalRefs6 32.131 40.500 (+26.05%)
linux-x64 before after
NativeDowncallStaticNormal 33.350 43.716 (+31.08%)
NativeDowncallStaticNormal6 31.096 43.176 (+38.85%)
NativeDowncallStaticNormalRefs6 30.617 38.500 (+25.75%)
NativeDowncallVirtualNormal 33.234 43.672 (+32.41%)
NativeDowncallVirtualNormal6 30.617 42.247 (+37.98%)
NativeDowncallVirtualNormalRefs6 32.131 42.701 (+32.90%)
linux-armv7 before after
NativeDowncallStaticNormal 7.8701 9.9651 (+26.62%)
NativeDowncallStaticNormal6 7.4147 8.9463 (+20.66%)
NativeDowncallStaticNormalRefs6 6.8830 8.3868 (+21.85%)
NativeDowncallVirtualNormal 7.8316 9.8377 (+25.61%)
NativeDowncallVirtualNormal6 7.4147 9.3596 (+26.23%)
NativeDowncallVirtualNormalRefs6 6.6794 8.4325 (+26.25%)
linux-armv8 before after
NativeDowncallStaticNormal 7.6372 9.8571 (+29.07%)
NativeDowncallStaticNormal6 7.4147 9.4905 (+28.00%)
NativeDowncallStaticNormalRefs6 6.8527 8.6705 (+26.53%)
NativeDowncallVirtualNormal 7.4147 9.3183 (+25.67%)
NativeDowncallVirtualNormal6 7.0755 9.2593 (+30.86%)
NativeDowncallVirtualNormalRefs6 6.5604 8.2967 (+26.47%)
Note that NativeDowncallStaticNormalRefs6 on x86 has been
jumping like crazy since
https://android-review.googlesource.com/1905055
between ~17.6 and ~32.4 for completely unrelated changes,
so if we take the 32.4 as a baseline, the improvement is
only ~27.6%, in line with the other x86 benchmarks.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: run-gtests.sh
Test: testrunner.py --target --optimizing
Bug: 172332525
Change-Id: I771a4765bd3a7c4e58b94be4155515241ea6fa3c
diff --git a/libartbase/base/bit_field.h b/libartbase/base/bit_field.h
index f57c414..101fbd1 100644
--- a/libartbase/base/bit_field.h
+++ b/libartbase/base/bit_field.h
@@ -40,47 +40,47 @@
static_assert(size + position <= sizeof(uintptr_t) * kBitsPerByte, "Invalid position + size.");
// Tells whether the provided value fits into the bit field.
- static bool IsValid(T value) {
+ static constexpr bool IsValid(T value) {
return (static_cast<uintptr_t>(value) & ~((kUintPtrTOne << size) - 1)) == 0;
}
// Returns a uword mask of the bit field.
- static uintptr_t Mask() {
+ static constexpr uintptr_t Mask() {
return (kUintPtrTOne << size) - 1;
}
// Returns a uword mask of the bit field which can be applied directly to
// the raw unshifted bits.
- static uintptr_t MaskInPlace() {
+ static constexpr uintptr_t MaskInPlace() {
return ((kUintPtrTOne << size) - 1) << position;
}
// Returns the shift count needed to right-shift the bit field to
// the least-significant bits.
- static int Shift() {
+ static constexpr int Shift() {
return position;
}
// Returns the size of the bit field.
- static int BitSize() {
+ static constexpr int BitSize() {
return size;
}
// Returns a uword with the bit field value encoded.
- static uintptr_t Encode(T value) {
+ static constexpr uintptr_t Encode(T value) {
DCHECK(IsValid(value));
return static_cast<uintptr_t>(value) << position;
}
// Extracts the bit field from the value.
- static T Decode(uintptr_t value) {
+ static constexpr T Decode(uintptr_t value) {
return static_cast<T>((value >> position) & ((kUintPtrTOne << size) - 1));
}
// Returns a uword with the bit field value encoded based on the
// original value. Only the bits corresponding to this bit field
// will be changed.
- static uintptr_t Update(T value, uintptr_t original) {
+ static constexpr uintptr_t Update(T value, uintptr_t original) {
DCHECK(IsValid(value));
return (static_cast<uintptr_t>(value) << position) |
(~MaskInPlace() & original);