Support for synchronized native methods.
This change adds support for synchronized native methods by bracketing the
native call with MonitorEnter and MonitorExit calls made through the JNIEnv*.
There is also some tidying of the assembler and a straw-man JNIEnv
implementation. The straw-man JNIEnv just logs a warning when
MonitorEnter/MonitorExit are called and doesn't yet adhere to the correct
JNIEnv layout.
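Conceptually, the emitted stub for a synchronized native method now brackets
the native call roughly as in the C++ sketch below. This is illustrative only:
FakeJniEnvironment and CallSynchronizedNative are made-up names, the real
sequence is emitted by the JNI compiler, and the object locked is the receiver
(or the class handle for static methods).

  // Sketch: mirrors the function-pointer layout of the straw-man
  // JniEnvironment so the monitor routines can be reached by offset.
  struct FakeJniEnvironment {
    void (*monitor_enter)(FakeJniEnvironment*, void* obj);
    void (*monitor_exit)(FakeJniEnvironment*, void* obj);
  };

  void* CallSynchronizedNative(FakeJniEnvironment* env, void* obj,
                               void* (*native_fn)(FakeJniEnvironment*, void*)) {
    env->monitor_enter(env, obj);        // lock before entering native code
    void* result = native_fn(env, obj);  // the native method itself
    env->monitor_exit(env, obj);         // unlock on the way out; the stub
                                         // spills/reloads the return value
                                         // around this call
    return result;
  }

In the generated code the return value is saved to ReturnValueSaveLocation()
across the MonitorExit call and reloaded afterwards.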
Change-Id: I90ed6ec8f85f5b01b929f16e0dbdecadd0b01359
diff --git a/build/Android.common.mk b/build/Android.common.mk
index fe24801..24bfe02 100644
--- a/build/Android.common.mk
+++ b/build/Android.common.mk
@@ -38,6 +38,7 @@
src/dex_instruction.cc \
src/dex_verifier.cc \
src/jni_compiler.cc \
+ src/jni_internal.cc \
src/memory_region.cc \
src/object.cc \
src/raw_dex_file.cc \
diff --git a/src/assembler_arm.cc b/src/assembler_arm.cc
index 91c259e..a519e35 100644
--- a/src/assembler_arm.cc
+++ b/src/assembler_arm.cc
@@ -1336,6 +1336,11 @@
StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
}
+void Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister src) {
+ CHECK(src.IsCoreRegister());
+ StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
+}
+
void Assembler::CopyRef(FrameOffset dest, FrameOffset src,
ManagedRegister scratch) {
LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
@@ -1468,8 +1473,7 @@
}
void Assembler::LoadReferenceFromStackHandle(ManagedRegister out_reg,
- ManagedRegister in_reg,
- FrameOffset shb_offset) {
+ ManagedRegister in_reg) {
CHECK(out_reg.IsCoreRegister());
CHECK(in_reg.IsCoreRegister());
Label null_arg;
@@ -1477,8 +1481,8 @@
LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
}
cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
- LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(), in_reg.AsCoreRegister(),
- shb_offset.Int32Value(), NE);
+ LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
+ in_reg.AsCoreRegister(), 0, NE);
}
void Assembler::ValidateRef(ManagedRegister src, bool could_be_null) {
@@ -1489,7 +1493,7 @@
// TODO: not validating references
}
-void Assembler::Call(ManagedRegister base, MemberOffset offset,
+void Assembler::Call(ManagedRegister base, Offset offset,
ManagedRegister scratch) {
CHECK(base.IsCoreRegister());
CHECK(scratch.IsCoreRegister());
@@ -1499,13 +1503,4 @@
// TODO: place reference map on call
}
-// Emit code that will lock the reference in the given register
-void Assembler::LockReferenceOnStack(FrameOffset fr_offs) {
- LOG(FATAL) << "TODO";
-}
-// Emit code that will unlock the reference in the given register
-void Assembler::UnLockReferenceOnStack(FrameOffset fr_offs) {
- LOG(FATAL) << "TODO";
-}
-
} // namespace art
diff --git a/src/assembler_arm.h b/src/assembler_arm.h
index a20a40b..4fb061f 100644
--- a/src/assembler_arm.h
+++ b/src/assembler_arm.h
@@ -424,8 +424,9 @@
// Store bytes from the given register onto the stack
void Store(FrameOffset dest, ManagedRegister src, size_t size);
-
void StoreRef(FrameOffset dest, ManagedRegister src);
+ void StoreRawPtr(FrameOffset dest, ManagedRegister src);
+
void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch);
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs);
@@ -454,20 +455,13 @@
void CreateStackHandle(FrameOffset out_off, FrameOffset handle_offset,
ManagedRegister scratch, bool null_allowed);
- void LoadReferenceFromStackHandle(ManagedRegister dst, ManagedRegister src,
- FrameOffset shb_offset);
+ void LoadReferenceFromStackHandle(ManagedRegister dst, ManagedRegister src);
void ValidateRef(ManagedRegister src, bool could_be_null);
void ValidateRef(FrameOffset src, bool could_be_null);
- void Call(ManagedRegister base, MemberOffset offset, ManagedRegister scratch);
-
- // Emit code that will lock the reference in the given frame location
- void LockReferenceOnStack(FrameOffset fr_offs);
-
- // Emit code that will unlock the reference in the given frame location
- void UnLockReferenceOnStack(FrameOffset fr_offs);
+ void Call(ManagedRegister base, Offset offset, ManagedRegister scratch);
// Emit data (e.g. encoded instruction or immediate) to the
// instruction stream.
diff --git a/src/assembler_x86.cc b/src/assembler_x86.cc
index 6f833ad..35e6129 100644
--- a/src/assembler_x86.cc
+++ b/src/assembler_x86.cc
@@ -1413,6 +1413,11 @@
movl(Address(ESP, dest), src.AsCpuRegister());
}
+void Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister src) {
+ CHECK(src.IsCpuRegister());
+ movl(Address(ESP, dest), src.AsCpuRegister());
+}
+
void Assembler::CopyRef(FrameOffset dest, FrameOffset src,
ManagedRegister scratch) {
CHECK(scratch.IsCpuRegister());
@@ -1546,8 +1551,7 @@
// Given a stack handle, load the associated reference.
void Assembler::LoadReferenceFromStackHandle(ManagedRegister out_reg,
- ManagedRegister in_reg,
- FrameOffset shb_offset) {
+ ManagedRegister in_reg) {
CHECK(out_reg.IsCpuRegister());
CHECK(in_reg.IsCpuRegister());
Label null_arg;
@@ -1568,21 +1572,11 @@
// TODO: not validating references
}
-void Assembler::Call(ManagedRegister base, MemberOffset offset,
+void Assembler::Call(ManagedRegister base, Offset offset,
ManagedRegister) {
CHECK(base.IsCpuRegister());
- call(Address(base.AsCpuRegister(), offset));
+ call(Address(base.AsCpuRegister(), offset.Int32Value()));
// TODO: place reference map on call
}
-// Emit code that will lock the reference in the given register
-void Assembler::LockReferenceOnStack(FrameOffset fr_offs) {
- LOG(FATAL) << "TODO";
-}
-// Emit code that will unlock the reference in the given register
-void Assembler::UnLockReferenceOnStack(FrameOffset fr_offs) {
- LOG(FATAL) << "TODO";
-}
-
-
} // namespace art
diff --git a/src/assembler_x86.h b/src/assembler_x86.h
index 981141a..85cc50b 100644
--- a/src/assembler_x86.h
+++ b/src/assembler_x86.h
@@ -419,6 +419,7 @@
// Store bytes from the given register onto the stack
void Store(FrameOffset offs, ManagedRegister src, size_t size);
void StoreRef(FrameOffset dest, ManagedRegister src);
+ void StoreRawPtr(FrameOffset dest, ManagedRegister src);
void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch);
@@ -455,19 +456,12 @@
void CreateStackHandle(FrameOffset out_off, FrameOffset handle_offset,
ManagedRegister scratch, bool null_allowed);
- void LoadReferenceFromStackHandle(ManagedRegister dst, ManagedRegister src,
- FrameOffset shb_offset);
+ void LoadReferenceFromStackHandle(ManagedRegister dst, ManagedRegister src);
void ValidateRef(ManagedRegister src, bool could_be_null);
void ValidateRef(FrameOffset src, bool could_be_null);
- void Call(ManagedRegister base, MemberOffset offset, ManagedRegister scratch);
-
- // Emit code that will lock the reference in the given frame location
- void LockReferenceOnStack(FrameOffset fr_offs);
-
- // Emit code that will unlock the reference in the given frame location
- void UnLockReferenceOnStack(FrameOffset fr_offs);
+ void Call(ManagedRegister base, Offset offset, ManagedRegister scratch);
void AddImmediate(Register reg, const Immediate& imm);
diff --git a/src/calling_convention.cc b/src/calling_convention.cc
index 4760a8c..ae6ac25 100644
--- a/src/calling_convention.cc
+++ b/src/calling_convention.cc
@@ -7,6 +7,8 @@
namespace art {
+// Managed runtime calling convention
+
size_t ManagedRuntimeCallingConvention::FrameSize() {
LOG(FATAL) << "Unimplemented";
return 0;
@@ -30,21 +32,22 @@
return GetMethod()->IsStatic() || (itr_position_ != 0);
}
-size_t ManagedRuntimeCallingConvention::CurrentParamSizeInBytes() {
- return GetMethod()->ParamSizeInBytes(itr_position_);
+size_t ManagedRuntimeCallingConvention::CurrentParamSize() {
+ return GetMethod()->ParamSize(itr_position_);
}
bool ManagedRuntimeCallingConvention::IsCurrentParamAReference() {
return GetMethod()->IsParamAReference(itr_position_);
}
+// JNI calling convention
size_t JniCallingConvention::FrameSize() {
// Return address and Method*
size_t frame_data_size = 2 * kPointerSize;
// Handles plus 2 words for SHB header
size_t handle_area_size = (HandleCount() + 2) * kPointerSize;
- return RoundUp(frame_data_size + handle_area_size, 16);
+ return RoundUp(frame_data_size + handle_area_size + SizeOfReturnValue(), 16);
}
size_t JniCallingConvention::OutArgSize() {
@@ -56,6 +59,12 @@
return method->NumReferenceArgs() + (method->IsStatic() ? 1 : 0);
}
+FrameOffset JniCallingConvention::ReturnValueSaveLocation() {
+ size_t start_of_shb = ShbLinkOffset().Int32Value() + kPointerSize;
+ size_t handle_size = kPointerSize * HandleCount(); // size excluding header
+ return FrameOffset(start_of_shb + handle_size);
+}
+
bool JniCallingConvention::HasNext() {
if (itr_position_ <= kObjectOrClass) {
return true;
@@ -108,12 +117,12 @@
return FrameOffset(result);
}
-unsigned int JniCallingConvention::CurrentParamSizeInBytes() {
+size_t JniCallingConvention::CurrentParamSize() {
if (itr_position_ <= kObjectOrClass) {
return kPointerSize; // JNIEnv or jobject/jclass
} else {
int arg_pos = itr_position_ - (GetMethod()->IsStatic() ? 2 : 1);
- return GetMethod()->ParamSizeInBytes(arg_pos);
+ return GetMethod()->ParamSize(arg_pos);
}
}
diff --git a/src/calling_convention.h b/src/calling_convention.h
index cf12692..4e2eab7 100644
--- a/src/calling_convention.h
+++ b/src/calling_convention.h
@@ -17,6 +17,8 @@
bool IsReturnAReference() const { return method_->IsReturnAReference(); }
+ size_t SizeOfReturnValue() const { return method_->ReturnSize(); }
+
// Register that holds the incoming method argument
ManagedRegister MethodRegister();
// Register that holds result of this method
@@ -66,7 +68,7 @@
bool IsCurrentParamInRegister();
bool IsCurrentParamOnStack();
bool IsCurrentParamPossiblyNull();
- size_t CurrentParamSizeInBytes();
+ size_t CurrentParamSize();
ManagedRegister CurrentParamRegister();
FrameOffset CurrentParamStackOffset();
@@ -77,10 +79,10 @@
// | incoming stack args | <-- Prior SP
// | { Spilled registers |
// | & return address } |
-// | { Saved JNI Env Data } |
+// | { Return value spill } | (live on return slow paths)
// | { Stack Handle Block |
// | ... |
-// | length/link } | (here to prior SP is frame size)
+// | num. refs./link } | (here to prior SP is frame size)
// | Method* | <-- Anchor SP written to thread
// | { Outgoing stack args |
// | ... } | <-- SP at point of call
@@ -98,6 +100,9 @@
size_t OutArgSize();
// Number of handles in stack handle block
size_t HandleCount();
+ // Location where the return value of a call can be squirreled if another
+ // call is made following the native call
+ FrameOffset ReturnValueSaveLocation();
// Iterator interface
bool HasNext();
@@ -105,7 +110,7 @@
bool IsCurrentParamAReference();
bool IsCurrentParamInRegister();
bool IsCurrentParamOnStack();
- size_t CurrentParamSizeInBytes();
+ size_t CurrentParamSize();
ManagedRegister CurrentParamRegister();
FrameOffset CurrentParamStackOffset();
diff --git a/src/common_test.h b/src/common_test.h
index 4531b39..ee22cb6 100644
--- a/src/common_test.h
+++ b/src/common_test.h
@@ -146,22 +146,23 @@
// native double fooDD(double x, double y);
// native Object fooIOO(int x, Object y, Object z);
// static native Object fooSIOO(int x, Object y, Object z);
+// static synchronized native Object fooSSIOO(int x, Object y, Object z);
// }
static const char kMyClassNativesDex[] =
- "ZGV4CjAzNQDuVKYsovltNxptaisjQasgppGak46k/n0sAwAAcAAAAHhWNBIAAAAAAAAAAJgCAAAS"
- "AAAAcAAAAAUAAAC4AAAABQAAAMwAAAAAAAAAAAAAAAgAAAAIAQAAAgAAAEgBAACkAQAAiAEAANYB"
- "AADeAQAA4QEAAOYBAADpAQAA7QEAAPIBAAD4AQAAAwIAABcCAAAlAgAAMgIAADUCAAA6AgAAQQIA"
- "AEcCAABOAgAAVgIAAAEAAAADAAAABwAAAAgAAAALAAAAAgAAAAAAAAC0AQAABAAAAAEAAAC8AQAA"
- "BQAAAAEAAADEAQAABgAAAAMAAADMAQAACwAAAAQAAAAAAAAAAgAEAAAAAAACAAQADAAAAAIAAAAN"
- "AAAAAgABAA4AAAACAAIADwAAAAIAAwAQAAAAAgADABEAAAADAAQAAAAAAAMAAAABAAAA/////wAA"
- "AAAKAAAAAAAAAGkCAAAAAAAAAgAAAAAAAAADAAAAAAAAAAkAAAAAAAAAcwIAAAAAAAABAAEAAAAA"
- "AF8CAAABAAAADgAAAAEAAQABAAAAZAIAAAQAAABwEAcAAAAOAAIAAAAAAAAAAQAAAAEAAAACAAAA"
- "AQABAAMAAAABAAMAAwAGPGluaXQ+AAFEAANEREQAAUkAAklJAANJSUkABExJTEwACUxNeUNsYXNz"
- "OwASTGphdmEvbGFuZy9PYmplY3Q7AAxNeUNsYXNzLmphdmEAC09iamVjdC5qYXZhAAFWAANmb28A"
- "BWZvb0REAARmb29JAAVmb29JSQAGZm9vSU9PAAdmb29TSU9PAAMABw4AAQAHDgAAAAEAB4GABIgD"
- "AAACBQCAgAScAwaIAgABgAIAAYACAAGAAgABgAIAAYACAAAAAAwAAAAAAAAAAQAAAAAAAAABAAAA"
- "EgAAAHAAAAACAAAABQAAALgAAAADAAAABQAAAMwAAAAFAAAACAAAAAgBAAAGAAAAAgAAAEgBAAAB"
- "IAAAAgAAAIgBAAABEAAABAAAALQBAAACIAAAEgAAANYBAAADIAAAAgAAAF8CAAAAIAAAAgAAAGkC"
- "AAAAEAAAAQAAAJgCAAA=";
+ "ZGV4CjAzNQA4WWrpXgdlkoTHR8Yubx4LJO4HbGsX1p1EAwAAcAAAAHhWNBIAAAAAAAAAALACAAAT"
+ "AAAAcAAAAAUAAAC8AAAABQAAANAAAAAAAAAAAAAAAAkAAAAMAQAAAgAAAFQBAACwAQAAlAEAAOIB"
+ "AADqAQAA7QEAAPIBAAD1AQAA+QEAAP4BAAAEAgAADwIAACMCAAAxAgAAPgIAAEECAABGAgAATQIA"
+ "AFMCAABaAgAAYgIAAGsCAAABAAAAAwAAAAcAAAAIAAAACwAAAAIAAAAAAAAAwAEAAAQAAAABAAAA"
+ "yAEAAAUAAAABAAAA0AEAAAYAAAADAAAA2AEAAAsAAAAEAAAAAAAAAAIABAAAAAAAAgAEAAwAAAAC"
+ "AAAADQAAAAIAAQAOAAAAAgACAA8AAAACAAMAEAAAAAIAAwARAAAAAgADABIAAAADAAQAAAAAAAMA"
+ "AAABAAAA/////wAAAAAKAAAAAAAAAH8CAAAAAAAAAgAAAAAAAAADAAAAAAAAAAkAAAAAAAAAiQIA"
+ "AAAAAAABAAEAAAAAAHUCAAABAAAADgAAAAEAAQABAAAAegIAAAQAAABwEAgAAAAOAAIAAAAAAAAA"
+ "AQAAAAEAAAACAAAAAQABAAMAAAABAAMAAwAGPGluaXQ+AAFEAANEREQAAUkAAklJAANJSUkABExJ"
+ "TEwACUxNeUNsYXNzOwASTGphdmEvbGFuZy9PYmplY3Q7AAxNeUNsYXNzLmphdmEAC09iamVjdC5q"
+ "YXZhAAFWAANmb28ABWZvb0REAARmb29JAAVmb29JSQAGZm9vSU9PAAdmb29TSU9PAAhmb29TU0lP"
+ "TwADAAcOAAEABw4AAAABAAiBgASUAwAAAwUAgIAEqAMGiAIAAaiCCAABgAIAAYACAAGAAgABgAIA"
+ "AYACAAwAAAAAAAAAAQAAAAAAAAABAAAAEwAAAHAAAAACAAAABQAAALwAAAADAAAABQAAANAAAAAF"
+ "AAAACQAAAAwBAAAGAAAAAgAAAFQBAAABIAAAAgAAAJQBAAABEAAABAAAAMABAAACIAAAEwAAAOIB"
+ "AAADIAAAAgAAAHUCAAAAIAAAAgAAAH8CAAAAEAAAAQAAALACAAA=";
} // namespace art
diff --git a/src/jni_compiler.cc b/src/jni_compiler.cc
index 67f8bc8..5385a8b 100644
--- a/src/jni_compiler.cc
+++ b/src/jni_compiler.cc
@@ -4,6 +4,7 @@
#include <sys/mman.h>
#include "src/assembler.h"
#include "src/calling_convention.h"
+#include "src/jni_internal.h"
#include "src/macros.h"
#include "src/managed_register.h"
#include "src/logging.h"
@@ -85,26 +86,54 @@
jni_conv.Next();
}
- // 5. Acquire lock for synchronized methods. Done here as references are held
- // live in handle block but we're in managed code and can work on
- // references
- if (native_method->IsSynchronized()) {
- jni_conv.ResetIterator(FrameOffset(0));
- jni_conv.Next(); // skip JNI environment
- jni_asm->LockReferenceOnStack(jni_conv.CurrentParamHandleOffset());
- }
-
- // 6. Transition from being in managed to native code
+ // 5. Transition from being in managed to native code
// TODO: write out anchor, ensure the transition to native follows a store
// fence.
jni_asm->StoreImmediateToThread(Thread::StateOffset(), Thread::kNative,
mr_conv.InterproceduralScratchRegister());
- // 7. Move frame down to allow space for out going args. Do for as short a
+ // 6. Move frame down to allow space for outgoing args. Do this for as short a
time as possible to aid profiling.
const size_t out_arg_size = jni_conv.OutArgSize();
jni_asm->IncreaseFrameSize(out_arg_size);
+ // 7. Acquire lock for synchronized methods.
+ if (native_method->IsSynchronized()) {
+ mr_conv.ResetIterator(FrameOffset(frame_size+out_arg_size));
+ jni_conv.ResetIterator(FrameOffset(out_arg_size));
+ jni_conv.Next(); // Skip JNIEnv*
+ // Get stack handle for 1st argument
+ if (is_static) {
+ FrameOffset handle_offset = jni_conv.CurrentParamHandleOffset();
+ if (jni_conv.IsCurrentParamOnStack()) {
+ FrameOffset out_off = jni_conv.CurrentParamStackOffset();
+ jni_asm->CreateStackHandle(out_off, handle_offset,
+ mr_conv.InterproceduralScratchRegister(),
+ false);
+ } else {
+ ManagedRegister out_reg = jni_conv.CurrentParamRegister();
+ jni_asm->CreateStackHandle(out_reg, handle_offset,
+ ManagedRegister::NoRegister(), false);
+ }
+ } else {
+ CopyParameter(jni_asm, &mr_conv, &jni_conv, frame_size, out_arg_size);
+ }
+ // Generate JNIEnv* in place and leave a copy in jni_env_register
+ ManagedRegister jni_env_register =
+ jni_conv.InterproceduralScratchRegister();
+ if (jni_conv.IsCurrentParamInRegister()) {
+ jni_env_register = jni_conv.CurrentParamRegister();
+ }
+ jni_asm->LoadRawPtrFromThread(jni_env_register, Thread::JniEnvOffset());
+ if (!jni_conv.IsCurrentParamInRegister()) {
+ FrameOffset out_off = jni_conv.CurrentParamStackOffset();
+ jni_asm->StoreRawPtr(out_off, jni_env_register);
+ }
+ // Call JNIEnv*->MonitorEnter(JNIEnv*, object)
+ jni_asm->Call(jni_env_register, JniEnvironment::MonitorEnterOffset(),
+ jni_conv.InterproceduralScratchRegister());
+ }
+
// 8. Iterate over arguments placing values from managed calling convention in
// to the convention required for a native call (shuffling). For references
// place an index/pointer to the reference after checking whether it is
@@ -130,79 +159,7 @@
}
while (mr_conv.HasNext()) {
CHECK(jni_conv.HasNext());
- bool input_in_reg = mr_conv.IsCurrentParamInRegister();
- bool output_in_reg = jni_conv.IsCurrentParamInRegister();
- FrameOffset handle_offset(0);
- bool null_allowed = false;
- bool ref_param = jni_conv.IsCurrentParamAReference();
- CHECK(!ref_param || mr_conv.IsCurrentParamAReference());
- CHECK(input_in_reg || mr_conv.IsCurrentParamOnStack());
- CHECK(output_in_reg || jni_conv.IsCurrentParamOnStack());
- // References need handlerization and the handle address passing
- if (ref_param) {
- null_allowed = mr_conv.IsCurrentParamPossiblyNull();
- // Compute handle offset. Note null is placed in the SHB but the jobject
- // passed to the native code must be null (not a pointer into the SHB
- // as with regular references).
- handle_offset = jni_conv.CurrentParamHandleOffset();
- // Check handle offset is within frame.
- CHECK_LT(handle_offset.Uint32Value(), (frame_size+out_arg_size));
- }
- if (input_in_reg && output_in_reg) {
- LOG(FATAL) << "UNTESTED";
- ManagedRegister in_reg = mr_conv.CurrentParamRegister();
- ManagedRegister out_reg = jni_conv.CurrentParamRegister();
- if (ref_param) {
- jni_asm->CreateStackHandle(out_reg, handle_offset, in_reg,
- null_allowed);
- } else {
- jni_asm->Move(out_reg, in_reg);
- }
- } else if (!input_in_reg && !output_in_reg) {
- FrameOffset out_off = jni_conv.CurrentParamStackOffset();
- if (ref_param) {
- jni_asm->CreateStackHandle(out_off, handle_offset,
- mr_conv.InterproceduralScratchRegister(),
- null_allowed);
- } else {
- FrameOffset in_off = mr_conv.CurrentParamStackOffset();
- size_t param_size = mr_conv.CurrentParamSizeInBytes();
- CHECK_EQ(param_size, jni_conv.CurrentParamSizeInBytes());
- jni_asm->Copy(out_off, in_off, mr_conv.InterproceduralScratchRegister(),
- param_size);
- }
- } else if (!input_in_reg && output_in_reg) {
- LOG(FATAL) << "UNTESTED";
- FrameOffset in_off = mr_conv.CurrentParamStackOffset();
- ManagedRegister out_reg = jni_conv.CurrentParamRegister();
- // Check that incoming stack arguments are above the current stack frame.
- CHECK_GT(in_off.Uint32Value(), frame_size);
- if (ref_param) {
- jni_asm->CreateStackHandle(out_reg, handle_offset,
- ManagedRegister::NoRegister(), null_allowed);
- } else {
- unsigned int param_size = mr_conv.CurrentParamSizeInBytes();
- CHECK_EQ(param_size, jni_conv.CurrentParamSizeInBytes());
- jni_asm->Load(out_reg, in_off, param_size);
- }
- } else {
- LOG(FATAL) << "UNTESTED";
- CHECK(input_in_reg && !output_in_reg);
- ManagedRegister in_reg = mr_conv.CurrentParamRegister();
- FrameOffset out_off = jni_conv.CurrentParamStackOffset();
- // Check outgoing argument is within frame
- CHECK_LT(out_off.Uint32Value(), frame_size);
- if (ref_param) {
- // TODO: recycle value in in_reg rather than reload from handle
- jni_asm->CreateStackHandle(out_off, handle_offset,
- mr_conv.InterproceduralScratchRegister(),
- null_allowed);
- } else {
- size_t param_size = mr_conv.CurrentParamSizeInBytes();
- CHECK_EQ(param_size, jni_conv.CurrentParamSizeInBytes());
- jni_asm->Store(out_off, in_reg, param_size);
- }
- }
+ CopyParameter(jni_asm, &mr_conv, &jni_conv, frame_size, out_arg_size);
mr_conv.Next();
jni_conv.Next();
}
@@ -221,8 +178,55 @@
jni_asm->Call(mr_conv.MethodRegister(), Method::NativeMethodOffset(),
mr_conv.InterproceduralScratchRegister());
+ // 11. Release lock for synchronized methods.
+ if (native_method->IsSynchronized()) {
+ mr_conv.ResetIterator(FrameOffset(frame_size+out_arg_size));
+ jni_conv.ResetIterator(FrameOffset(out_arg_size));
+ jni_conv.Next(); // Skip JNIEnv*
+ // Save return value
+ FrameOffset return_save_location = jni_conv.ReturnValueSaveLocation();
+ CHECK_LT(return_save_location.Uint32Value(), frame_size+out_arg_size);
+ jni_asm->Store(return_save_location, jni_conv.ReturnRegister(),
+ jni_conv.SizeOfReturnValue());
+ // Get stack handle for 1st argument
+ if (is_static) {
+ FrameOffset handle_offset = jni_conv.CurrentParamHandleOffset();
+ if (jni_conv.IsCurrentParamOnStack()) {
+ FrameOffset out_off = jni_conv.CurrentParamStackOffset();
+ jni_asm->CreateStackHandle(out_off, handle_offset,
+ mr_conv.InterproceduralScratchRegister(),
+ false);
+ } else {
+ ManagedRegister out_reg = jni_conv.CurrentParamRegister();
+ jni_asm->CreateStackHandle(out_reg, handle_offset,
+ ManagedRegister::NoRegister(), false);
+ }
+ } else {
+ CopyParameter(jni_asm, &mr_conv, &jni_conv, frame_size, out_arg_size);
+ }
+ // Generate JNIEnv* in place and leave a copy in jni_env_register
+ ManagedRegister jni_env_register =
+ jni_conv.InterproceduralScratchRegister();
+ if (jni_conv.IsCurrentParamInRegister()) {
+ jni_env_register = jni_conv.CurrentParamRegister();
+ }
+ jni_asm->LoadRawPtrFromThread(jni_env_register, Thread::JniEnvOffset());
+ if (!jni_conv.IsCurrentParamInRegister()) {
+ FrameOffset out_off = jni_conv.CurrentParamStackOffset();
+ jni_asm->StoreRawPtr(out_off, jni_env_register);
+ }
+ // Call JNIEnv*->MonitorExit(JNIEnv*, object)
+ jni_asm->Call(jni_env_register, JniEnvironment::MonitorExitOffset(),
+ jni_conv.InterproceduralScratchRegister());
+ // Reload return value
+ jni_asm->Load(jni_conv.ReturnRegister(), return_save_location,
+ jni_conv.SizeOfReturnValue());
+ }
+
// 11. Release outgoing argument area
jni_asm->DecreaseFrameSize(out_arg_size);
+ mr_conv.ResetIterator(FrameOffset(frame_size));
+ jni_conv.ResetIterator(FrameOffset(0));
// 12. Transition from being in native to managed code, possibly entering a
// safepoint
@@ -230,21 +234,10 @@
mr_conv.InterproceduralScratchRegister());
// TODO: check for safepoint transition
- // 13. Move to first handle offset
- jni_conv.ResetIterator(FrameOffset(0));
- jni_conv.Next(); // skip JNI environment
-
- // 14. Release lock for synchronized methods (done in the managed state so
- // references can be touched)
- if (native_method->IsSynchronized()) {
- jni_asm->UnLockReferenceOnStack(jni_conv.CurrentParamHandleOffset());
- }
-
// 15. Place result in correct register possibly dehandlerizing
if (jni_conv.IsReturnAReference()) {
jni_asm->LoadReferenceFromStackHandle(mr_conv.ReturnRegister(),
- jni_conv.ReturnRegister(),
- jni_conv.CurrentParamHandleOffset());
+ jni_conv.ReturnRegister());
} else {
jni_asm->Move(mr_conv.ReturnRegister(), jni_conv.ReturnRegister());
}
@@ -263,6 +256,86 @@
native_method->SetCode(code.pointer());
}
+// Copy a single parameter from the managed to the JNI calling convention
+void JniCompiler::CopyParameter(Assembler* jni_asm,
+ ManagedRuntimeCallingConvention* mr_conv,
+ JniCallingConvention* jni_conv,
+ size_t frame_size, size_t out_arg_size) {
+ bool input_in_reg = mr_conv->IsCurrentParamInRegister();
+ bool output_in_reg = jni_conv->IsCurrentParamInRegister();
+ FrameOffset handle_offset(0);
+ bool null_allowed = false;
+ bool ref_param = jni_conv->IsCurrentParamAReference();
+ CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
+ CHECK(input_in_reg || mr_conv->IsCurrentParamOnStack());
+ CHECK(output_in_reg || jni_conv->IsCurrentParamOnStack());
+ // References need handlerization, and it is the handle address that is passed
+ if (ref_param) {
+ null_allowed = mr_conv->IsCurrentParamPossiblyNull();
+ // Compute handle offset. Note null is placed in the SHB but the jobject
+ // passed to the native code must be null (not a pointer into the SHB
+ // as with regular references).
+ handle_offset = jni_conv->CurrentParamHandleOffset();
+ // Check handle offset is within frame.
+ CHECK_LT(handle_offset.Uint32Value(), (frame_size+out_arg_size));
+ }
+ if (input_in_reg && output_in_reg) {
+ LOG(FATAL) << "UNTESTED";
+ ManagedRegister in_reg = mr_conv->CurrentParamRegister();
+ ManagedRegister out_reg = jni_conv->CurrentParamRegister();
+ if (ref_param) {
+ jni_asm->CreateStackHandle(out_reg, handle_offset, in_reg,
+ null_allowed);
+ } else {
+ jni_asm->Move(out_reg, in_reg);
+ }
+ } else if (!input_in_reg && !output_in_reg) {
+ FrameOffset out_off = jni_conv->CurrentParamStackOffset();
+ if (ref_param) {
+ jni_asm->CreateStackHandle(out_off, handle_offset,
+ mr_conv->InterproceduralScratchRegister(),
+ null_allowed);
+ } else {
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ size_t param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ jni_asm->Copy(out_off, in_off, mr_conv->InterproceduralScratchRegister(),
+ param_size);
+ }
+ } else if (!input_in_reg && output_in_reg) {
+ LOG(FATAL) << "UNTESTED";
+ FrameOffset in_off = mr_conv->CurrentParamStackOffset();
+ ManagedRegister out_reg = jni_conv->CurrentParamRegister();
+ // Check that incoming stack arguments are above the current stack frame.
+ CHECK_GT(in_off.Uint32Value(), frame_size);
+ if (ref_param) {
+ jni_asm->CreateStackHandle(out_reg, handle_offset,
+ ManagedRegister::NoRegister(), null_allowed);
+ } else {
+ unsigned int param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ jni_asm->Load(out_reg, in_off, param_size);
+ }
+ } else {
+ LOG(FATAL) << "UNTESTED";
+ CHECK(input_in_reg && !output_in_reg);
+ ManagedRegister in_reg = mr_conv->CurrentParamRegister();
+ FrameOffset out_off = jni_conv->CurrentParamStackOffset();
+ // Check outgoing argument is within frame
+ CHECK_LT(out_off.Uint32Value(), frame_size);
+ if (ref_param) {
+ // TODO: recycle value in in_reg rather than reload from handle
+ jni_asm->CreateStackHandle(out_off, handle_offset,
+ mr_conv->InterproceduralScratchRegister(),
+ null_allowed);
+ } else {
+ size_t param_size = mr_conv->CurrentParamSize();
+ CHECK_EQ(param_size, jni_conv->CurrentParamSize());
+ jni_asm->Store(out_off, in_reg, param_size);
+ }
+ }
+}
+
void* JniCompiler::AllocateCode(size_t size) {
CHECK_LT(((jni_code_top_ - jni_code_) + size), jni_code_size_);
void *result = jni_code_top_;
diff --git a/src/jni_compiler.h b/src/jni_compiler.h
index 4a6b1ba..48666aa 100644
--- a/src/jni_compiler.h
+++ b/src/jni_compiler.h
@@ -3,6 +3,7 @@
#ifndef ART_SRC_JNI_COMPILER_H_
#define ART_SRC_JNI_COMPILER_H_
+#include "calling_convention.h"
#include "globals.h"
#include "macros.h"
@@ -19,7 +20,14 @@
JniCompiler();
~JniCompiler();
void Compile(Assembler* jni_asm, Method* method);
+
private:
+ // Copy a single parameter from the managed to the JNI calling convention
+ void CopyParameter(Assembler* jni_asm,
+ ManagedRuntimeCallingConvention* mr_conv,
+ JniCallingConvention* jni_conv,
+ size_t frame_size, size_t out_arg_size);
+
// A poor man's code cache
void* AllocateCode(size_t size);
diff --git a/src/jni_compiler_test.cc b/src/jni_compiler_test.cc
index 59d8c2b..23615ed 100644
--- a/src/jni_compiler_test.cc
+++ b/src/jni_compiler_test.cc
@@ -163,6 +163,22 @@
}
}
+int gJava_MyClass_fooSSIOO_calls = 0;
+jobject Java_MyClass_fooSSIOO(JNIEnv*, jclass klass, jint x, jobject y,
+ jobject z) {
+ EXPECT_EQ(3u, Thread::Current()->NumShbHandles());
+ EXPECT_EQ(Thread::kNative, Thread::Current()->GetState());
+ gJava_MyClass_fooSSIOO_calls++;
+ switch (x) {
+ case 1:
+ return y;
+ case 2:
+ return z;
+ default:
+ return klass;
+ }
+}
+
TEST_F(JniCompilerTest, CompileAndRunNoArgMethod) {
scoped_ptr<DexFile> dex(OpenDexFileBase64(kMyClassNativesDex));
scoped_ptr<ClassLinker> linker(ClassLinker::Create());
@@ -387,4 +403,57 @@
EXPECT_EQ(7, gJava_MyClass_fooSIOO_calls);
}
+TEST_F(JniCompilerTest, CompileAndRunStaticSynchronizedIntObjectObjectMethod) {
+ scoped_ptr<DexFile> dex(OpenDexFileBase64(kMyClassNativesDex));
+ scoped_ptr<ClassLinker> linker(ClassLinker::Create());
+ linker->AppendToClassPath(dex.get());
+ Class* klass = linker->FindClass("LMyClass;", NULL);
+ Method* method = klass->FindDirectMethod("fooSSIOO");
+
+ Assembler jni_asm;
+ JniCompiler jni_compiler;
+ jni_compiler.Compile(&jni_asm, method);
+
+ // TODO: should really use JNIEnv to RegisterNative, but the full story
+ // there is missing, so hack the RegisterNative below
+ method->RegisterNative(reinterpret_cast<void*>(&Java_MyClass_fooSSIOO));
+
+ jvalue a, b, c, d;
+ a.i = 0;
+ b.l = (jobject)NULL;
+ c.l = (jobject)NULL;
+ EXPECT_EQ(0, gJava_MyClass_fooSSIOO_calls);
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)method->GetClass(), d.l);
+ EXPECT_EQ(1, gJava_MyClass_fooSSIOO_calls);
+ a.i = 0;
+ b.l = (jobject)NULL;
+ c.l = (jobject)16;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)method->GetClass(), d.l);
+ EXPECT_EQ(2, gJava_MyClass_fooSSIOO_calls);
+ a.i = 1;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)NULL, d.l);
+ EXPECT_EQ(3, gJava_MyClass_fooSSIOO_calls);
+ a.i = 2;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)16, d.l);
+ EXPECT_EQ(4, gJava_MyClass_fooSSIOO_calls);
+ a.i = 0;
+ b.l = (jobject)16;
+ c.l = (jobject)NULL;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)method->GetClass(), d.l);
+ EXPECT_EQ(5, gJava_MyClass_fooSSIOO_calls);
+ a.i = 1;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)16, d.l);
+ EXPECT_EQ(6, gJava_MyClass_fooSSIOO_calls);
+ a.i = 2;
+ d = RunMethod(method, a, b, c, a);
+ ASSERT_EQ((jobject)NULL, d.l);
+ EXPECT_EQ(7, gJava_MyClass_fooSSIOO_calls);
+}
+
} // namespace art
diff --git a/src/jni_internal.cc b/src/jni_internal.cc
new file mode 100644
index 0000000..7a5025a
--- /dev/null
+++ b/src/jni_internal.cc
@@ -0,0 +1,21 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+
+#include "src/jni_internal.h"
+#include "src/logging.h"
+
+namespace art {
+
+static void JniMonitorEnter(JniEnvironment*, jobject) {
+ LOG(WARNING) << "Unimplemented: JNI Monitor Enter";
+}
+
+static void JniMonitorExit(JniEnvironment*, jobject) {
+ LOG(WARNING) << "Unimplemented: JNI Monitor Exit";
+}
+
+JniEnvironment::JniEnvironment() {
+ monitor_enter_ = &JniMonitorEnter;
+ monitor_exit_ = &JniMonitorExit;
+}
+
+} // namespace art
diff --git a/src/jni_internal.h b/src/jni_internal.h
new file mode 100644
index 0000000..64941cc
--- /dev/null
+++ b/src/jni_internal.h
@@ -0,0 +1,32 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+
+#ifndef ART_SRC_JNI_INTERNAL_H_
+#define ART_SRC_JNI_INTERNAL_H_
+
+#include "jni.h"
+#include "src/assembler.h"
+#include "src/macros.h"
+
+namespace art {
+
+// TODO: This is a placeholder for a true JNIEnv used to provide limited
+// functionality for the JNI compiler
+class JniEnvironment {
+ public:
+ explicit JniEnvironment();
+
+ static Offset MonitorEnterOffset() {
+ return Offset(OFFSETOF_MEMBER(JniEnvironment, monitor_enter_));
+ }
+
+ static Offset MonitorExitOffset() {
+ return Offset(OFFSETOF_MEMBER(JniEnvironment, monitor_exit_));
+ }
+
+ private:
+ void (*monitor_enter_)(JniEnvironment*, jobject);
+ void (*monitor_exit_)(JniEnvironment*, jobject);
+};
+
+} // namespace art
+#endif // ART_SRC_JNI_INTERNAL_H_
diff --git a/src/object.cc b/src/object.cc
index 95e8b67..c8bd353 100644
--- a/src/object.cc
+++ b/src/object.cc
@@ -1,8 +1,8 @@
// Copyright 2011 Google Inc. All Rights Reserved.
#include "src/object.h"
-#include <algorithm>
#include <string.h>
+#include <algorithm>
#include "src/globals.h"
#include "src/logging.h"
#include "src/dex_file.h"
@@ -127,14 +127,8 @@
return (shorty_[param] == 'J') || (shorty_[param] == 'D');
}
-size_t Method::ParamSizeInBytes(unsigned int param) const {
- CHECK_LT(param, NumArgs());
- if (IsStatic()) {
- param++; // 0th argument must skip return value at start of the shorty
- } else if (param == 0) {
- return kPointerSize; // this argument
- }
- switch (shorty_[param]) {
+static size_t ShortyCharToSize(char x) {
+ switch (x) {
case '[': return kPointerSize;
case 'L': return kPointerSize;
case 'D': return 8;
@@ -143,6 +137,20 @@
}
}
+size_t Method::ParamSize(unsigned int param) const {
+ CHECK_LT(param, NumArgs());
+ if (IsStatic()) {
+ param++; // 0th argument must skip return value at start of the shorty
+ } else if (param == 0) {
+ return kPointerSize; // this argument
+ }
+ return ShortyCharToSize(shorty_[param]);
+}
+
+size_t Method::ReturnSize() const {
+ return ShortyCharToSize(shorty_[0]);
+}
+
bool Method::HasSameArgumentTypes(const Method* that) const {
const RawDexFile* raw1 = this->GetClass()->GetDexFile()->GetRaw();
const RawDexFile::ProtoId& proto1 = raw1->GetProtoId(this->proto_idx_);
diff --git a/src/object.h b/src/object.h
index 31d4390..49940e0 100644
--- a/src/object.h
+++ b/src/object.h
@@ -436,7 +436,11 @@
// Is the given method parameter a long or double?
bool IsParamALongOrDouble(unsigned int param) const;
- size_t ParamSizeInBytes(unsigned int param) const;
+ // Size in bytes of the given parameter
+ size_t ParamSize(unsigned int param) const;
+
+ // Size in bytes of the return value
+ size_t ReturnSize() const;
void SetCode(const void* code) {
code_ = code;
diff --git a/src/thread.h b/src/thread.h
index ab0bacc..3f962d5 100644
--- a/src/thread.h
+++ b/src/thread.h
@@ -9,6 +9,7 @@
#include "src/globals.h"
#include "src/heap.h"
+#include "src/jni_internal.h"
#include "src/logging.h"
#include "src/macros.h"
#include "src/runtime.h"
@@ -179,7 +180,7 @@
}
// JNI methods
- JNIEnv* GetJniEnv() const {
+ JniEnvironment* GetJniEnv() const {
return jni_env_;
}
@@ -204,10 +205,12 @@
private:
Thread() :
- thread_id_(1234), top_shb_(NULL),
- jni_env_(reinterpret_cast<JNIEnv*>(0xEBADC0DE)), exception_(NULL) {
+ thread_id_(1234), top_shb_(NULL), exception_(NULL) {
+ jni_env_ = new JniEnvironment();
}
- ~Thread() {}
+ ~Thread() {
+ delete jni_env_;
+ }
void InitCpu();
@@ -224,7 +227,7 @@
StackHandleBlock* top_shb_;
// Every thread may have an associated JNI environment
- JNIEnv* jni_env_;
+ JniEnvironment* jni_env_;
State state_;