blob: 2e6e262451779cfb3b9cfe66e57826893765a623 [file] [log] [blame]
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_MIRROR_ABSTRACT_METHOD_H_
18#define ART_RUNTIME_MIRROR_ABSTRACT_METHOD_H_
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019
20#include "class.h"
Jeff Hao790ad902013-05-22 15:02:08 -070021#include "dex_file.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080022#include "invoke_type.h"
23#include "locks.h"
24#include "modifiers.h"
25#include "object.h"
26
27namespace art {
28
29struct AbstractMethodOffsets;
30struct ConstructorMethodOffsets;
31union JValue;
32struct MethodClassOffsets;
Jeff Hao790ad902013-05-22 15:02:08 -070033class MethodHelper;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034struct MethodOffsets;
35class StringPiece;
Jeff Hao16743632013-05-08 10:59:04 -070036class ShadowFrame;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080037
38namespace mirror {
39
40class StaticStorageBase;
41
Jeff Hao790ad902013-05-22 15:02:08 -070042typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
43 const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);
Jeff Hao16743632013-05-08 10:59:04 -070044
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080045// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
46class MANAGED AbstractMethod : public Object {
47 public:
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080048 Class* GetDeclaringClass() const;
49
50 void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
51
52 static MemberOffset DeclaringClassOffset() {
53 return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
54 }
55
Jeff Haoaa4a7932013-05-13 11:28:27 -070056 static MemberOffset EntryPointFromCompiledCodeOffset() {
57 return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, entry_point_from_compiled_code_));
Jeff Hao5d917302013-02-27 17:57:33 -080058 }
59
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080060 uint32_t GetAccessFlags() const;
61
62 void SetAccessFlags(uint32_t new_access_flags) {
63 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
64 }
65
66 // Approximate what kind of method call would be used for this method.
67 InvokeType GetInvokeType() const;
68
69 // Returns true if the method is declared public.
70 bool IsPublic() const {
71 return (GetAccessFlags() & kAccPublic) != 0;
72 }
73
74 // Returns true if the method is declared private.
75 bool IsPrivate() const {
76 return (GetAccessFlags() & kAccPrivate) != 0;
77 }
78
79 // Returns true if the method is declared static.
80 bool IsStatic() const {
81 return (GetAccessFlags() & kAccStatic) != 0;
82 }
83
84 // Returns true if the method is a constructor.
85 bool IsConstructor() const {
86 return (GetAccessFlags() & kAccConstructor) != 0;
87 }
88
89 // Returns true if the method is static, private, or a constructor.
90 bool IsDirect() const {
91 return IsDirect(GetAccessFlags());
92 }
93
94 static bool IsDirect(uint32_t access_flags) {
95 return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
96 }
97
98 // Returns true if the method is declared synchronized.
99 bool IsSynchronized() const {
100 uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
101 return (GetAccessFlags() & synchonized) != 0;
102 }
103
104 bool IsFinal() const {
105 return (GetAccessFlags() & kAccFinal) != 0;
106 }
107
108 bool IsMiranda() const {
109 return (GetAccessFlags() & kAccMiranda) != 0;
110 }
111
112 bool IsNative() const {
113 return (GetAccessFlags() & kAccNative) != 0;
114 }
115
116 bool IsAbstract() const {
117 return (GetAccessFlags() & kAccAbstract) != 0;
118 }
119
120 bool IsSynthetic() const {
121 return (GetAccessFlags() & kAccSynthetic) != 0;
122 }
123
124 bool IsProxyMethod() const;
125
Sebastien Hertz233ea8e2013-06-06 11:57:09 +0200126 bool IsPreverified() const {
127 return (GetAccessFlags() & kAccPreverified) != 0;
128 }
129
130 void SetPreverified() {
131 SetAccessFlags(GetAccessFlags() | kAccPreverified);
132 }
133
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800134 bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
135
136 uint16_t GetMethodIndex() const;
137
138 size_t GetVtableIndex() const {
139 return GetMethodIndex();
140 }
141
142 void SetMethodIndex(uint16_t new_method_index) {
143 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
144 }
145
146 static MemberOffset MethodIndexOffset() {
147 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
148 }
149
150 uint32_t GetCodeItemOffset() const {
151 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
152 }
153
154 void SetCodeItemOffset(uint32_t new_code_off) {
155 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
156 }
157
158 // Number of 32bit registers that would be required to hold all the arguments
159 static size_t NumArgRegisters(const StringPiece& shorty);
160
161 uint32_t GetDexMethodIndex() const;
162
163 void SetDexMethodIndex(uint32_t new_idx) {
164 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
165 }
166
167 ObjectArray<String>* GetDexCacheStrings() const;
168 void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
169 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
170
171 static MemberOffset DexCacheStringsOffset() {
172 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
173 }
174
175 static MemberOffset DexCacheResolvedMethodsOffset() {
176 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
177 }
178
179 static MemberOffset DexCacheResolvedTypesOffset() {
180 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
181 }
182
183 static MemberOffset DexCacheInitializedStaticStorageOffset() {
184 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod,
185 dex_cache_initialized_static_storage_);
186 }
187
188 ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
189 void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
190 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
191
192 ObjectArray<Class>* GetDexCacheResolvedTypes() const;
193 void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
194 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
195
196 ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
197 void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
198 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
199
200 // Find the method that this method overrides
201 AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
202
Jeff Hao6474d192013-03-26 14:08:09 -0700203 void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800204 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
205
Jeff Hao16743632013-05-08 10:59:04 -0700206 EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
207 return GetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_), false);
208 }
209
210 void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
211 SetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_), entry_point_from_interpreter, false);
212 }
213
Jeff Haoaa4a7932013-05-13 11:28:27 -0700214 const void* GetEntryPointFromCompiledCode() const {
215 return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800216 }
217
Jeff Haoaa4a7932013-05-13 11:28:27 -0700218 void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
219 SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_), entry_point_from_compiled_code, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800220 }
221
222 uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
223
224 bool IsWithinCode(uintptr_t pc) const
225 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Jeff Haoaa4a7932013-05-13 11:28:27 -0700226 uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800227 if (code == 0) {
228 return pc == 0;
229 }
230 /*
231 * During a stack walk, a return PC may point to the end of the code + 1
232 * (in the case that the last instruction is a call that isn't expected to
233 * return. Thus, we check <= code + GetCodeSize().
234 */
235 return (code <= pc && pc <= code + GetCodeSize());
236 }
237
238 void AssertPcIsWithinCode(uintptr_t pc) const
239 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
240
241 uint32_t GetOatCodeOffset() const;
242
243 void SetOatCodeOffset(uint32_t code_offset);
244
Jeff Haoaa4a7932013-05-13 11:28:27 -0700245 static MemberOffset GetEntryPointFromCompiledCodeOffset() {
246 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800247 }
248
249 const uint32_t* GetMappingTable() const {
250 const uint32_t* map = GetMappingTableRaw();
251 if (map == NULL) {
252 return map;
253 }
254 return map + 1;
255 }
256
257 uint32_t GetPcToDexMappingTableLength() const {
258 const uint32_t* map = GetMappingTableRaw();
259 if (map == NULL) {
260 return 0;
261 }
262 return map[2];
263 }
264
265 const uint32_t* GetPcToDexMappingTable() const {
266 const uint32_t* map = GetMappingTableRaw();
267 if (map == NULL) {
268 return map;
269 }
270 return map + 3;
271 }
272
273
274 uint32_t GetDexToPcMappingTableLength() const {
275 const uint32_t* map = GetMappingTableRaw();
276 if (map == NULL) {
277 return 0;
278 }
279 return map[1] - map[2];
280 }
281
282 const uint32_t* GetDexToPcMappingTable() const {
283 const uint32_t* map = GetMappingTableRaw();
284 if (map == NULL) {
285 return map;
286 }
287 return map + 3 + map[2];
288 }
289
290
291 const uint32_t* GetMappingTableRaw() const {
292 return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
293 }
294
295 void SetMappingTable(const uint32_t* mapping_table) {
296 SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
297 mapping_table, false);
298 }
299
300 uint32_t GetOatMappingTableOffset() const;
301
302 void SetOatMappingTableOffset(uint32_t mapping_table_offset);
303
304 // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
305 const uint16_t* GetVmapTableRaw() const {
306 return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
307 }
308
309 void SetVmapTable(const uint16_t* vmap_table) {
310 SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
311 }
312
313 uint32_t GetOatVmapTableOffset() const;
314
315 void SetOatVmapTableOffset(uint32_t vmap_table_offset);
316
317 const uint8_t* GetNativeGcMap() const {
Jeff Hao16743632013-05-08 10:59:04 -0700318 return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800319 }
320 void SetNativeGcMap(const uint8_t* data) {
Jeff Hao16743632013-05-08 10:59:04 -0700321 SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), data, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800322 }
323
324 // When building the oat need a convenient place to stuff the offset of the native GC map.
325 void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
326 uint32_t GetOatNativeGcMapOffset() const;
327
328 size_t GetFrameSizeInBytes() const {
329 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
330 size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
331 DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
332 return result;
333 }
334
335 void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
336 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
337 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
338 new_frame_size_in_bytes, false);
339 }
340
341 size_t GetReturnPcOffsetInBytes() const {
342 return GetFrameSizeInBytes() - kPointerSize;
343 }
344
Ian Rogers62d6c772013-02-27 08:32:07 -0800345 size_t GetSirtOffsetInBytes() const {
346 CHECK(IsNative());
347 return kPointerSize;
348 }
349
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800350 bool IsRegistered() const;
351
352 void RegisterNative(Thread* self, const void* native_method)
353 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
354
355 void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
356
357 static MemberOffset NativeMethodOffset() {
358 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
359 }
360
361 const void* GetNativeMethod() const {
362 return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
363 }
364
365 void SetNativeMethod(const void*);
366
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800367 static MemberOffset GetMethodIndexOffset() {
368 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
369 }
370
371 uint32_t GetCoreSpillMask() const {
372 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
373 }
374
375 void SetCoreSpillMask(uint32_t core_spill_mask) {
376 // Computed during compilation
377 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
378 }
379
380 uint32_t GetFpSpillMask() const {
381 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
382 }
383
384 void SetFpSpillMask(uint32_t fp_spill_mask) {
385 // Computed during compilation
386 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
387 }
388
389 // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
390 // conventions for a method of managed code. Returns false for Proxy methods.
391 bool IsRuntimeMethod() const;
392
393 // Is this a hand crafted method used for something like describing callee saves?
394 bool IsCalleeSaveMethod() const;
395
396 bool IsResolutionMethod() const;
397
398 uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
399
400 // Converts a native PC to a dex PC.
401 uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
402
403 // Converts a dex PC to a native PC.
404 uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
405
406 // Converts a dex PC to the first corresponding safepoint PC.
407 uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
408 const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
409
Ian Rogersc449aa82013-07-29 14:35:46 -0700410 // Find the catch block for the given exception type and dex_pc. When a catch block is found,
411 // indicates whether the found catch block is responsible for clearing the exception or whether
412 // a move-exception instruction is present.
413 uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc, bool* has_no_move_exception) const
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800414 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
415
416 static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);
417
418 static Class* GetConstructorClass() {
419 return java_lang_reflect_Constructor_;
420 }
421
422 static Class* GetMethodClass() {
423 return java_lang_reflect_Method_;
424 }
425
426 static void ResetClasses();
427
428 protected:
429 // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
430 // The class we are a part of
431 Class* declaring_class_;
432
433 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
434 ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;
435
436 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
437 ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;
438
439 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
440 ObjectArray<Class>* dex_cache_resolved_types_;
441
442 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
443 ObjectArray<String>* dex_cache_strings_;
444
445 // Access flags; low 16 bits are defined by spec.
446 uint32_t access_flags_;
447
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800448 // Offset to the CodeItem.
449 uint32_t code_item_offset_;
450
451 // Architecture-dependent register spill mask
452 uint32_t core_spill_mask_;
453
Jeff Haoaa4a7932013-05-13 11:28:27 -0700454 // Compiled code associated with this method for callers from managed code.
455 // May be compiled managed code or a bridge for invoking a native method.
456 // TODO: Break apart this into portable and quick.
457 const void* entry_point_from_compiled_code_;
458
Jeff Hao16743632013-05-08 10:59:04 -0700459 // Called by the interpreter to execute this method.
460 EntryPointFromInterpreter* entry_point_from_interpreter_;
461
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800462 // Architecture-dependent register spill mask
463 uint32_t fp_spill_mask_;
464
465 // Total size in bytes of the frame
466 size_t frame_size_in_bytes_;
467
Jeff Hao16743632013-05-08 10:59:04 -0700468 // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to reference bitmaps.
469 const uint8_t* gc_map_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800470
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800471 // Mapping from native pc to dex pc
472 const uint32_t* mapping_table_;
473
474 // Index into method_ids of the dex file associated with this method
475 uint32_t method_dex_index_;
476
477 // For concrete virtual methods, this is the offset of the method in Class::vtable_.
478 //
479 // For abstract methods in an interface class, this is the offset of the method in
480 // "iftable_->Get(n)->GetMethodArray()".
481 //
482 // For static and direct methods this is the index in the direct methods table.
483 uint32_t method_index_;
484
485 // The target native method registered with this method
486 const void* native_method_;
487
488 // When a register is promoted into a register, the spill mask holds which registers hold dex
489 // registers. The first promoted register's corresponding dex register is vmap_table_[1], the Nth
490 // is vmap_table_[N]. vmap_table_[0] holds the length of the table.
491 const uint16_t* vmap_table_;
492
493 static Class* java_lang_reflect_Constructor_;
494 static Class* java_lang_reflect_Method_;
495
496 friend struct art::AbstractMethodOffsets; // for verifying offset information
497 friend struct art::ConstructorMethodOffsets; // for verifying offset information
498 friend struct art::MethodOffsets; // for verifying offset information
499 DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
500};
501
// C++ mirror of java.lang.reflect.Method; all state lives in AbstractMethod.
class MANAGED Method : public AbstractMethod {};
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800503
// C++ mirror of java.lang.reflect.Constructor; all state lives in AbstractMethod.
class MANAGED Constructor : public AbstractMethod {};
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800505
// C++ mirror of the Class object for Method/Constructor. Adds the storage for
// the class's static field(s) on top of Class.
class MANAGED AbstractMethodClass : public Class {
 private:
  // Mirrors a static field of the Java class — presumably the ORDER_BY_SIGNATURE
  // comparator; confirm against the Java source of java.lang.reflect.
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};
512
513} // namespace mirror
514} // namespace art
515
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700516#endif // ART_RUNTIME_MIRROR_ABSTRACT_METHOD_H_