blob: 3fda7167d478de79f3982e624cc409b2c78058b6 [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_RUNTIME_GC_REFERENCE_QUEUE_H_
18#define ART_RUNTIME_GC_REFERENCE_QUEUE_H_
19
20#include <iosfwd>
21#include <string>
22#include <vector>
23
David Sehrc431b9d2018-03-02 12:01:51 -080024#include "base/atomic.h"
Andreas Gampe7fbc4a52018-11-28 08:26:47 -080025#include "base/locks.h"
Mathieu Chartier39e32612013-11-12 16:28:05 -080026#include "base/timing_logger.h"
Mathieu Chartier39e32612013-11-12 16:28:05 -080027#include "jni.h"
Mathieu Chartier5d3f73a2016-10-14 14:28:47 -070028#include "obj_ptr.h"
Mathieu Chartier39e32612013-11-12 16:28:05 -080029#include "offsets.h"
Andreas Gampe5a0430d2019-01-04 14:33:57 -080030#include "runtime_globals.h"
Mathieu Chartier39e32612013-11-12 16:28:05 -080031#include "thread_pool.h"
32
33namespace art {
Andreas Gampe7fbc4a52018-11-28 08:26:47 -080034
35class Mutex;
36
Mathieu Chartier8fa2dad2014-03-13 12:22:56 -070037namespace mirror {
38class Reference;
39} // namespace mirror
40
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070041class IsMarkedVisitor;
42class MarkObjectVisitor;
43
Mathieu Chartier39e32612013-11-12 16:28:05 -080044namespace gc {
45
Mathieu Chartier97509952015-07-13 14:35:43 -070046namespace collector {
47class GarbageCollector;
48} // namespace collector
49
Mathieu Chartier39e32612013-11-12 16:28:05 -080050class Heap;
51
// Counts produced by a pass over the finalizer reference list (returned by
// ReferenceQueue::EnqueueFinalizerReferences): the number of references examined and the
// number actually enqueued for finalization.
// The counts are deliberately stored as 32 bits; the casts below make the size_t -> uint32_t
// narrowing explicit rather than implicit (avoids -Wconversion-style warnings and documents
// that truncation of counts >= 2^32 is accepted).
struct FinalizerStats {
  constexpr FinalizerStats(size_t num_refs, size_t num_enqueued)
      : num_refs_(static_cast<uint32_t>(num_refs)),
        num_enqueued_(static_cast<uint32_t>(num_enqueued)) {}
  const uint32_t num_refs_;       // References inspected during the pass.
  const uint32_t num_enqueued_;   // Subset of num_refs_ that were enqueued.
};
58
// Used to temporarily store java.lang.ref.Reference(s) during GC and prior to queueing on the
// appropriate java.lang.ref.ReferenceQueue. The linked list is maintained as an unordered,
// circular, and singly-linked list using the pendingNext fields of the java.lang.ref.Reference
// objects.
class ReferenceQueue {
 public:
  // The lock is only taken by AtomicEnqueueIfNotEnqueued; all other operations assume
  // mutators are suspended or that the caller otherwise has exclusive access.
  explicit ReferenceQueue(Mutex* lock);

  // Enqueue a reference if it is unprocessed. Thread safe to call from multiple
  // threads since it uses a lock to avoid a race between checking for the references presence and
  // adding it.
  void AtomicEnqueueIfNotEnqueued(Thread* self, ObjPtr<mirror::Reference> ref)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!*lock_);

  // Enqueue a reference. The reference must be unprocessed.
  // Not thread safe, used when mutators are paused to minimize lock overhead.
  void EnqueueReference(ObjPtr<mirror::Reference> ref) REQUIRES_SHARED(Locks::mutator_lock_);

  // Dequeue a reference from the queue and return that dequeued reference.
  // Call DisableReadBarrierForReference for the reference that's returned from this function.
  ObjPtr<mirror::Reference> DequeuePendingReference() REQUIRES_SHARED(Locks::mutator_lock_);

  // If applicable, disable the read barrier for the reference after its referent is handled (see
  // ConcurrentCopying::ProcessMarkStackRef.) This must be called for a reference that's dequeued
  // from pending queue (DequeuePendingReference).
  void DisableReadBarrierForReference(ObjPtr<mirror::Reference> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Enqueues finalizer references with white referents. White referents are blackened, moved to
  // the zombie field, and the referent field is cleared. Returns counts of references
  // examined/enqueued (see FinalizerStats).
  FinalizerStats EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                            collector::GarbageCollector* collector)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Walks the reference list marking and dequeuing any references subject to the reference
  // clearing policy. References with a black referent are removed from the list. References
  // with white referents biased toward saving are blackened and also removed from the list.
  // Returns the number of non-null soft references. May be called concurrently with
  // AtomicEnqueueIfNotEnqueued().
  uint32_t ForwardSoftReferences(MarkObjectVisitor* visitor)
      REQUIRES(!*lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Unlink the reference list clearing references objects with white referents. Cleared references
  // registered to a reference queue are scheduled for appending by the heap worker thread.
  void ClearWhiteReferences(ReferenceQueue* cleared_references,
                            collector::GarbageCollector* collector,
                            bool report_cleared = false)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Dump a human-readable description of the queue's contents to os.
  void Dump(std::ostream& os) const REQUIRES_SHARED(Locks::mutator_lock_);
  // Number of references currently in the queue.
  size_t GetLength() const REQUIRES_SHARED(Locks::mutator_lock_);

  // True when the queue holds no references (list_ head is null).
  bool IsEmpty() const {
    return list_ == nullptr;
  }

  // Clear this queue. Only safe after handing off the contents elsewhere for further processing.
  void Clear() {
    list_ = nullptr;
  }

  // Raw head of the circular singly-linked list; null when the queue is empty.
  mirror::Reference* GetList() REQUIRES_SHARED(Locks::mutator_lock_) {
    return list_;
  }

  // Visits list_, currently only used for the mark compact GC.
  void UpdateRoots(IsMarkedVisitor* visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Lock, used for parallel GC reference enqueuing. It allows for multiple threads simultaneously
  // calling AtomicEnqueueIfNotEnqueued.
  Mutex* const lock_;
  // The actual reference list. Only a root for the mark compact GC since it
  // will be null during root marking for other GC types. Not an ObjPtr since it
  // is accessed from multiple threads. Points to a singly-linked circular list
  // using the pendingNext field.
  mirror::Reference* list_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ReferenceQueue);
};
141
142} // namespace gc
143} // namespace art
144
145#endif // ART_RUNTIME_GC_REFERENCE_QUEUE_H_