/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "perfetto_hprof"

#include "perfetto_hprof.h"

#include <android-base/logging.h>
#include <fcntl.h>
#include <inttypes.h>
#include <sched.h>
#include <signal.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/un.h>
#include <sys/wait.h>
#include <thread>
#include <time.h>

#include <type_traits>

#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "mirror/object-refvisitor-inl.h"
#include "nativehelper/scoped_local_ref.h"
#include "perfetto/profiling/normalize.h"
#include "perfetto/profiling/parse_smaps.h"
#include "perfetto/trace/interned_data/interned_data.pbzero.h"
#include "perfetto/trace/profiling/heap_graph.pbzero.h"
#include "perfetto/trace/profiling/profile_common.pbzero.h"
#include "perfetto/trace/profiling/smaps.pbzero.h"
#include "perfetto/config/profiling/java_hprof_config.pbzero.h"
#include "perfetto/protozero/packed_repeated_fields.h"
#include "perfetto/tracing.h"
#include "runtime-inl.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "thread_list.h"
#include "well_known_classes.h"
#include "dex/descriptors_names.h"

// There are three threads involved in this:
// * listener thread: this is idle in the background when this plugin gets loaded, and waits
//   for data on g_signal_pipe_fds.
// * signal thread: an arbitrary thread that handles the signal and writes data to
//   g_signal_pipe_fds.
// * perfetto producer thread: once the signal is received, the app forks. In the newly forked
//   child, the Perfetto Client API spawns a thread to communicate with traced.
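//
// For illustration only (not part of the production flow): a dump can be
// triggered by delivering the signal defined below as kJavaHeapprofdSignal to
// the target process, e.g.
//
//   kill(target_pid, __SIGRTMIN + 6);  // hypothetical snippet, no error handling
//
// In practice the signal is raised by the profiling infrastructure (heapprofd /
// traced) when an "android.java_hprof" data source targeting this process is
// started.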

namespace perfetto_hprof {

constexpr int kJavaHeapprofdSignal = __SIGRTMIN + 6;
constexpr time_t kWatchdogTimeoutSec = 120;
// This needs to be lower than the maximum acceptable chunk size, because this
// is checked *before* writing another submessage. We conservatively assume
// submessages can be up to 100k here for a 500k chunk size.
// DropBox has a 500k chunk limit, and each chunk needs to parse as a proto.
constexpr uint32_t kPacketSizeThreshold = 400000;
constexpr char kByte[1] = {'x'};
static art::Mutex& GetStateMutex() {
  static art::Mutex state_mutex("perfetto_hprof_state_mutex", art::LockLevel::kGenericBottomLock);
  return state_mutex;
}

static art::ConditionVariable& GetStateCV() {
  static art::ConditionVariable state_cv("perfetto_hprof_state_cv", GetStateMutex());
  return state_cv;
}

static State g_state = State::kUninitialized;

// Pipe used to signal from the signal handler into a worker thread that
// handles the dump requests.
int g_signal_pipe_fds[2];
static struct sigaction g_orig_act = {};

template <typename T>
uint64_t FindOrAppend(std::map<T, uint64_t>* m, const T& s) {
  auto it = m->find(s);
  if (it == m->end()) {
    std::tie(it, std::ignore) = m->emplace(s, m->size());
  }
  return it->second;
}
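
// Usage sketch for FindOrAppend (hypothetical values): it interns keys into a
// map whose values are dense ids, so repeated keys always get the same id.
//
//   std::map<std::string, uint64_t> interned{{"", 0}};
//   uint64_t a = FindOrAppend(&interned, std::string("foo"));  // a == 1
//   uint64_t b = FindOrAppend(&interned, std::string("foo"));  // b == 1 (already interned)
//   uint64_t c = FindOrAppend(&interned, std::string("bar"));  // c == 2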

void ArmWatchdogOrDie() {
  timer_t timerid{};
  struct sigevent sev {};
  sev.sigev_notify = SIGEV_SIGNAL;
  sev.sigev_signo = SIGKILL;

  if (timer_create(CLOCK_MONOTONIC, &sev, &timerid) == -1) {
    // This only gets called in the child, so we can fatal without impacting
    // the app.
    PLOG(FATAL) << "failed to create watchdog timer";
  }

  struct itimerspec its {};
  its.it_value.tv_sec = kWatchdogTimeoutSec;

  if (timer_settime(timerid, 0, &its, nullptr) == -1) {
    // This only gets called in the child, so we can fatal without impacting
    // the app.
    PLOG(FATAL) << "failed to arm watchdog timer";
  }
}

bool StartsWith(const std::string& str, const std::string& prefix) {
  return str.compare(0, prefix.length(), prefix) == 0;
}

// Sample entries that match one of the following:
// * start with /system/
// * start with /vendor/
// * start with /data/app/
// * contain "extracted in memory from Y", where Y matches any of the above
bool ShouldSampleSmapsEntry(const perfetto::profiling::SmapsEntry& e) {
  if (StartsWith(e.pathname, "/system/") || StartsWith(e.pathname, "/vendor/") ||
      StartsWith(e.pathname, "/data/app/")) {
    return true;
  }
  if (StartsWith(e.pathname, "[anon:")) {
    if (e.pathname.find("extracted in memory from /system/") != std::string::npos) {
      return true;
    }
    if (e.pathname.find("extracted in memory from /vendor/") != std::string::npos) {
      return true;
    }
    if (e.pathname.find("extracted in memory from /data/app/") != std::string::npos) {
      return true;
    }
  }
  return false;
}
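
// Examples (illustrative only): "/system/framework/framework.jar" and
// "[anon:dalvik-classes.dex extracted in memory from /data/app/com.example.app/base.apk]"
// would be sampled; an unrelated mapping such as "/dev/ashmem/foo" would not.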

bool CanConnectToSocket(const char* name) {
  struct sockaddr_un addr = {};
  addr.sun_family = AF_UNIX;
  strncpy(addr.sun_path, name, sizeof(addr.sun_path) - 1);
  int fd = socket(AF_UNIX, SOCK_STREAM | SOCK_CLOEXEC, 0);
  if (fd == -1) {
    PLOG(ERROR) << "failed to create socket";
    return false;
  }
  bool connected = connect(fd, reinterpret_cast<struct sockaddr*>(&addr), sizeof(addr)) == 0;
  close(fd);
  return connected;
}

constexpr size_t kMaxCmdlineSize = 512;

class JavaHprofDataSource : public perfetto::DataSource<JavaHprofDataSource> {
 public:
  constexpr static perfetto::BufferExhaustedPolicy kBufferExhaustedPolicy =
      perfetto::BufferExhaustedPolicy::kStall;
  void OnSetup(const SetupArgs& args) override {
    // This is on the heap as it triggers -Wframe-larger-than.
    std::unique_ptr<perfetto::protos::pbzero::JavaHprofConfig::Decoder> cfg(
        new perfetto::protos::pbzero::JavaHprofConfig::Decoder(
            args.config->java_hprof_config_raw()));

    if (args.config->enable_extra_guardrails() && !CanConnectToSocket("/dev/socket/heapprofd")) {
      LOG(ERROR) << "rejecting extra guardrails";
      enabled_ = false;
      return;
    }

    dump_smaps_ = cfg->dump_smaps();
    for (auto it = cfg->ignored_types(); it; ++it) {
      std::string name = (*it).ToStdString();
      ignored_types_.emplace_back(std::move(name));
    }

    uint64_t self_pid = static_cast<uint64_t>(getpid());
    for (auto pid_it = cfg->pid(); pid_it; ++pid_it) {
      if (*pid_it == self_pid) {
        enabled_ = true;
        return;
      }
    }

    if (cfg->has_process_cmdline()) {
      int fd = open("/proc/self/cmdline", O_RDONLY | O_CLOEXEC);
      if (fd == -1) {
        PLOG(ERROR) << "failed to open /proc/self/cmdline";
        return;
      }
      char cmdline[kMaxCmdlineSize];
      ssize_t rd = read(fd, cmdline, sizeof(cmdline) - 1);
      if (rd == -1) {
        PLOG(ERROR) << "failed to read /proc/self/cmdline";
      }
      close(fd);
      if (rd == -1) {
        return;
      }
      cmdline[rd] = '\0';
      char* cmdline_ptr = cmdline;
      ssize_t sz = perfetto::profiling::NormalizeCmdLine(&cmdline_ptr, static_cast<size_t>(rd + 1));
      if (sz == -1) {
        PLOG(ERROR) << "failed to normalize cmdline";
      }
      for (auto it = cfg->process_cmdline(); it; ++it) {
        std::string other = (*it).ToStdString();
        // Append \0 to make this a C string.
        other.resize(other.size() + 1);
        char* other_ptr = &(other[0]);
        ssize_t other_sz = perfetto::profiling::NormalizeCmdLine(&other_ptr, other.size());
        if (other_sz == -1) {
          PLOG(ERROR) << "failed to normalize other cmdline";
          continue;
        }
        if (sz == other_sz && strncmp(cmdline_ptr, other_ptr, static_cast<size_t>(sz)) == 0) {
          enabled_ = true;
          return;
        }
      }
    }
  }

  bool dump_smaps() { return dump_smaps_; }
  bool enabled() { return enabled_; }

  void OnStart(const StartArgs&) override {
    if (!enabled()) {
      return;
    }
    art::MutexLock lk(art_thread(), GetStateMutex());
    if (g_state == State::kWaitForStart) {
      g_state = State::kStart;
      GetStateCV().Broadcast(art_thread());
    }
  }

  // This data source can be used with a trace config that has a short
  // duration_ms but a long datasource_stop_timeout_ms. In that case, OnStop is
  // (in general) called before the dump is done, so we handle the stop
  // asynchronously and notify the tracing service once we are done.
  // If OnStop is called after the dump is done (but before the process has
  // exited), we just acknowledge the request.
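  //
  // A rough sketch of such a config, in informal notation (exact proto field
  // names and values are illustrative, not authoritative):
  //
  //   duration_ms: 1
  //   data_source_stop_timeout_ms: 100000
  //   data_sources {
  //     config {
  //       name: "android.java_hprof"
  //       java_hprof_config {
  //         process_cmdline: "com.example.app"
  //       }
  //     }
  //   }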
  void OnStop(const StopArgs& a) override {
    art::MutexLock lk(art_thread(), finish_mutex_);
    if (is_finished_) {
      return;
    }
    is_stopped_ = true;
    async_stop_ = std::move(a.HandleStopAsynchronously());
  }

  static art::Thread* art_thread() {
    // TODO(fmayer): Attach the Perfetto producer thread to ART and give it a name. This is
    // not trivial: we cannot just attach the first time this method is called, because
    // AttachCurrentThread deadlocks with the ConditionVariable::Wait in WaitForDataSource.
    //
    // We should attach the thread as soon as the Client API spawns it, but that needs more
    // complicated plumbing.
    return nullptr;
  }

  std::vector<std::string> ignored_types() { return ignored_types_; }

  void Finish() {
    art::MutexLock lk(art_thread(), finish_mutex_);
    if (is_stopped_) {
      async_stop_();
    } else {
      is_finished_ = true;
    }
  }

 private:
  bool enabled_ = false;
  bool dump_smaps_ = false;
  std::vector<std::string> ignored_types_;
  static art::Thread* self_;

  art::Mutex finish_mutex_{"perfetto_hprof_ds_mutex", art::LockLevel::kGenericBottomLock};
  bool is_finished_ = false;
  bool is_stopped_ = false;
  std::function<void()> async_stop_;
};

art::Thread* JavaHprofDataSource::self_ = nullptr;


void WaitForDataSource(art::Thread* self) {
  perfetto::TracingInitArgs args;
  args.backends = perfetto::BackendType::kSystemBackend;
  perfetto::Tracing::Initialize(args);

  perfetto::DataSourceDescriptor dsd;
  dsd.set_name("android.java_hprof");
  dsd.set_will_notify_on_stop(true);
  JavaHprofDataSource::Register(dsd);

  LOG(INFO) << "waiting for data source";

  art::MutexLock lk(self, GetStateMutex());
  while (g_state != State::kStart) {
    GetStateCV().Wait(self);
  }
}

class Writer {
 public:
  Writer(pid_t parent_pid, JavaHprofDataSource::TraceContext* ctx, uint64_t timestamp)
      : parent_pid_(parent_pid), ctx_(ctx), timestamp_(timestamp),
        last_written_(ctx_->written()) {}

  // Return whether the next call to GetHeapGraph will create a new TracePacket.
  bool will_create_new_packet() {
    return !heap_graph_ || ctx_->written() - last_written_ > kPacketSizeThreshold;
  }

  perfetto::protos::pbzero::HeapGraph* GetHeapGraph() {
    if (will_create_new_packet()) {
      CreateNewHeapGraph();
    }
    return heap_graph_;
  }

  void CreateNewHeapGraph() {
    if (heap_graph_) {
      heap_graph_->set_continued(true);
    }
    Finalize();

    uint64_t written = ctx_->written();

    trace_packet_ = ctx_->NewTracePacket();
    trace_packet_->set_timestamp(timestamp_);
    heap_graph_ = trace_packet_->set_heap_graph();
    heap_graph_->set_pid(parent_pid_);
    heap_graph_->set_index(index_++);

    last_written_ = written;
  }

  void Finalize() {
    if (trace_packet_) {
      trace_packet_->Finalize();
    }
    heap_graph_ = nullptr;
  }

  ~Writer() { Finalize(); }

 private:
  const pid_t parent_pid_;
  JavaHprofDataSource::TraceContext* const ctx_;
  const uint64_t timestamp_;

  uint64_t last_written_ = 0;

  perfetto::DataSource<JavaHprofDataSource>::TraceContext::TracePacketHandle
      trace_packet_;
  perfetto::protos::pbzero::HeapGraph* heap_graph_ = nullptr;

  uint64_t index_ = 0;
};

class ReferredObjectsFinder {
 public:
  explicit ReferredObjectsFinder(
      std::vector<std::pair<std::string, art::mirror::Object*>>* referred_objects)
      : referred_objects_(referred_objects) {}

  // For art::mirror::Object::VisitReferences.
  void operator()(art::ObjPtr<art::mirror::Object> obj, art::MemberOffset offset,
                  bool is_static) const
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (offset.Uint32Value() == art::mirror::Object::ClassOffset().Uint32Value()) {
      // Skip shadow$klass pointer.
      return;
    }
    art::mirror::Object* ref = obj->GetFieldObject<art::mirror::Object>(offset);
    art::ArtField* field;
    if (is_static) {
      field = art::ArtField::FindStaticFieldWithOffset(obj->AsClass(), offset.Uint32Value());
    } else {
      field = art::ArtField::FindInstanceFieldWithOffset(obj->GetClass(), offset.Uint32Value());
    }
    std::string field_name = "";
    if (field != nullptr) {
      field_name = field->PrettyField(/*with_type=*/true);
    }
    referred_objects_->emplace_back(std::move(field_name), ref);
  }

  void VisitRootIfNonNull(art::mirror::CompressedReference<art::mirror::Object>* root
                              ATTRIBUTE_UNUSED) const {}
  void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root
                     ATTRIBUTE_UNUSED) const {}

 private:
  // We can use a raw Object* pointer here, because there are no concurrent GC threads after the
  // fork.
  std::vector<std::pair<std::string, art::mirror::Object*>>* referred_objects_;
};

class RootFinder : public art::SingleRootVisitor {
 public:
  explicit RootFinder(
      std::map<art::RootType, std::vector<art::mirror::Object*>>* root_objects)
      : root_objects_(root_objects) {}

  void VisitRoot(art::mirror::Object* root, const art::RootInfo& info) override {
    (*root_objects_)[info.GetType()].emplace_back(root);
  }

 private:
  // We can use a raw Object* pointer here, because there are no concurrent GC threads after the
  // fork.
  std::map<art::RootType, std::vector<art::mirror::Object*>>* root_objects_;
};

perfetto::protos::pbzero::HeapGraphRoot::Type ToProtoType(art::RootType art_type) {
  using perfetto::protos::pbzero::HeapGraphRoot;
  switch (art_type) {
    case art::kRootUnknown:
      return HeapGraphRoot::ROOT_UNKNOWN;
    case art::kRootJNIGlobal:
      return HeapGraphRoot::ROOT_JNI_GLOBAL;
    case art::kRootJNILocal:
      return HeapGraphRoot::ROOT_JNI_LOCAL;
    case art::kRootJavaFrame:
      return HeapGraphRoot::ROOT_JAVA_FRAME;
    case art::kRootNativeStack:
      return HeapGraphRoot::ROOT_NATIVE_STACK;
    case art::kRootStickyClass:
      return HeapGraphRoot::ROOT_STICKY_CLASS;
    case art::kRootThreadBlock:
      return HeapGraphRoot::ROOT_THREAD_BLOCK;
    case art::kRootMonitorUsed:
      return HeapGraphRoot::ROOT_MONITOR_USED;
    case art::kRootThreadObject:
      return HeapGraphRoot::ROOT_THREAD_OBJECT;
    case art::kRootInternedString:
      return HeapGraphRoot::ROOT_INTERNED_STRING;
    case art::kRootFinalizing:
      return HeapGraphRoot::ROOT_FINALIZING;
    case art::kRootDebugger:
      return HeapGraphRoot::ROOT_DEBUGGER;
    case art::kRootReferenceCleanup:
      return HeapGraphRoot::ROOT_REFERENCE_CLEANUP;
    case art::kRootVMInternal:
      return HeapGraphRoot::ROOT_VM_INTERNAL;
    case art::kRootJNIMonitor:
      return HeapGraphRoot::ROOT_JNI_MONITOR;
  }
}

perfetto::protos::pbzero::HeapGraphType::Kind ProtoClassKind(uint32_t class_flags) {
  using perfetto::protos::pbzero::HeapGraphType;
  switch (class_flags) {
    case art::mirror::kClassFlagNormal:
      return HeapGraphType::KIND_NORMAL;
    case art::mirror::kClassFlagNoReferenceFields:
      return HeapGraphType::KIND_NOREFERENCES;
    case art::mirror::kClassFlagString | art::mirror::kClassFlagNoReferenceFields:
      return HeapGraphType::KIND_STRING;
    case art::mirror::kClassFlagObjectArray:
      return HeapGraphType::KIND_ARRAY;
    case art::mirror::kClassFlagClass:
      return HeapGraphType::KIND_CLASS;
    case art::mirror::kClassFlagClassLoader:
      return HeapGraphType::KIND_CLASSLOADER;
    case art::mirror::kClassFlagDexCache:
      return HeapGraphType::KIND_DEXCACHE;
    case art::mirror::kClassFlagSoftReference:
      return HeapGraphType::KIND_SOFT_REFERENCE;
    case art::mirror::kClassFlagWeakReference:
      return HeapGraphType::KIND_WEAK_REFERENCE;
    case art::mirror::kClassFlagFinalizerReference:
      return HeapGraphType::KIND_FINALIZER_REFERENCE;
    case art::mirror::kClassFlagPhantomReference:
      return HeapGraphType::KIND_PHANTOM_REFERENCE;
    default:
      return HeapGraphType::KIND_UNKNOWN;
  }
}

std::string PrettyType(art::mirror::Class* klass) NO_THREAD_SAFETY_ANALYSIS {
  if (klass == nullptr) {
    return "(raw)";
  }
  std::string temp;
  std::string result(art::PrettyDescriptor(klass->GetDescriptor(&temp)));
  return result;
}

void DumpSmaps(JavaHprofDataSource::TraceContext* ctx) {
  FILE* smaps = fopen("/proc/self/smaps", "r");
  if (smaps != nullptr) {
    auto trace_packet = ctx->NewTracePacket();
    auto* smaps_packet = trace_packet->set_smaps_packet();
    smaps_packet->set_pid(getpid());
    perfetto::profiling::ParseSmaps(smaps,
        [&smaps_packet](const perfetto::profiling::SmapsEntry& e) {
          if (ShouldSampleSmapsEntry(e)) {
            auto* smaps_entry = smaps_packet->add_entries();
            smaps_entry->set_path(e.pathname);
            smaps_entry->set_size_kb(e.size_kb);
            smaps_entry->set_private_dirty_kb(e.private_dirty_kb);
            smaps_entry->set_swap_kb(e.swap_kb);
          }
        });
    fclose(smaps);
  } else {
    PLOG(ERROR) << "failed to open smaps";
  }
}

uint64_t GetObjectId(const art::mirror::Object* obj) {
  return reinterpret_cast<uint64_t>(obj) / std::alignment_of<art::mirror::Object>::value;
}
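
// For example (hypothetical addresses, assuming an 8-byte object alignment):
// objects at 0x7000 and 0x7008 map to ids 0xE00 and 0xE01. Dividing by the
// alignment keeps the id space dense, and a nullptr object naturally maps to
// id 0, the proto default.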

template <typename F>
void ForInstanceReferenceField(art::mirror::Class* klass, F fn) NO_THREAD_SAFETY_ANALYSIS {
  for (art::ArtField& af : klass->GetIFields()) {
    if (af.IsPrimitiveType() ||
        af.GetOffset().Uint32Value() == art::mirror::Object::ClassOffset().Uint32Value()) {
      continue;
    }
    fn(af.GetOffset());
  }
}

bool IsIgnored(const std::vector<std::string>& ignored_types,
               art::mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
  if (obj->IsClass()) {
    return false;
  }
  art::mirror::Class* klass = obj->GetClass();
  return std::find(ignored_types.begin(), ignored_types.end(), PrettyType(klass)) !=
         ignored_types.end();
}
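
// The names in |ignored_types| come from the data source config and are
// compared against PrettyType() output, i.e. fully qualified Java class names.
// A hypothetical config entry could look like:
//
//   java_hprof_config { ignored_types: "com.example.LargeCache" }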

void DumpPerfetto(art::Thread* self) {
  pid_t parent_pid = getpid();
  LOG(INFO) << "preparing to dump heap for " << parent_pid;

  // Need to take a heap dump while GC isn't running. See the comment in
  // Heap::VisitObjects(). Also we need the critical section to avoid visiting
  // the same object twice. See b/34967844.
  //
  // We need to do this before the fork, because otherwise it can deadlock
  // waiting for the GC, as all other threads get terminated by the clone, but
  // their locks are not released.
  art::gc::ScopedGCCriticalSection gcs(self, art::gc::kGcCauseHprof,
                                       art::gc::kCollectorTypeHprof);

  art::ScopedSuspendAll ssa(__FUNCTION__, /* long_suspend=*/ true);

  pid_t pid = fork();
  if (pid == -1) {
    // Fork error.
    PLOG(ERROR) << "fork";
    return;
  }
  if (pid != 0) {
    // Parent
    int stat_loc;
    for (;;) {
      if (waitpid(pid, &stat_loc, 0) != -1 || errno != EINTR) {
        break;
      }
    }
    return;
  }

  // The following code is only executed by the child of the original process.
  //
  // daemon() creates a new process that is the grand-child of the original process, and exits.
  if (daemon(0, 0) == -1) {
    PLOG(FATAL) << "daemon";
  }

  // The following code is only executed by the grand-child of the original process.

  // Make sure that this is the first thing we do after forking, so if anything
  // below hangs, the watchdog kills the forked process instead of the app.
  ArmWatchdogOrDie();

  struct timespec ts = {};
  if (clock_gettime(CLOCK_BOOTTIME, &ts) != 0) {
    LOG(FATAL) << "Failed to get boottime.";
  }
  uint64_t timestamp = ts.tv_sec * 1000000000LL + ts.tv_nsec;

  WaitForDataSource(self);

  JavaHprofDataSource::Trace(
      [parent_pid, timestamp](JavaHprofDataSource::TraceContext ctx)
          NO_THREAD_SAFETY_ANALYSIS {
        bool dump_smaps;
        std::vector<std::string> ignored_types;
        {
          auto ds = ctx.GetDataSourceLocked();
          if (!ds || !ds->enabled()) {
            if (ds) ds->Finish();
            LOG(INFO) << "skipping irrelevant data source.";
            return;
          }
          dump_smaps = ds->dump_smaps();
          ignored_types = ds->ignored_types();
        }
        LOG(INFO) << "dumping heap for " << parent_pid;
        if (dump_smaps) {
          DumpSmaps(&ctx);
        }
        Writer writer(parent_pid, &ctx, timestamp);
        // Make sure that intern ID 0 (default proto value for a uint64_t) always maps to ""
        // (default proto value for a string).
        std::map<std::string, uint64_t> interned_fields{{"", 0}};
        std::map<std::string, uint64_t> interned_locations{{"", 0}};
        std::map<uintptr_t, uint64_t> interned_classes{{0, 0}};

        std::map<art::RootType, std::vector<art::mirror::Object*>> root_objects;
        RootFinder rcf(&root_objects);
        art::Runtime::Current()->VisitRoots(&rcf);
        std::unique_ptr<protozero::PackedVarInt> object_ids(
            new protozero::PackedVarInt);
        for (const auto& p : root_objects) {
          const art::RootType root_type = p.first;
          const std::vector<art::mirror::Object*>& children = p.second;
          perfetto::protos::pbzero::HeapGraphRoot* root_proto =
              writer.GetHeapGraph()->add_roots();
          root_proto->set_root_type(ToProtoType(root_type));
          for (art::mirror::Object* obj : children) {
            if (writer.will_create_new_packet()) {
              root_proto->set_object_ids(*object_ids);
              object_ids->Reset();
              root_proto = writer.GetHeapGraph()->add_roots();
              root_proto->set_root_type(ToProtoType(root_type));
            }
            object_ids->Append(GetObjectId(obj));
          }
          root_proto->set_object_ids(*object_ids);
          object_ids->Reset();
        }

        std::unique_ptr<protozero::PackedVarInt> reference_field_ids(
            new protozero::PackedVarInt);
        std::unique_ptr<protozero::PackedVarInt> reference_object_ids(
            new protozero::PackedVarInt);

        art::Runtime::Current()->GetHeap()->VisitObjectsPaused(
            [&writer, &interned_fields, &interned_locations, &reference_field_ids,
             &reference_object_ids, &interned_classes, &ignored_types](
                art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
              if (obj->IsClass()) {
                art::mirror::Class* klass = obj->AsClass().Ptr();
                perfetto::protos::pbzero::HeapGraphType* type_proto =
                    writer.GetHeapGraph()->add_types();
                type_proto->set_id(FindOrAppend(&interned_classes,
                                                reinterpret_cast<uintptr_t>(klass)));
                type_proto->set_class_name(PrettyType(klass));
                type_proto->set_location_id(FindOrAppend(&interned_locations,
                                                         klass->GetLocation()));
                type_proto->set_object_size(klass->GetObjectSize());
                type_proto->set_kind(ProtoClassKind(klass->GetClassFlags()));
                if (klass->GetSuperClass().Ptr()) {
                  type_proto->set_superclass_id(
                      FindOrAppend(&interned_classes,
                                   reinterpret_cast<uintptr_t>(klass->GetSuperClass().Ptr())));
                }
                ForInstanceReferenceField(
                    klass, [klass, &reference_field_ids, &interned_fields](
                               art::MemberOffset offset) NO_THREAD_SAFETY_ANALYSIS {
                      auto art_field = art::ArtField::FindInstanceFieldWithOffset(
                          klass, offset.Uint32Value());
                      reference_field_ids->Append(
                          FindOrAppend(&interned_fields, art_field->PrettyField(true)));
                    });
                type_proto->set_reference_field_id(*reference_field_ids);
                reference_field_ids->Reset();
              }

              art::mirror::Class* klass = obj->GetClass();
              uintptr_t class_ptr = reinterpret_cast<uintptr_t>(klass);
              // We need to synthesize a new type for Class<Foo>, which does not exist
              // in the runtime. Otherwise, all the static members of all classes would be
              // attributed to java.lang.Class.
              if (klass->IsClassClass()) {
                CHECK(obj->IsClass());
                perfetto::protos::pbzero::HeapGraphType* type_proto =
                    writer.GetHeapGraph()->add_types();
                // All pointers are at least multiples of two, so this way we can make sure
                // we are not colliding with a real class.
                class_ptr = reinterpret_cast<uintptr_t>(obj) | 1;
                auto class_id = FindOrAppend(&interned_classes, class_ptr);
                type_proto->set_id(class_id);
                type_proto->set_class_name(obj->PrettyTypeOf());
                type_proto->set_location_id(FindOrAppend(&interned_locations,
                                                         obj->AsClass()->GetLocation()));
              }

              if (IsIgnored(ignored_types, obj)) {
                return;
              }

              auto class_id = FindOrAppend(&interned_classes, class_ptr);

              perfetto::protos::pbzero::HeapGraphObject* object_proto =
                  writer.GetHeapGraph()->add_objects();
              object_proto->set_id(GetObjectId(obj));
              object_proto->set_type_id(class_id);

              // Arrays and strings are special: their size is instance-dependent.
              if (obj->SizeOf() != klass->GetObjectSize())
                object_proto->set_self_size(obj->SizeOf());

              std::vector<std::pair<std::string, art::mirror::Object*>>
                  referred_objects;
              ReferredObjectsFinder objf(&referred_objects);

              const bool emit_field_ids =
                  klass->GetClassFlags() != art::mirror::kClassFlagObjectArray &&
                  klass->GetClassFlags() != art::mirror::kClassFlagNormal;
              if (klass->GetClassFlags() != art::mirror::kClassFlagNormal) {
                obj->VisitReferences(objf, art::VoidFunctor());
              } else {
                for (art::mirror::Class* cls = klass; cls != nullptr;
                     cls = cls->GetSuperClass().Ptr()) {
                  ForInstanceReferenceField(
                      cls, [obj, objf](art::MemberOffset offset) NO_THREAD_SAFETY_ANALYSIS {
                        objf(art::ObjPtr<art::mirror::Object>(obj), offset,
                             /*is_static=*/false);
                      });
                }
              }
              for (auto& p : referred_objects) {
                const std::string& field_name = p.first;
                art::mirror::Object* referred_obj = p.second;
                if (referred_obj && IsIgnored(ignored_types, referred_obj)) {
                  referred_obj = nullptr;
                }
                if (emit_field_ids) {
                  reference_field_ids->Append(FindOrAppend(&interned_fields, field_name));
                }
                reference_object_ids->Append(GetObjectId(referred_obj));
              }
              if (emit_field_ids) {
                object_proto->set_reference_field_id(*reference_field_ids);
                reference_field_ids->Reset();
              }
              object_proto->set_reference_object_id(*reference_object_ids);
              reference_object_ids->Reset();
            });

        for (const auto& p : interned_fields) {
          const std::string& str = p.first;
          uint64_t id = p.second;

          perfetto::protos::pbzero::InternedString* field_proto =
              writer.GetHeapGraph()->add_field_names();
          field_proto->set_iid(id);
          field_proto->set_str(
              reinterpret_cast<const uint8_t*>(str.c_str()), str.size());
        }
        for (const auto& p : interned_locations) {
          const std::string& str = p.first;
          uint64_t id = p.second;

          perfetto::protos::pbzero::InternedString* location_proto =
              writer.GetHeapGraph()->add_location_names();
          location_proto->set_iid(id);
          location_proto->set_str(reinterpret_cast<const uint8_t*>(str.c_str()),
                                  str.size());
        }

        writer.Finalize();
        ctx.Flush([] {
          {
            art::MutexLock lk(JavaHprofDataSource::art_thread(), GetStateMutex());
            g_state = State::kEnd;
            GetStateCV().Broadcast(JavaHprofDataSource::art_thread());
          }
        });
        // Wait for the Flush that will happen on the Perfetto thread.
        {
          art::MutexLock lk(JavaHprofDataSource::art_thread(), GetStateMutex());
          while (g_state != State::kEnd) {
            GetStateCV().Wait(JavaHprofDataSource::art_thread());
          }
        }
        {
          auto ds = ctx.GetDataSourceLocked();
          if (ds) {
            ds->Finish();
          } else {
            LOG(ERROR) << "datasource timed out (duration_ms + datasource_stop_timeout_ms) "
                          "before dump finished";
          }
        }
      });

  LOG(INFO) << "finished dumping heap for " << parent_pid;
  // Prevent the atexit handlers from running. We do not want to call cleanup
  // functions the parent process has registered.
  _exit(0);
}

// The plugin initialization function.
extern "C" bool ArtPlugin_Initialize() {
  if (art::Runtime::Current() == nullptr) {
    return false;
  }
  art::Thread* self = art::Thread::Current();
  {
    art::MutexLock lk(self, GetStateMutex());
    if (g_state != State::kUninitialized) {
      LOG(ERROR) << "perfetto_hprof already initialized. state: " << g_state;
      return false;
    }
    g_state = State::kWaitForListener;
  }

  if (pipe2(g_signal_pipe_fds, O_CLOEXEC) == -1) {
    PLOG(ERROR) << "Failed to pipe";
    return false;
  }

  struct sigaction act = {};
  act.sa_flags = SA_SIGINFO | SA_RESTART;
  act.sa_sigaction = [](int, siginfo_t*, void*) {
    if (write(g_signal_pipe_fds[1], kByte, sizeof(kByte)) == -1) {
      PLOG(ERROR) << "Failed to trigger heap dump";
    }
  };

  // TODO(fmayer): We can probably use the SignalCatcher thread here to not
  // have an idle thread.
  if (sigaction(kJavaHeapprofdSignal, &act, &g_orig_act) != 0) {
    close(g_signal_pipe_fds[0]);
    close(g_signal_pipe_fds[1]);
    PLOG(ERROR) << "Failed to sigaction";
    return false;
  }

  std::thread th([] {
    art::Runtime* runtime = art::Runtime::Current();
    if (!runtime) {
      LOG(FATAL_WITHOUT_ABORT) << "no runtime in perfetto_hprof_listener";
      return;
    }
    if (!runtime->AttachCurrentThread("perfetto_hprof_listener", /*as_daemon=*/ true,
                                      runtime->GetSystemThreadGroup(), /*create_peer=*/ false)) {
      LOG(ERROR) << "failed to attach thread.";
      {
        art::MutexLock lk(nullptr, GetStateMutex());
        g_state = State::kUninitialized;
        GetStateCV().Broadcast(nullptr);
      }

      return;
    }
    art::Thread* self = art::Thread::Current();
    if (!self) {
      LOG(FATAL_WITHOUT_ABORT) << "no thread in perfetto_hprof_listener";
      return;
    }
    {
      art::MutexLock lk(self, GetStateMutex());
      if (g_state == State::kWaitForListener) {
        g_state = State::kWaitForStart;
        GetStateCV().Broadcast(self);
      }
    }
    char buf[1];
    for (;;) {
      int res;
      do {
        res = read(g_signal_pipe_fds[0], buf, sizeof(buf));
      } while (res == -1 && errno == EINTR);

      if (res <= 0) {
        if (res == -1) {
          PLOG(ERROR) << "failed to read";
        }
        close(g_signal_pipe_fds[0]);
        return;
      }

      perfetto_hprof::DumpPerfetto(self);
    }
  });
  th.detach();

  return true;
}

extern "C" bool ArtPlugin_Deinitialize() {
  if (sigaction(kJavaHeapprofdSignal, &g_orig_act, nullptr) != 0) {
    PLOG(ERROR) << "failed to reset signal handler";
    // We cannot close the pipe if the signal handler wasn't unregistered,
    // to avoid receiving SIGPIPE.
    return false;
  }
  close(g_signal_pipe_fds[1]);

  art::Thread* self = art::Thread::Current();
  art::MutexLock lk(self, GetStateMutex());
  // Wait until after the listener thread has been registered with the runtime.
  // This is so we do not attempt to register it with the runtime after the
  // runtime has been torn down (ArtPlugin_Deinitialize gets called in the
  // Runtime dtor).
  while (g_state == State::kWaitForListener) {
    GetStateCV().Wait(art::Thread::Current());
  }
  g_state = State::kUninitialized;
  GetStateCV().Broadcast(self);
  return true;
}

}  // namespace perfetto_hprof

namespace perfetto {

PERFETTO_DEFINE_DATA_SOURCE_STATIC_MEMBERS(perfetto_hprof::JavaHprofDataSource);

}  // namespace perfetto