1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#ifndef RUNTIME_VM_PROFILER_H_
6#define RUNTIME_VM_PROFILER_H_
7
8#include "platform/atomic.h"
9
10#include "vm/allocation.h"
11#include "vm/bitfield.h"
12#include "vm/code_observers.h"
13#include "vm/globals.h"
14#include "vm/growable_array.h"
15#include "vm/native_symbol.h"
16#include "vm/object.h"
17#include "vm/tags.h"
18#include "vm/thread_interrupter.h"
19
20// Profiler sampling and stack walking support.
21// NOTE: For service related code, see profile_service.h.
22
23namespace dart {
24
// Forward declarations.
class ProcessedSample;
class ProcessedSampleBuffer;

class Sample;
class SampleBlock;
class SampleBlockBuffer;  // Needed by Profiler::sample_block_buffer() below.
31
// X-macro listing the profiler's diagnostic counters. Each counter records
// how often a particular sampling outcome occurred (bail-outs, truncated
// stack walks, allocation failures, ...). V(name) is expanded once per
// counter; see ProfilerCounters for the expansion site.
#define PROFILER_COUNTERS(V)                                                   \
  V(bail_out_unknown_task)                                                     \
  V(bail_out_jump_to_exception_handler)                                        \
  V(bail_out_check_isolate)                                                    \
  V(single_frame_sample_deoptimizing)                                          \
  V(single_frame_sample_get_and_validate_stack_bounds)                         \
  V(stack_walker_native)                                                       \
  V(stack_walker_dart_exit)                                                    \
  V(stack_walker_dart)                                                         \
  V(stack_walker_none)                                                         \
  V(incomplete_sample_fp_bounds)                                               \
  V(incomplete_sample_fp_step)                                                 \
  V(incomplete_sample_bad_pc)                                                  \
  V(sample_allocation_failure)
46
47struct ProfilerCounters {
48#define DECLARE_PROFILER_COUNTER(name) RelaxedAtomic<int64_t> name;
49 PROFILER_COUNTERS(DECLARE_PROFILER_COUNTER)
50#undef DECLARE_PROFILER_COUNTER
51};
52
53class Profiler : public AllStatic {
54 public:
55 static void Init();
56 static void Cleanup();
57
58 static void SetSampleDepth(intptr_t depth);
59 static void SetSamplePeriod(intptr_t period);
60 // Restarts sampling with a given profile period. This is called after the
61 // profile period is changed via the service protocol.
62 static void UpdateSamplePeriod();
63 // Starts or shuts down the profiler after --profiler is changed via the
64 // service protocol.
65 static void UpdateRunningState();
66
67 static SampleBlockBuffer* sample_block_buffer() {
68 return sample_block_buffer_;
69 }
70 static void set_sample_block_buffer(SampleBlockBuffer* buffer) {
71 sample_block_buffer_ = buffer;
72 }
73
74 static void DumpStackTrace(void* context);
75 static void DumpStackTrace(bool for_crash = true);
76
77 static void SampleAllocation(Thread* thread,
78 intptr_t cid,
79 uint32_t identity_hash);
80
81 // SampleThread is called from inside the signal handler and hence it is very
82 // critical that the implementation of SampleThread does not do any of the
83 // following:
84 // * Accessing TLS -- Because on Fuchsia, Mac and Windows the callback will
85 // be running in a different thread.
86 // * Allocating memory -- Because this takes locks which may already be
87 // held, resulting in a dead lock.
88 // * Taking a lock -- See above.
89 static void SampleThread(Thread* thread, const InterruptedThreadState& state);
90
91 static ProfilerCounters counters() {
92 // Copies the counter values.
93 return counters_;
94 }
95 inline static intptr_t Size();
96
97 static void ProcessCompletedBlocks(Isolate* isolate);
98 static void IsolateShutdown(Thread* thread);
99
100 private:
101 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash);
102
103 // Calculates the sample buffer capacity. Returns
104 // SampleBuffer::kDefaultBufferCapacity if --sample-buffer-duration is not
105 // provided. Otherwise, the capacity is based on the sample rate, maximum
106 // sample stack depth, and the number of seconds of samples the sample buffer
107 // should be able to accomodate.
108 static intptr_t CalculateSampleBufferCapacity();
109
110 // Does not walk the thread's stack.
111 static void SampleThreadSingleFrame(Thread* thread,
112 Sample* sample,
113 uintptr_t pc);
114 static RelaxedAtomic<bool> initialized_;
115
116 static SampleBlockBuffer* sample_block_buffer_;
117
118 static ProfilerCounters counters_;
119
120 friend class Thread;
121};
122
123class SampleVisitor : public ValueObject {
124 public:
125 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {}
126 virtual ~SampleVisitor() {}
127
128 virtual void VisitSample(Sample* sample) = 0;
129
130 virtual void Reset() { visited_ = 0; }
131
132 intptr_t visited() const { return visited_; }
133
134 void IncrementVisited() { visited_++; }
135
136 Dart_Port port() const { return port_; }
137
138 private:
139 Dart_Port port_;
140 intptr_t visited_;
141
142 DISALLOW_IMPLICIT_CONSTRUCTORS(SampleVisitor);
143};
144
145class SampleFilter : public ValueObject {
146 public:
147 SampleFilter(Dart_Port port,
148 intptr_t thread_task_mask,
149 int64_t time_origin_micros,
150 int64_t time_extent_micros,
151 bool take_samples = false)
152 : port_(port),
153 thread_task_mask_(thread_task_mask),
154 time_origin_micros_(time_origin_micros),
155 time_extent_micros_(time_extent_micros),
156 take_samples_(take_samples) {
157 ASSERT(thread_task_mask != 0);
158 ASSERT(time_origin_micros_ >= -1);
159 ASSERT(time_extent_micros_ >= -1);
160 }
161 virtual ~SampleFilter() {}
162
163 // Override this function.
164 // Return |true| if |sample| passes the filter.
165 virtual bool FilterSample(Sample* sample) { return true; }
166
167 Dart_Port port() const { return port_; }
168
169 // Returns |true| if |sample| passes the time filter.
170 bool TimeFilterSample(Sample* sample);
171
172 // Returns |true| if |sample| passes the thread task filter.
173 bool TaskFilterSample(Sample* sample);
174
175 bool take_samples() const { return take_samples_; }
176
177 static constexpr intptr_t kNoTaskFilter = -1;
178
179 private:
180 Dart_Port port_;
181 intptr_t thread_task_mask_;
182 int64_t time_origin_micros_;
183 int64_t time_extent_micros_;
184 bool take_samples_;
185};
186
187class ClearProfileVisitor : public SampleVisitor {
188 public:
189 explicit ClearProfileVisitor(Isolate* isolate);
190
191 virtual void VisitSample(Sample* sample);
192};
193
194// Each Sample holds a stack trace from an isolate.
195class Sample {
196 public:
197 Sample() = default;
198
199 void Init(Dart_Port port, int64_t timestamp, ThreadId tid) {
200 Clear();
201 timestamp_ = timestamp;
202 tid_ = tid;
203 port_ = port;
204 next_ = nullptr;
205 }
206
207 Dart_Port port() const { return port_; }
208
209 // Thread sample was taken on.
210 ThreadId tid() const { return tid_; }
211
212 void Clear() {
213 timestamp_ = 0;
214 port_ = ILLEGAL_PORT;
215 tid_ = OSThread::kInvalidThreadId;
216 for (intptr_t i = 0; i < kStackBufferSizeInWords; i++) {
217 stack_buffer_[i] = 0;
218 }
219 for (intptr_t i = 0; i < kPCArraySizeInWords; i++) {
220 pc_array_[i] = 0;
221 }
222 vm_tag_ = VMTag::kInvalidTagId;
223 user_tag_ = UserTags::kDefaultUserTag;
224 state_ = 0;
225 next_ = nullptr;
226 allocation_identity_hash_ = 0;
227 set_head_sample(true);
228 }
229
230 // Timestamp sample was taken at.
231 int64_t timestamp() const { return timestamp_; }
232
233 // Top most pc.
234 uword pc() const { return At(i: 0); }
235
236 // Get stack trace entry.
237 uword At(intptr_t i) const {
238 ASSERT(i >= 0);
239 ASSERT(i < kPCArraySizeInWords);
240 return pc_array_[i];
241 }
242
243 // Set stack trace entry.
244 void SetAt(intptr_t i, uword pc) {
245 ASSERT(i >= 0);
246 ASSERT(i < kPCArraySizeInWords);
247 pc_array_[i] = pc;
248 }
249
250 void DumpStackTrace() {
251 for (intptr_t i = 0; i < kPCArraySizeInWords; ++i) {
252 uintptr_t start = 0;
253 uword pc = At(i);
254 char* native_symbol_name =
255 NativeSymbolResolver::LookupSymbolName(pc, start: &start);
256 if (native_symbol_name == nullptr) {
257 OS::PrintErr(format: " [0x%" Pp "] Unknown symbol\n", pc);
258 } else {
259 OS::PrintErr(format: " [0x%" Pp "] %s\n", pc, native_symbol_name);
260 NativeSymbolResolver::FreeSymbolName(name: native_symbol_name);
261 }
262 }
263 }
264
265 uword vm_tag() const { return vm_tag_; }
266 void set_vm_tag(uword tag) {
267 ASSERT(tag != VMTag::kInvalidTagId);
268 vm_tag_ = tag;
269 }
270
271 uword user_tag() const { return user_tag_; }
272 void set_user_tag(uword tag) { user_tag_ = tag; }
273
274 bool leaf_frame_is_dart() const { return LeafFrameIsDart::decode(value: state_); }
275
276 void set_leaf_frame_is_dart(bool leaf_frame_is_dart) {
277 state_ = LeafFrameIsDart::update(value: leaf_frame_is_dart, original: state_);
278 }
279
280 bool ignore_sample() const { return IgnoreBit::decode(value: state_); }
281
282 void set_ignore_sample(bool ignore_sample) {
283 state_ = IgnoreBit::update(value: ignore_sample, original: state_);
284 }
285
286 bool exit_frame_sample() const { return ExitFrameBit::decode(value: state_); }
287
288 void set_exit_frame_sample(bool exit_frame_sample) {
289 state_ = ExitFrameBit::update(value: exit_frame_sample, original: state_);
290 }
291
292 bool missing_frame_inserted() const {
293 return MissingFrameInsertedBit::decode(value: state_);
294 }
295
296 void set_missing_frame_inserted(bool missing_frame_inserted) {
297 state_ = MissingFrameInsertedBit::update(value: missing_frame_inserted, original: state_);
298 }
299
300 bool truncated_trace() const { return TruncatedTraceBit::decode(value: state_); }
301
302 void set_truncated_trace(bool truncated_trace) {
303 state_ = TruncatedTraceBit::update(value: truncated_trace, original: state_);
304 }
305
306 bool is_allocation_sample() const {
307 return ClassAllocationSampleBit::decode(value: state_);
308 }
309
310 void set_is_allocation_sample(bool allocation_sample) {
311 state_ = ClassAllocationSampleBit::update(value: allocation_sample, original: state_);
312 }
313
314 uint32_t allocation_identity_hash() const {
315 return allocation_identity_hash_;
316 }
317
318 void set_allocation_identity_hash(uint32_t hash) {
319 allocation_identity_hash_ = hash;
320 }
321
322 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(value: state_); }
323
324 void set_thread_task(Thread::TaskKind task) {
325 state_ = ThreadTaskBit::update(value: task, original: state_);
326 }
327
328 bool is_continuation_sample() const {
329 return ContinuationSampleBit::decode(value: state_);
330 }
331
332 void SetContinuation(Sample* next) {
333 ASSERT(!is_continuation_sample());
334 ASSERT(next_ == nullptr);
335 state_ = ContinuationSampleBit::update(value: true, original: state_);
336 next_ = next;
337 }
338
339 Sample* continuation_sample() const { return next_; }
340
341 intptr_t allocation_cid() const {
342 ASSERT(is_allocation_sample());
343 return metadata();
344 }
345
346 void set_head_sample(bool head_sample) {
347 state_ = HeadSampleBit::update(value: head_sample, original: state_);
348 }
349
350 bool head_sample() const { return HeadSampleBit::decode(value: state_); }
351
352 intptr_t metadata() const { return MetadataBits::decode(value: state_); }
353 void set_metadata(intptr_t metadata) {
354 state_ = MetadataBits::update(value: metadata, original: state_);
355 }
356
357 void SetAllocationCid(intptr_t cid) {
358 set_is_allocation_sample(true);
359 set_metadata(cid);
360 }
361
362 static constexpr int kPCArraySizeInWords = 32;
363 uword* GetPCArray() { return &pc_array_[0]; }
364
365 static constexpr int kStackBufferSizeInWords = 2;
366 uword* GetStackBuffer() { return &stack_buffer_[0]; }
367
368 private:
369 enum StateBits {
370 kHeadSampleBit = 0,
371 kLeafFrameIsDartBit = 1,
372 kIgnoreBit = 2,
373 kExitFrameBit = 3,
374 kMissingFrameInsertedBit = 4,
375 kTruncatedTraceBit = 5,
376 kClassAllocationSampleBit = 6,
377 kContinuationSampleBit = 7,
378 kThreadTaskBit = 8, // 7 bits.
379 kMetadataBit = 15, // 16 bits.
380 kNextFreeBit = 31,
381 };
382 class HeadSampleBit : public BitField<uint32_t, bool, kHeadSampleBit, 1> {};
383 class LeafFrameIsDart
384 : public BitField<uint32_t, bool, kLeafFrameIsDartBit, 1> {};
385 class IgnoreBit : public BitField<uint32_t, bool, kIgnoreBit, 1> {};
386 class ExitFrameBit : public BitField<uint32_t, bool, kExitFrameBit, 1> {};
387 class MissingFrameInsertedBit
388 : public BitField<uint32_t, bool, kMissingFrameInsertedBit, 1> {};
389 class TruncatedTraceBit
390 : public BitField<uint32_t, bool, kTruncatedTraceBit, 1> {};
391 class ClassAllocationSampleBit
392 : public BitField<uint32_t, bool, kClassAllocationSampleBit, 1> {};
393 class ContinuationSampleBit
394 : public BitField<uint32_t, bool, kContinuationSampleBit, 1> {};
395 class ThreadTaskBit
396 : public BitField<uint32_t, Thread::TaskKind, kThreadTaskBit, 7> {};
397 class MetadataBits : public BitField<uint32_t, intptr_t, kMetadataBit, 16> {};
398
399 int64_t timestamp_;
400 Dart_Port port_;
401 ThreadId tid_;
402 uword stack_buffer_[kStackBufferSizeInWords];
403 uword pc_array_[kPCArraySizeInWords];
404 uword vm_tag_;
405 uword user_tag_;
406 uint32_t state_;
407 Sample* next_;
408 uint32_t allocation_identity_hash_;
409
410 DISALLOW_COPY_AND_ASSIGN(Sample);
411};
412
413class AbstractCode {
414 public:
415 explicit AbstractCode(ObjectPtr code) : code_(Object::Handle(ptr: code)) {
416 ASSERT(code_.IsNull() || code_.IsCode());
417 }
418
419 ObjectPtr ptr() const { return code_.ptr(); }
420 const Object* handle() const { return &code_; }
421
422 uword PayloadStart() const {
423 ASSERT(code_.IsCode());
424 return Code::Cast(obj: code_).PayloadStart();
425 }
426
427 uword Size() const {
428 ASSERT(code_.IsCode());
429 return Code::Cast(obj: code_).Size();
430 }
431
432 int64_t compile_timestamp() const {
433 if (code_.IsCode()) {
434 return Code::Cast(obj: code_).compile_timestamp();
435 } else {
436 return 0;
437 }
438 }
439
440 const char* Name() const {
441 if (code_.IsCode()) {
442 return Code::Cast(obj: code_).Name();
443 } else {
444 return "";
445 }
446 }
447
448 const char* QualifiedName() const {
449 if (code_.IsCode()) {
450 return Code::Cast(obj: code_).QualifiedName(
451 params: NameFormattingParams(Object::kUserVisibleName));
452 } else {
453 return "";
454 }
455 }
456
457 bool IsStubCode() const {
458 if (code_.IsCode()) {
459 return Code::Cast(obj: code_).IsStubCode();
460 } else {
461 return false;
462 }
463 }
464
465 bool IsAllocationStubCode() const {
466 if (code_.IsCode()) {
467 return Code::Cast(obj: code_).IsAllocationStubCode();
468 } else {
469 return false;
470 }
471 }
472
473 bool IsTypeTestStubCode() const {
474 if (code_.IsCode()) {
475 return Code::Cast(obj: code_).IsTypeTestStubCode();
476 } else {
477 return false;
478 }
479 }
480
481 ObjectPtr owner() const {
482 if (code_.IsCode()) {
483 return Code::Cast(obj: code_).owner();
484 } else {
485 return Object::null();
486 }
487 }
488
489 bool IsNull() const { return code_.IsNull(); }
490 bool IsCode() const { return code_.IsCode(); }
491
492 bool is_optimized() const {
493 if (code_.IsCode()) {
494 return Code::Cast(obj: code_).is_optimized();
495 } else {
496 return false;
497 }
498 }
499
500 private:
501 const Object& code_;
502};
503
504// A Code object descriptor.
505class CodeDescriptor : public ZoneAllocated {
506 public:
507 explicit CodeDescriptor(const AbstractCode code);
508
509 uword Start() const;
510
511 uword Size() const;
512
513 int64_t CompileTimestamp() const;
514
515 const AbstractCode code() const { return code_; }
516
517 const char* Name() const { return code_.Name(); }
518
519 bool Contains(uword pc) const {
520 uword end = Start() + Size();
521 return (pc >= Start()) && (pc < end);
522 }
523
524 static int Compare(CodeDescriptor* const* a, CodeDescriptor* const* b) {
525 ASSERT(a != nullptr);
526 ASSERT(b != nullptr);
527
528 uword a_start = (*a)->Start();
529 uword b_start = (*b)->Start();
530
531 if (a_start < b_start) {
532 return -1;
533 } else if (a_start > b_start) {
534 return 1;
535 } else {
536 return 0;
537 }
538 }
539
540 private:
541 const AbstractCode code_;
542
543 DISALLOW_COPY_AND_ASSIGN(CodeDescriptor);
544};
545
546// Fast lookup of Dart code objects.
547class CodeLookupTable : public ZoneAllocated {
548 public:
549 explicit CodeLookupTable(Thread* thread);
550
551 intptr_t length() const { return code_objects_.length(); }
552
553 const CodeDescriptor* At(intptr_t index) const {
554 return code_objects_.At(index);
555 }
556
557 const CodeDescriptor* FindCode(uword pc) const;
558
559 private:
560 void Build(Thread* thread);
561
562 void Add(const Object& code);
563
564 // Code objects sorted by entry.
565 ZoneGrowableArray<CodeDescriptor*> code_objects_;
566
567 friend class CodeLookupTableBuilder;
568
569 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable);
570};
571
572// Interface for a class that can create a ProcessedSampleBuffer.
573class ProcessedSampleBufferBuilder {
574 public:
575 virtual ~ProcessedSampleBufferBuilder() = default;
576 virtual ProcessedSampleBuffer* BuildProcessedSampleBuffer(
577 SampleFilter* filter,
578 ProcessedSampleBuffer* buffer = nullptr) = 0;
579};
580
581class SampleBuffer : public ProcessedSampleBufferBuilder {
582 public:
583 SampleBuffer() = default;
584 virtual ~SampleBuffer() = default;
585
586 virtual void Init(Sample* samples, intptr_t capacity) {
587 ASSERT(samples != nullptr);
588 ASSERT(capacity > 0);
589 samples_ = samples;
590 capacity_ = capacity;
591 }
592
593 void VisitSamples(SampleVisitor* visitor) {
594 ASSERT(visitor != nullptr);
595 const intptr_t length = capacity();
596 for (intptr_t i = 0; i < length; i++) {
597 Sample* sample = At(idx: i);
598 if (!sample->head_sample()) {
599 // An inner sample in a chain of samples.
600 continue;
601 }
602 if (sample->ignore_sample()) {
603 // Bad sample.
604 continue;
605 }
606 if (sample->port() != visitor->port()) {
607 // Another isolate.
608 continue;
609 }
610 if (sample->timestamp() == 0) {
611 // Empty.
612 continue;
613 }
614 if (sample->At(i: 0) == 0) {
615 // No frames.
616 continue;
617 }
618 visitor->IncrementVisited();
619 visitor->VisitSample(sample);
620 }
621 }
622
623 virtual Sample* ReserveSample() = 0;
624 virtual Sample* ReserveSampleAndLink(Sample* previous) = 0;
625
626 Sample* At(intptr_t idx) const {
627 ASSERT(idx >= 0);
628 ASSERT(idx < capacity_);
629 return &samples_[idx];
630 }
631
632 intptr_t capacity() const { return capacity_; }
633
634 virtual ProcessedSampleBuffer* BuildProcessedSampleBuffer(
635 SampleFilter* filter,
636 ProcessedSampleBuffer* buffer = nullptr);
637
638 protected:
639 Sample* Next(Sample* sample);
640
641 ProcessedSample* BuildProcessedSample(Sample* sample,
642 const CodeLookupTable& clt);
643
644 Sample* samples_;
645 intptr_t capacity_;
646
647 DISALLOW_COPY_AND_ASSIGN(SampleBuffer);
648};
649
650class SampleBlock : public SampleBuffer {
651 public:
652 // The default number of samples per block. Overridden by some tests.
653 static constexpr intptr_t kSamplesPerBlock = 100;
654
655 SampleBlock() = default;
656 virtual ~SampleBlock() = default;
657
658 // Returns the number of samples contained within this block.
659 intptr_t capacity() const { return capacity_; }
660
661 Isolate* owner() const { return owner_; }
662 void set_owner(Isolate* isolate) { owner_ = isolate; }
663
664 virtual Sample* ReserveSample();
665 virtual Sample* ReserveSampleAndLink(Sample* previous);
666
667 bool TryAllocateFree() {
668 State expected = kFree;
669 State desired = kSampling;
670 std::memory_order success_order = std::memory_order_acquire;
671 std::memory_order failure_order = std::memory_order_relaxed;
672 return state_.compare_exchange_strong(e&: expected, d: desired, s: success_order,
673 f: failure_order);
674 }
675 bool TryAllocateCompleted() {
676 State expected = kCompleted;
677 State desired = kSampling;
678 std::memory_order success_order = std::memory_order_acquire;
679 std::memory_order failure_order = std::memory_order_relaxed;
680 if (state_.compare_exchange_strong(e&: expected, d: desired, s: success_order,
681 f: failure_order)) {
682 owner_ = nullptr;
683 cursor_ = 0;
684 return true;
685 }
686 return false;
687 }
688 void MarkCompleted() {
689 ASSERT(state_.load(std::memory_order_relaxed) == kSampling);
690 state_.store(d: kCompleted, m: std::memory_order_release);
691 }
692 bool TryAcquireStreaming(Isolate* isolate) {
693 if (state_.load(m: std::memory_order_relaxed) != kCompleted) return false;
694 if (owner_ != isolate) return false;
695
696 State expected = kCompleted;
697 State desired = kStreaming;
698 std::memory_order success_order = std::memory_order_acquire;
699 std::memory_order failure_order = std::memory_order_relaxed;
700 return state_.compare_exchange_strong(e&: expected, d: desired, s: success_order,
701 f: failure_order);
702 }
703 void StreamingToCompleted() {
704 ASSERT(state_.load(std::memory_order_relaxed) == kStreaming);
705 state_.store(d: kCompleted, m: std::memory_order_relaxed);
706 }
707 void StreamingToFree() {
708 ASSERT(state_.load(std::memory_order_relaxed) == kStreaming);
709 owner_ = nullptr;
710 cursor_ = 0;
711 state_.store(d: kFree, m: std::memory_order_release);
712 }
713 void FreeCompleted() {
714 State expected = kCompleted;
715 State desired = kStreaming;
716 std::memory_order success_order = std::memory_order_acquire;
717 std::memory_order failure_order = std::memory_order_relaxed;
718 if (state_.compare_exchange_strong(e&: expected, d: desired, s: success_order,
719 f: failure_order)) {
720 StreamingToFree();
721 }
722 }
723
724 protected:
725 bool HasStreamableSamples(const GrowableObjectArray& tag_table, UserTag* tag);
726
727 enum State : uint32_t {
728 kFree,
729 kSampling, // I.e., writing.
730 kCompleted,
731 kStreaming, // I.e., reading.
732 };
733 std::atomic<State> state_ = kFree;
734 RelaxedAtomic<uint32_t> cursor_ = 0;
735 Isolate* owner_ = nullptr;
736
737 private:
738 friend class SampleBlockListProcessor;
739 friend class SampleBlockBuffer;
740
741 DISALLOW_COPY_AND_ASSIGN(SampleBlock);
742};
743
744class SampleBlockBuffer : public ProcessedSampleBufferBuilder {
745 public:
746 static constexpr intptr_t kDefaultBlockCount = 600;
747
748 // Creates a SampleBlockBuffer with a predetermined number of blocks.
749 //
750 // Defaults to kDefaultBlockCount blocks. Block size is fixed to
751 // SampleBlock::kSamplesPerBlock samples per block, except for in tests.
752 explicit SampleBlockBuffer(
753 intptr_t blocks = kDefaultBlockCount,
754 intptr_t samples_per_block = SampleBlock::kSamplesPerBlock);
755
756 virtual ~SampleBlockBuffer();
757
758 void VisitSamples(SampleVisitor* visitor) {
759 ASSERT(visitor != nullptr);
760 for (intptr_t i = 0; i < capacity_; ++i) {
761 blocks_[i].VisitSamples(visitor);
762 }
763 }
764
765 void FreeCompletedBlocks();
766
767 // Reserves a sample for a CPU profile.
768 //
769 // Returns nullptr when a sample can't be reserved.
770 Sample* ReserveCPUSample(Isolate* isolate);
771
772 // Reserves a sample for a Dart object allocation profile.
773 //
774 // Returns nullptr when a sample can't be reserved.
775 Sample* ReserveAllocationSample(Isolate* isolate);
776
777 intptr_t Size() const { return memory_->size(); }
778
779 virtual ProcessedSampleBuffer* BuildProcessedSampleBuffer(
780 SampleFilter* filter,
781 ProcessedSampleBuffer* buffer = nullptr);
782
783 private:
784 Sample* ReserveSampleImpl(Isolate* isolate, bool allocation_sample);
785
786 // Returns nullptr if there are no available blocks.
787 SampleBlock* ReserveSampleBlock();
788
789 // Sample block management.
790 RelaxedAtomic<int> cursor_;
791 SampleBlock* blocks_;
792 intptr_t capacity_;
793
794 // Sample buffer management.
795 VirtualMemory* memory_;
796 Sample* sample_buffer_;
797
798 friend class Isolate;
799 DISALLOW_COPY_AND_ASSIGN(SampleBlockBuffer);
800};
801
802class StreamingSampleBufferBuilder : public ProcessedSampleBufferBuilder {
803 public:
804 explicit StreamingSampleBufferBuilder(Isolate* isolate) : isolate_(isolate) {}
805
806 virtual ProcessedSampleBuffer* BuildProcessedSampleBuffer(
807 SampleFilter* filter,
808 ProcessedSampleBuffer* buffer = nullptr);
809
810 // Returns true when at least one sample in the sample block list has a user
811 // tag with CPU sample streaming enabled.
812 bool HasStreamableSamples(Thread* thread);
813
814 private:
815 Isolate* isolate_;
816
817 DISALLOW_COPY_AND_ASSIGN(StreamingSampleBufferBuilder);
818};
819
820intptr_t Profiler::Size() {
821 intptr_t size = 0;
822 if (sample_block_buffer_ != nullptr) {
823 size += sample_block_buffer_->Size();
824 }
825 return size;
826}
827
828// A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have
829// been merged into a logical sample. The raw data may have been processed to
830// improve the quality of the stack trace.
831class ProcessedSample : public ZoneAllocated {
832 public:
833 ProcessedSample();
834
835 // Add |pc| to stack trace.
836 void Add(uword pc) { pcs_.Add(value: pc); }
837
838 // Insert |pc| at |index|.
839 void InsertAt(intptr_t index, uword pc) { pcs_.InsertAt(idx: index, value: pc); }
840
841 // Number of pcs in stack trace.
842 intptr_t length() const { return pcs_.length(); }
843
844 // Get |pc| at |index|.
845 uword At(intptr_t index) const {
846 ASSERT(index >= 0);
847 ASSERT(index < length());
848 return pcs_[index];
849 }
850
851 // Timestamp sample was taken at.
852 int64_t timestamp() const { return timestamp_; }
853 void set_timestamp(int64_t timestamp) { timestamp_ = timestamp; }
854
855 ThreadId tid() const { return tid_; }
856 void set_tid(ThreadId tid) { tid_ = tid; }
857
858 // The VM tag.
859 uword vm_tag() const { return vm_tag_; }
860 void set_vm_tag(uword tag) { vm_tag_ = tag; }
861
862 // The user tag.
863 uword user_tag() const { return user_tag_; }
864 void set_user_tag(uword tag) { user_tag_ = tag; }
865
866 // The class id if this is an allocation profile sample. -1 otherwise.
867 intptr_t allocation_cid() const { return allocation_cid_; }
868 void set_allocation_cid(intptr_t cid) { allocation_cid_ = cid; }
869
870 // The identity hash code of the allocated object if this is an allocation
871 // profile sample. -1 otherwise.
872 uint32_t allocation_identity_hash() const {
873 return allocation_identity_hash_;
874 }
875 void set_allocation_identity_hash(uint32_t hash) {
876 allocation_identity_hash_ = hash;
877 }
878
879 bool IsAllocationSample() const { return allocation_cid_ > 0; }
880
881 // Was the stack trace truncated?
882 bool truncated() const { return truncated_; }
883 void set_truncated(bool truncated) { truncated_ = truncated; }
884
885 // Was the first frame in the stack trace executing?
886 bool first_frame_executing() const { return first_frame_executing_; }
887 void set_first_frame_executing(bool first_frame_executing) {
888 first_frame_executing_ = first_frame_executing;
889 }
890
891 private:
892 void FixupCaller(const CodeLookupTable& clt,
893 uword pc_marker,
894 uword* stack_buffer);
895
896 void CheckForMissingDartFrame(const CodeLookupTable& clt,
897 const CodeDescriptor* code,
898 uword pc_marker,
899 uword* stack_buffer);
900
901 ZoneGrowableArray<uword> pcs_;
902 int64_t timestamp_;
903 ThreadId tid_;
904 uword vm_tag_;
905 uword user_tag_;
906 intptr_t allocation_cid_;
907 uint32_t allocation_identity_hash_;
908 bool truncated_;
909 bool first_frame_executing_;
910
911 friend class SampleBuffer;
912 DISALLOW_COPY_AND_ASSIGN(ProcessedSample);
913};
914
915// A collection of |ProcessedSample|s.
916class ProcessedSampleBuffer : public ZoneAllocated {
917 public:
918 ProcessedSampleBuffer();
919
920 void Add(ProcessedSample* sample) { samples_.Add(value: sample); }
921
922 intptr_t length() const { return samples_.length(); }
923
924 ProcessedSample* At(intptr_t index) { return samples_.At(index); }
925
926 const CodeLookupTable& code_lookup_table() const {
927 return *code_lookup_table_;
928 }
929
930 private:
931 ZoneGrowableArray<ProcessedSample*> samples_;
932 CodeLookupTable* code_lookup_table_;
933
934 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer);
935};
936
937class SampleBlockProcessor : public AllStatic {
938 public:
939 static void Init();
940
941 static void Startup();
942 static void Cleanup();
943
944 private:
945 static constexpr intptr_t kMaxThreads = 4096;
946 static bool initialized_;
947 static bool shutdown_;
948 static bool thread_running_;
949 static ThreadJoinId processor_thread_id_;
950 static Monitor* monitor_;
951
952 static void ThreadMain(uword parameters);
953};
954
955} // namespace dart
956
957#endif // RUNTIME_VM_PROFILER_H_
958

source code of flutter_engine/third_party/dart/runtime/vm/profiler.h