1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#ifndef RUNTIME_VM_OBJECT_H_
6#define RUNTIME_VM_OBJECT_H_
7
8#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
9#error "Should not include runtime"
10#endif
11
12#include <limits>
13#include <tuple>
14#include <utility>
15
16#include "include/dart_api.h"
17#include "platform/assert.h"
18#include "platform/atomic.h"
19#include "platform/thread_sanitizer.h"
20#include "platform/utils.h"
21#include "vm/bitmap.h"
22#include "vm/code_comments.h"
23#include "vm/code_entry_kind.h"
24#include "vm/compiler/assembler/object_pool_builder.h"
25#include "vm/compiler/method_recognizer.h"
26#include "vm/compiler/runtime_api.h"
27#include "vm/dart.h"
28#include "vm/flags.h"
29#include "vm/globals.h"
30#include "vm/growable_array.h"
31#include "vm/handles.h"
32#include "vm/heap/heap.h"
33#include "vm/isolate.h"
34#include "vm/json_stream.h"
35#include "vm/os.h"
36#include "vm/raw_object.h"
37#include "vm/report.h"
38#include "vm/static_type_exactness_state.h"
39#include "vm/thread.h"
40#include "vm/token_position.h"
41
42namespace dart {
43
44// Forward declarations.
45namespace compiler {
46class Assembler;
47}
48
49namespace kernel {
50class Program;
51class TreeNode;
52} // namespace kernel
53
54#define DEFINE_FORWARD_DECLARATION(clazz) class clazz;
55CLASS_LIST(DEFINE_FORWARD_DECLARATION)
56#undef DEFINE_FORWARD_DECLARATION
57class Api;
58class ArgumentsDescriptor;
59class Closure;
60class Code;
61class DeoptInstr;
62class DisassemblyFormatter;
63class FinalizablePersistentHandle;
64class FlowGraphCompiler;
65class HierarchyInfo;
66class LocalScope;
67class CallSiteResetter;
68class CodeStatistics;
69class IsolateGroupReloadContext;
70class ObjectGraphCopier;
71class FunctionTypeMapping;
72class NativeArguments;
73
74#define REUSABLE_FORWARD_DECLARATION(name) class Reusable##name##HandleScope;
75REUSABLE_HANDLE_LIST(REUSABLE_FORWARD_DECLARATION)
76#undef REUSABLE_FORWARD_DECLARATION
77
78class Symbols;
79class BaseTextBuffer;
80
81#if defined(DEBUG)
82#define CHECK_HANDLE() CheckHandle();
83#else
84#define CHECK_HANDLE()
85#endif
86
87// For AllStatic classes like OneByteString. Checks that
88// ContainsCompressedPointers() returns the same value for AllStatic class and
89// class used for handles.
90#define ALLSTATIC_CONTAINS_COMPRESSED_IMPLEMENTATION(object, handle) \
91 public: /* NOLINT */ \
92 using UntaggedObjectType = dart::Untagged##object; \
93 using ObjectPtrType = dart::object##Ptr; \
94 static_assert(std::is_base_of<dart::handle##Ptr, ObjectPtrType>::value, \
95 #object "Ptr must be a subtype of " #handle "Ptr"); \
96 static_assert(dart::handle::ContainsCompressedPointers() == \
97 UntaggedObjectType::kContainsCompressedPointers, \
98 "Pointer compression in Untagged" #object \
99 " must match pointer compression in Untagged" #handle); \
100 static constexpr bool ContainsCompressedPointers() { \
101 return UntaggedObjectType::kContainsCompressedPointers; \
102 } \
103 \
104 private: /* NOLINT */
105
106#define BASE_OBJECT_IMPLEMENTATION(object, super) \
107 public: /* NOLINT */ \
108 using UntaggedObjectType = dart::Untagged##object; \
109 using ObjectPtrType = dart::object##Ptr; \
110 static_assert(!dart::super::ContainsCompressedPointers() || \
111 UntaggedObjectType::kContainsCompressedPointers, \
112 "Untagged" #object \
113 " must have compressed pointers, as supertype Untagged" #super \
114 " has compressed pointers"); \
115 static constexpr bool ContainsCompressedPointers() { \
116 return UntaggedObjectType::kContainsCompressedPointers; \
117 } \
118 object##Ptr ptr() const { \
119 return static_cast<object##Ptr>(ptr_); \
120 } \
121 bool Is##object() const { \
122 return true; \
123 } \
124 DART_NOINLINE static object& Handle() { \
125 return static_cast<object&>( \
126 HandleImpl(Thread::Current()->zone(), object::null(), kClassId)); \
127 } \
128 DART_NOINLINE static object& Handle(Zone* zone) { \
129 return static_cast<object&>(HandleImpl(zone, object::null(), kClassId)); \
130 } \
131 DART_NOINLINE static object& Handle(object##Ptr ptr) { \
132 return static_cast<object&>( \
133 HandleImpl(Thread::Current()->zone(), ptr, kClassId)); \
134 } \
135 DART_NOINLINE static object& Handle(Zone* zone, object##Ptr ptr) { \
136 return static_cast<object&>(HandleImpl(zone, ptr, kClassId)); \
137 } \
138 DART_NOINLINE static object& ZoneHandle() { \
139 return static_cast<object&>( \
140 ZoneHandleImpl(Thread::Current()->zone(), object::null(), kClassId)); \
141 } \
142 DART_NOINLINE static object& ZoneHandle(Zone* zone) { \
143 return static_cast<object&>( \
144 ZoneHandleImpl(zone, object::null(), kClassId)); \
145 } \
146 DART_NOINLINE static object& ZoneHandle(object##Ptr ptr) { \
147 return static_cast<object&>( \
148 ZoneHandleImpl(Thread::Current()->zone(), ptr, kClassId)); \
149 } \
150 DART_NOINLINE static object& ZoneHandle(Zone* zone, object##Ptr ptr) { \
151 return static_cast<object&>(ZoneHandleImpl(zone, ptr, kClassId)); \
152 } \
153 static object* ReadOnlyHandle() { \
154 return static_cast<object*>(ReadOnlyHandleImpl(kClassId)); \
155 } \
156 DART_NOINLINE static object& CheckedHandle(Zone* zone, ObjectPtr ptr) { \
157 object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone)); \
158 initializeHandle(obj, ptr); \
159 if (!obj->Is##object()) { \
160 FATAL("Handle check failed: saw %s expected %s", obj->ToCString(), \
161 #object); \
162 } \
163 return *obj; \
164 } \
165 DART_NOINLINE static object& CheckedZoneHandle(Zone* zone, ObjectPtr ptr) { \
166 object* obj = \
167 reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone)); \
168 initializeHandle(obj, ptr); \
169 if (!obj->Is##object()) { \
170 FATAL("Handle check failed: saw %s expected %s", obj->ToCString(), \
171 #object); \
172 } \
173 return *obj; \
174 } \
175 DART_NOINLINE static object& CheckedZoneHandle(ObjectPtr ptr) { \
176 return CheckedZoneHandle(Thread::Current()->zone(), ptr); \
177 } \
178 /* T::Cast cannot be applied to a null Object, because the object vtable */ \
179 /* is not setup for type T, although some methods are supposed to work */ \
180 /* with null, for example Instance::Equals(). */ \
181 static const object& Cast(const Object& obj) { \
182 ASSERT(obj.Is##object()); \
183 return reinterpret_cast<const object&>(obj); \
184 } \
185 static object##Ptr RawCast(ObjectPtr raw) { \
186 ASSERT(Is##object##NoHandle(raw)); \
187 return static_cast<object##Ptr>(raw); \
188 } \
189 static object##Ptr null() { \
190 return static_cast<object##Ptr>(Object::null()); \
191 } \
192 virtual const char* ToCString() const; \
193 static const ClassId kClassId = k##object##Cid; \
194 \
195 private: /* NOLINT */ \
196 /* Initialize the handle based on the ptr in the presence of null. */ \
197 static void initializeHandle(object* obj, ObjectPtr ptr) { \
198 obj->setPtr(ptr, kClassId); \
199 } \
200 /* Disallow allocation, copy constructors and override super assignment. */ \
201 public: /* NOLINT */ \
202 void operator delete(void* pointer) { \
203 UNREACHABLE(); \
204 } \
205 \
206 private: /* NOLINT */ \
207 void* operator new(size_t size); \
208 object(const object& value) = delete; \
209 void operator=(super##Ptr value) = delete; \
210 void operator=(const object& value) = delete; \
211 void operator=(const super& value) = delete;
212
213// Conditionally include object_service.cc functionality in the vtable to avoid
214// link errors like the following:
215//
216// object.o:(.rodata._ZTVN4....E[_ZTVN4...E]+0x278):
217// undefined reference to
218// `dart::Instance::PrintSharedInstanceJSON(dart::JSONObject*, bool) const'.
219//
220#ifndef PRODUCT
221#define OBJECT_SERVICE_SUPPORT(object) \
222 protected: /* NOLINT */ \
223 /* Object is printed as JSON into stream. If ref is true only a header */ \
224 /* with an object id is printed. If ref is false the object is fully */ \
225 /* printed. */ \
226 virtual void PrintJSONImpl(JSONStream* stream, bool ref) const; \
227 /* Prints JSON objects that describe the implementation-level fields of */ \
228 /* the current Object to |jsarr_fields|. */ \
229 virtual void PrintImplementationFieldsImpl(const JSONArray& jsarr_fields) \
230 const; \
231 virtual const char* JSONType() const { \
232 return "" #object; \
233 }
234#else
235#define OBJECT_SERVICE_SUPPORT(object) protected: /* NOLINT */
236#endif // !PRODUCT
237
238#define SNAPSHOT_SUPPORT(object) \
239 friend class object##MessageSerializationCluster; \
240 friend class object##MessageDeserializationCluster;
241
242#define OBJECT_IMPLEMENTATION(object, super) \
243 public: /* NOLINT */ \
244 DART_NOINLINE void operator=(object##Ptr value) { \
245 initializeHandle(this, value); \
246 } \
247 DART_NOINLINE void operator^=(ObjectPtr value) { \
248 initializeHandle(this, value); \
249 ASSERT(IsNull() || Is##object()); \
250 } \
251 \
252 protected: /* NOLINT */ \
253 object() : super() {} \
254 BASE_OBJECT_IMPLEMENTATION(object, super) \
255 OBJECT_SERVICE_SUPPORT(object) \
256 friend class Object;
257
258extern "C" void DFLRT_ExitSafepoint(NativeArguments __unusable_);
259
260#define HEAP_OBJECT_IMPLEMENTATION(object, super) \
261 OBJECT_IMPLEMENTATION(object, super); \
262 Untagged##object* untag() const { \
263 ASSERT(ptr() != null()); \
264 return const_cast<Untagged##object*>(ptr()->untag()); \
265 } \
266 SNAPSHOT_SUPPORT(object) \
267 friend class StackFrame; \
268 friend class Thread; \
269 friend void DFLRT_ExitSafepoint(NativeArguments __unusable_);
270
271// This macro is used to denote types that do not have a sub-type.
272#define FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super) \
273 public: /* NOLINT */ \
274 void operator=(object##Ptr value) { \
275 ptr_ = value; \
276 CHECK_HANDLE(); \
277 } \
278 void operator^=(ObjectPtr value) { \
279 ptr_ = value; \
280 CHECK_HANDLE(); \
281 } \
282 \
283 private: /* NOLINT */ \
284 object() : super() {} \
285 BASE_OBJECT_IMPLEMENTATION(object, super) \
286 OBJECT_SERVICE_SUPPORT(object) \
287 Untagged##object* untag() const { \
288 ASSERT(ptr() != null()); \
289 return const_cast<Untagged##object*>(ptr()->untag()); \
290 } \
291 static intptr_t NextFieldOffset() { return -kWordSize; } \
292 SNAPSHOT_SUPPORT(rettype) \
293 friend class Object; \
294 friend class StackFrame; \
295 friend class Thread; \
296 friend void DFLRT_ExitSafepoint(NativeArguments __unusable_);
297
298#define FINAL_HEAP_OBJECT_IMPLEMENTATION(object, super) \
299 FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, object, super)
300
301#define MINT_OBJECT_IMPLEMENTATION(object, rettype, super) \
302 FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)
303
304// In precompiled runtime, there is no access to runtime_api.cc since host
305// and target are the same. In those cases, the namespace dart is used to refer
306// to the target namespace
307#if defined(DART_PRECOMPILED_RUNTIME)
308namespace RTN = dart;
309#else
310namespace RTN = dart::compiler::target;
311#endif // defined(DART_PRECOMPILED_RUNTIME)
312
313class Object {
314 public:
315 using UntaggedObjectType = UntaggedObject;
316 using ObjectPtrType = ObjectPtr;
317
318 // We use 30 bits for the hash code so hashes in a snapshot taken on a
319 // 64-bit architecture stay in Smi range when loaded on a 32-bit
320 // architecture.
321 static constexpr intptr_t kHashBits = 30;
322
  // Identity cast: Object is the root handle type, so every ObjectPtr
  // already is one. Typed subclasses shadow this with a checked version.
  static ObjectPtr RawCast(ObjectPtr obj) { return obj; }

  virtual ~Object() {}

  // Whether object pointer fields in the untagged heap layout are compressed.
  static constexpr bool ContainsCompressedPointers() {
    return UntaggedObject::kContainsCompressedPointers;
  }
  // The raw (tagged) pointer this handle currently refers to.
  ObjectPtr ptr() const { return ptr_; }
331 void operator=(ObjectPtr value) { initializeHandle(obj: this, ptr: value); }
332
  // Header-tag bit accessors; the bits live in the object's header word
  // (see UntaggedObject). "Canonical" presumably marks canonicalized
  // (interned) instances and "immutable" write-protected ones — semantics
  // are defined by UntaggedObject, confirm there.
  bool IsCanonical() const { return ptr()->untag()->IsCanonical(); }
  void SetCanonical() const { ptr()->untag()->SetCanonical(); }
  void ClearCanonical() const { ptr()->untag()->ClearCanonical(); }
  bool IsImmutable() const { return ptr()->untag()->IsImmutable(); }
  void SetImmutable() const { ptr()->untag()->SetImmutable(); }
  void ClearImmutable() const { ptr()->untag()->ClearImmutable(); }
  // Smis are not heap objects and have no header, so their class id is
  // implied by the pointer tag rather than read from memory.
  intptr_t GetClassId() const {
    return !ptr()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
                                  : ptr()->untag()->GetClassId();
  }
  inline ClassPtr clazz() const;
  // Offset of the header tags word, for use by offset-based (compiled) code.
  static intptr_t tags_offset() { return OFFSET_OF(UntaggedObject, tags_); }
345
346// Class testers.
#define DEFINE_CLASS_TESTER(clazz) \
  /* Virtual tester; each handle class overrides its own Is##clazz(). */ \
  virtual bool Is##clazz() const { return false; } \
  static bool Is##clazz##NoHandle(ObjectPtr ptr) { \
    /* Use a stack handle to make RawCast safe in contexts where handles */ \
    /* should not be allocated, such as GC or runtime transitions. Not */ \
    /* using Object's constructor to avoid Is##clazz being de-virtualized. */ \
    char buf[sizeof(Object)]; \
    Object* obj = reinterpret_cast<Object*>(&buf); \
    initializeHandle(obj, ptr); \
    return obj->IsNull() || obj->Is##clazz(); \
  }
  CLASS_LIST_FOR_HANDLES(DEFINE_CLASS_TESTER);
#undef DEFINE_CLASS_TESTER
360
  // null_ is the single shared null instance, so a raw pointer compare works.
  bool IsNull() const { return ptr_ == null_; }
362
363 // Matches Object.toString on instances (except String::ToCString, bug 20583).
364 virtual const char* ToCString() const {
365 if (IsNull()) {
366 return "null";
367 } else {
368 return "Object";
369 }
370 }
371
#ifndef PRODUCT
  // Prints this object as JSON to |stream|; if |ref| is true only a
  // reference (a header carrying an object id) is emitted.
  void PrintJSON(JSONStream* stream, bool ref = true) const;
  virtual void PrintJSONImpl(JSONStream* stream, bool ref) const;
  // Prints JSON describing the implementation-level (VM-internal) fields.
  void PrintImplementationFields(JSONStream* stream) const;
  virtual void PrintImplementationFieldsImpl(
      const JSONArray& jsarr_fields) const;
  // The "type" string reported to the VM service.
  virtual const char* JSONType() const { return IsNull() ? "null" : "Object"; }
#endif
380
381 // Returns the name that is used to identify an object in the
382 // namespace dictionary.
383 // Object::DictionaryName() returns String::null(). Only subclasses
384 // of Object that need to be entered in the library and library prefix
385 // namespaces need to provide an implementation.
386 virtual StringPtr DictionaryName() const;
387
  // Heap generation checks, derived from the object's address.
  bool IsNew() const { return ptr()->IsNewObject(); }
  bool IsOld() const { return ptr()->IsOldObject(); }
#if defined(DEBUG)
  bool InVMIsolateHeap() const;
#else
  bool InVMIsolateHeap() const { return ptr()->untag()->InVMIsolateHeap(); }
#endif  // DEBUG
395
396 // Print the object on stdout for debugging.
397 void Print() const;
398
399#if defined(DEBUG)
400 bool IsZoneHandle() const;
401 bool IsReadOnlyHandle() const;
402 bool IsNotTemporaryScopedHandle() const;
403#endif
404
405 static Object& Handle() {
406 return HandleImpl(zone: Thread::Current()->zone(), ptr: null_, default_cid: kObjectCid);
407 }
408 static Object& Handle(Zone* zone) {
409 return HandleImpl(zone, ptr: null_, default_cid: kObjectCid);
410 }
411 static Object& Handle(ObjectPtr ptr) {
412 return HandleImpl(zone: Thread::Current()->zone(), ptr, default_cid: kObjectCid);
413 }
414 static Object& Handle(Zone* zone, ObjectPtr ptr) {
415 return HandleImpl(zone, ptr, default_cid: kObjectCid);
416 }
417 static Object& ZoneHandle() {
418 return ZoneHandleImpl(zone: Thread::Current()->zone(), ptr: null_, default_cid: kObjectCid);
419 }
420 static Object& ZoneHandle(Zone* zone) {
421 return ZoneHandleImpl(zone, ptr: null_, default_cid: kObjectCid);
422 }
423 static Object& ZoneHandle(ObjectPtr ptr) {
424 return ZoneHandleImpl(zone: Thread::Current()->zone(), ptr, default_cid: kObjectCid);
425 }
426 static Object& ZoneHandle(Zone* zone, ObjectPtr ptr) {
427 return ZoneHandleImpl(zone, ptr, default_cid: kObjectCid);
428 }
429 static Object* ReadOnlyHandle() { return ReadOnlyHandleImpl(cid: kObjectCid); }
430
431 static ObjectPtr null() { return null_; }
432
433#if defined(HASH_IN_OBJECT_HEADER)
  // Reads the hash code cached in the object's header word.
  static uint32_t GetCachedHash(const ObjectPtr obj) {
    return obj->untag()->GetHeaderHash();
  }

  // Installs |hash| in the header if none was set yet; returns the hash that
  // is stored in the header afterwards (semantics delegated to
  // UntaggedObject::SetHeaderHashIfNotSet — confirm there).
  static uint32_t SetCachedHashIfNotSet(ObjectPtr obj, uint32_t hash) {
    return obj->untag()->SetHeaderHashIfNotSet(hash);
  }
441#endif
442
443 // The list below enumerates read-only handles for singleton
444 // objects that are shared between the different isolates.
445 //
446 // - sentinel is a value that cannot be produced by Dart code. It can be used
447 // to mark special values, for example to distinguish "uninitialized" fields.
448 // - transition_sentinel is a value marking that we are transitioning from
449 // sentinel, e.g., computing a field value. Used to detect circular
450 // initialization.
451 // - unknown_constant and non_constant are optimizing compiler's constant
452 // propagation constants.
453 // - optimized_out results from deopt environment pruning or failure to
454 // capture variables in a closure's context
455#define SHARED_READONLY_HANDLES_LIST(V) \
456 V(Object, null_object) \
457 V(Class, null_class) \
458 V(Array, null_array) \
459 V(String, null_string) \
460 V(Instance, null_instance) \
461 V(Function, null_function) \
462 V(FunctionType, null_function_type) \
463 V(RecordType, null_record_type) \
464 V(TypeArguments, null_type_arguments) \
465 V(CompressedStackMaps, null_compressed_stackmaps) \
466 V(Closure, null_closure) \
467 V(TypeArguments, empty_type_arguments) \
468 V(Array, empty_array) \
469 V(Array, empty_instantiations_cache_array) \
470 V(Array, empty_subtype_test_cache_array) \
471 V(ContextScope, empty_context_scope) \
472 V(ObjectPool, empty_object_pool) \
473 V(CompressedStackMaps, empty_compressed_stackmaps) \
474 V(PcDescriptors, empty_descriptors) \
475 V(LocalVarDescriptors, empty_var_descriptors) \
476 V(ExceptionHandlers, empty_exception_handlers) \
477 V(ExceptionHandlers, empty_async_exception_handlers) \
478 V(Array, synthetic_getter_parameter_types) \
479 V(Array, synthetic_getter_parameter_names) \
480 V(Sentinel, sentinel) \
481 V(Sentinel, transition_sentinel) \
482 V(Sentinel, unknown_constant) \
483 V(Sentinel, non_constant) \
484 V(Sentinel, optimized_out) \
485 V(Bool, bool_true) \
486 V(Bool, bool_false) \
487 V(Smi, smi_illegal_cid) \
488 V(Smi, smi_zero) \
489 V(ApiError, no_callbacks_error) \
490 V(UnwindError, unwind_in_progress_error) \
491 V(LanguageError, snapshot_writer_error) \
492 V(LanguageError, branch_offset_error) \
493 V(LanguageError, speculative_inlining_error) \
494 V(LanguageError, background_compilation_error) \
495 V(LanguageError, out_of_memory_error) \
496 V(Array, vm_isolate_snapshot_object_table) \
497 V(Type, dynamic_type) \
498 V(Type, void_type) \
499 V(AbstractType, null_abstract_type)
500
#define DEFINE_SHARED_READONLY_HANDLE_GETTER(Type, name) \
  static const Type& name() { \
    /* Read-only handles are set up during VM initialization; asserting */ \
    /* non-null catches use before Object::Init has run. */ \
    ASSERT(name##_ != nullptr); \
    return *name##_; \
  }
  SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE_GETTER)
#undef DEFINE_SHARED_READONLY_HANDLE_GETTER
508
509 static void set_vm_isolate_snapshot_object_table(const Array& table);
510
511 static ClassPtr class_class() { return class_class_; }
512 static ClassPtr dynamic_class() { return dynamic_class_; }
513 static ClassPtr void_class() { return void_class_; }
514 static ClassPtr type_parameters_class() { return type_parameters_class_; }
515 static ClassPtr type_arguments_class() { return type_arguments_class_; }
516 static ClassPtr patch_class_class() { return patch_class_class_; }
517 static ClassPtr function_class() { return function_class_; }
518 static ClassPtr closure_data_class() { return closure_data_class_; }
519 static ClassPtr ffi_trampoline_data_class() {
520 return ffi_trampoline_data_class_;
521 }
522 static ClassPtr field_class() { return field_class_; }
523 static ClassPtr script_class() { return script_class_; }
524 static ClassPtr library_class() { return library_class_; }
525 static ClassPtr namespace_class() { return namespace_class_; }
526 static ClassPtr kernel_program_info_class() {
527 return kernel_program_info_class_;
528 }
529 static ClassPtr code_class() { return code_class_; }
530 static ClassPtr instructions_class() { return instructions_class_; }
531 static ClassPtr instructions_section_class() {
532 return instructions_section_class_;
533 }
534 static ClassPtr instructions_table_class() {
535 return instructions_table_class_;
536 }
537 static ClassPtr object_pool_class() { return object_pool_class_; }
538 static ClassPtr pc_descriptors_class() { return pc_descriptors_class_; }
539 static ClassPtr code_source_map_class() { return code_source_map_class_; }
540 static ClassPtr compressed_stackmaps_class() {
541 return compressed_stackmaps_class_;
542 }
543 static ClassPtr var_descriptors_class() { return var_descriptors_class_; }
544 static ClassPtr exception_handlers_class() {
545 return exception_handlers_class_;
546 }
547 static ClassPtr context_class() { return context_class_; }
548 static ClassPtr context_scope_class() { return context_scope_class_; }
549 static ClassPtr sentinel_class() { return sentinel_class_; }
550 static ClassPtr api_error_class() { return api_error_class_; }
551 static ClassPtr language_error_class() { return language_error_class_; }
552 static ClassPtr unhandled_exception_class() {
553 return unhandled_exception_class_;
554 }
555 static ClassPtr unwind_error_class() { return unwind_error_class_; }
556 static ClassPtr singletargetcache_class() { return singletargetcache_class_; }
557 static ClassPtr unlinkedcall_class() { return unlinkedcall_class_; }
558 static ClassPtr monomorphicsmiablecall_class() {
559 return monomorphicsmiablecall_class_;
560 }
561 static ClassPtr icdata_class() { return icdata_class_; }
562 static ClassPtr megamorphic_cache_class() { return megamorphic_cache_class_; }
563 static ClassPtr subtypetestcache_class() { return subtypetestcache_class_; }
564 static ClassPtr loadingunit_class() { return loadingunit_class_; }
565 static ClassPtr weak_serialization_reference_class() {
566 return weak_serialization_reference_class_;
567 }
568 static ClassPtr weak_array_class() { return weak_array_class_; }
569
570 // Initialize the VM isolate.
571 static void InitNullAndBool(IsolateGroup* isolate_group);
572 static void Init(IsolateGroup* isolate_group);
573 static void InitVtables();
574 static void FinishInit(IsolateGroup* isolate_group);
575 static void FinalizeVMIsolate(IsolateGroup* isolate_group);
576 static void FinalizeReadOnlyObject(ObjectPtr object);
577
578 static void Cleanup();
579
580 // Initialize a new isolate either from a Kernel IR, from source, or from a
581 // snapshot.
582 static ErrorPtr Init(IsolateGroup* isolate_group,
583 const uint8_t* kernel_buffer,
584 intptr_t kernel_buffer_size);
585
586 static void MakeUnusedSpaceTraversable(const Object& obj,
587 intptr_t original_size,
588 intptr_t used_size);
589
590 static intptr_t InstanceSize() {
591 return RoundedAllocationSize(size: sizeof(UntaggedObject));
592 }
593
  // Checks that the vtable registered for |cid| matches the C++ vtable of
  // the corresponding (fake) handle object. Cids beyond the predefined
  // range all use Instance's vtable.
  template <class FakeObject>
  static void VerifyBuiltinVtable(intptr_t cid) {
    FakeObject fake;
    if (cid >= kNumPredefinedCids) {
      cid = kInstanceCid;
    }
    ASSERT(builtin_vtables_[cid] == fake.vtable());
  }
  static void VerifyBuiltinVtables();
603
604 static const ClassId kClassId = kObjectCid;
605
606 // Different kinds of name visibility.
607 enum NameVisibility {
608 // Internal names are the true names of classes, fields,
609 // etc. inside the vm. These names include privacy suffixes,
610 // getter prefixes, and trailing dots on unnamed constructors.
611 //
612 // The names of core implementation classes (like _OneByteString)
613 // are preserved as well.
614 //
615 // e.g.
616 // private getter -> get:foo@6be832b
617 // private constructor -> _MyClass@6b3832b.
618 // private named constructor -> _MyClass@6b3832b.named
619 // core impl class name shown -> _OneByteString
620 kInternalName = 0,
621
622 // Scrubbed names drop privacy suffixes, getter prefixes, and
623 // trailing dots on unnamed constructors. These names are used in
624 // the vm service.
625 //
626 // e.g.
627 // get:foo@6be832b -> foo
628 // _MyClass@6b3832b. -> _MyClass
629 // _MyClass@6b3832b.named -> _MyClass.named
630 // _OneByteString -> _OneByteString (not remapped)
631 kScrubbedName,
632
633 // User visible names are appropriate for reporting type errors
634 // directly to programmers. The names have been scrubbed and
635 // the names of core implementation classes are remapped to their
636 // public interface names.
637 //
638 // e.g.
639 // get:foo@6be832b -> foo
640 // _MyClass@6b3832b. -> _MyClass
641 // _MyClass@6b3832b.named -> _MyClass.named
642 // _OneByteString -> String (remapped)
643 kUserVisibleName
644 };
645
646 // Sometimes simple formating might produce the same name for two different
647 // entities, for example we might inject a synthetic forwarder into the
648 // class which has the same name as an already existing function, or
649 // two different types can be formatted as X<T> because T has different
650 // meaning (refers to a different type parameter) in these two types.
651 // Such ambiguity might be acceptable in some contexts but not in others, so
652 // some formatting methods have two modes - one which tries to be more
653 // user friendly, and another one which tries to avoid name conflicts by
654 // emitting longer and less user friendly names.
655 enum class NameDisambiguation {
656 kYes,
657 kNo,
658 };
659
660 protected:
661 friend ObjectPtr AllocateObject(intptr_t, intptr_t, intptr_t);
662
663 // Used for extracting the C++ vtable during bringup.
  Object() : ptr_(null_) {}

  // The handle's raw pointer as an untyped machine word.
  uword raw_value() const { return static_cast<uword>(ptr()); }

  // Rebinds ptr_ and switches the handle's vtable based on the object's
  // class (using |default_cid| when |value| is null).
  inline void setPtr(ObjectPtr value, intptr_t default_cid);
  void CheckHandle() const;
670 DART_NOINLINE static Object& HandleImpl(Zone* zone,
671 ObjectPtr ptr,
672 intptr_t default_cid) {
673 Object* obj = reinterpret_cast<Object*>(VMHandles::AllocateHandle(zone));
674 obj->setPtr(value: ptr, default_cid);
675 return *obj;
676 }
677 DART_NOINLINE static Object& ZoneHandleImpl(Zone* zone,
678 ObjectPtr ptr,
679 intptr_t default_cid) {
680 Object* obj =
681 reinterpret_cast<Object*>(VMHandles::AllocateZoneHandle(zone));
682 obj->setPtr(value: ptr, default_cid);
683 return *obj;
684 }
685 DART_NOINLINE static Object* ReadOnlyHandleImpl(intptr_t cid) {
686 Object* obj = reinterpret_cast<Object*>(Dart::AllocateReadOnlyHandle());
687 obj->setPtr(value: Object::null(), default_cid: cid);
688 return obj;
689 }
690
691 // Memcpy to account for the strict aliasing rule.
692 // Explicit cast to silence -Wdynamic-class-memaccess.
693 // This is still undefined behavior because we're messing with the internal
694 // representation of C++ objects, but works okay in practice with
695 // -fno-strict-vtable-pointers.
696 cpp_vtable vtable() const {
697 cpp_vtable result;
698 memcpy(dest: &result, src: reinterpret_cast<const void*>(this), // NOLINT
699 n: sizeof(result));
700 return result;
701 }
702 void set_vtable(cpp_vtable value) {
703 memcpy(dest: reinterpret_cast<void*>(this), src: &value, // NOLINT
704 n: sizeof(cpp_vtable));
705 }
706
707 static ObjectPtr Allocate(intptr_t cls_id,
708 intptr_t size,
709 Heap::Space space,
710 bool compressed,
711 uword ptr_field_start_offset,
712 uword ptr_field_end_offset);
713
714 // Templates of Allocate that retrieve the appropriate values to pass from
715 // the class.
716
  // Allocates a fixed-size instance of T in |space|, deriving the class id,
  // size, compression, and pointer-field range from T.
  template <typename T>
  DART_FORCE_INLINE static typename T::ObjectPtrType Allocate(
      Heap::Space space) {
    return static_cast<typename T::ObjectPtrType>(Allocate(
        T::kClassId, T::InstanceSize(), space, T::ContainsCompressedPointers(),
        Object::from_offset<T>(), Object::to_offset<T>()));
  }
  // Allocates a variable-length instance of T with |elements| elements.
  template <typename T>
  DART_FORCE_INLINE static typename T::ObjectPtrType Allocate(
      Heap::Space space,
      intptr_t elements) {
    return static_cast<typename T::ObjectPtrType>(
        Allocate(T::kClassId, T::InstanceSize(elements), space,
                 T::ContainsCompressedPointers(), Object::from_offset<T>(),
                 Object::to_offset<T>(elements)));
  }
733
734 // Additional versions that also take a class_id for types like Array, Map,
735 // and Set that have more than one possible class id.
736
  // Like Allocate<T>, but with an explicit |class_id| for handle types
  // (Array, Map, Set, ...) that cover more than one class id.
  template <typename T>
  DART_FORCE_INLINE static typename T::ObjectPtrType AllocateVariant(
      intptr_t class_id,
      Heap::Space space) {
    return static_cast<typename T::ObjectPtrType>(Allocate(
        class_id, T::InstanceSize(), space, T::ContainsCompressedPointers(),
        Object::from_offset<T>(), Object::to_offset<T>()));
  }
  // Variable-length variant of the above.
  template <typename T>
  DART_FORCE_INLINE static typename T::ObjectPtrType
  AllocateVariant(intptr_t class_id, Heap::Space space, intptr_t elements) {
    return static_cast<typename T::ObjectPtrType>(
        Allocate(class_id, T::InstanceSize(elements), space,
                 T::ContainsCompressedPointers(), Object::from_offset<T>(),
                 Object::to_offset<T>(elements)));
  }
753
754 static constexpr intptr_t RoundedAllocationSize(intptr_t size) {
755 return Utils::RoundUp(x: size, alignment: kObjectAlignment);
756 }
757
  // True iff |addr| lies within this object's heap storage.
  bool Contains(uword addr) const { return ptr()->untag()->Contains(addr); }
759
760 // Start of field mutator guards.
761 //
762 // All writes to heap objects should ultimately pass through one of the
763 // methods below or their counterparts in UntaggedObject, to ensure that the
764 // write barrier is correctly applied.
765
  // Loads an object-pointer field with the given memory order.
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  type LoadPointer(type const* addr) const {
    return ptr()->untag()->LoadPointer<type, order>(addr);
  }

  // Stores an object-pointer field; the UntaggedObject implementation
  // applies the GC write barrier as needed.
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StorePointer(type const* addr, type value) const {
    ptr()->untag()->StorePointer<type, order>(addr, value);
  }
  // As StorePointer, but for compressed-pointer fields.
  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedPointer(compressed_type const* addr, type value) const {
    ptr()->untag()->StoreCompressedPointer<type, compressed_type, order>(addr,
                                                                         value);
  }
  // As StorePointer, but for fields without natural alignment.
  template <typename type>
  void StorePointerUnaligned(type const* addr,
                             type value,
                             Thread* thread) const {
    ptr()->untag()->StorePointerUnaligned<type>(addr, value, thread);
  }

  // Use for storing into an explicitly Smi-typed field of an object
  // (i.e., both the previous and new value are Smis).
  void StoreSmi(SmiPtr const* addr, SmiPtr value) const {
    ptr()->untag()->StoreSmi(addr, value);
  }

  // Stores a 128-bit SIMD value; the field must lie within this object.
  template <typename FieldType>
  void StoreSimd128(const FieldType* addr, simd128_value_t value) const {
    ASSERT(Contains(reinterpret_cast<uword>(addr)));
    value.writeTo(const_cast<FieldType*>(addr));
  }

  // Plain (non-atomic) load of a non-pointer field.
  template <typename FieldType>
  FieldType LoadNonPointer(const FieldType* addr) const {
    return *const_cast<FieldType*>(addr);
  }

  // Atomic load of a non-pointer field with the given memory order.
  template <typename FieldType, std::memory_order order>
  FieldType LoadNonPointer(const FieldType* addr) const {
    return reinterpret_cast<std::atomic<FieldType>*>(
               const_cast<FieldType*>(addr))
        ->load(order);
  }
812
813 // Needs two template arguments to allow assigning enums to fixed-size ints.
  // Needs two template arguments to allow assigning enums to fixed-size ints.
  template <typename FieldType, typename ValueType>
  void StoreNonPointer(const FieldType* addr, ValueType value) const {
    // Can't use Contains, as it uses tags_, which is set through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(ptr()));
    *const_cast<FieldType*>(addr) = value;
  }

  // Atomic variant of the above with an explicit memory order.
  template <typename FieldType, typename ValueType, std::memory_order order>
  void StoreNonPointer(const FieldType* addr, ValueType value) const {
    // Can't use Contains, as it uses tags_, which is set through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(ptr()));
    reinterpret_cast<std::atomic<FieldType>*>(const_cast<FieldType*>(addr))
        ->store(value, order);
  }
828
  // Provides non-const access to non-pointer fields within the object. Such
  // access does not need a write barrier, but it is *not* GC-safe, since the
  // object might move, hence must be fully contained within a NoSafepointScope.
  template <typename FieldType>
  FieldType* UnsafeMutableNonPointer(const FieldType* addr) const {
    // Allow pointers at the end of variable-length data, and disallow pointers
    // within the header word.
    // NOTE(review): the -1 / -kWordSize checks permit addr to equal the
    // end-of-object address while still rejecting addresses inside the
    // header word.
    ASSERT(Contains(reinterpret_cast<uword>(addr) - 1) &&
           Contains(reinterpret_cast<uword>(addr) - kWordSize));
    // At least check that there is a NoSafepointScope and hope it's big enough.
    ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
    return const_cast<FieldType*>(addr);
  }
842
// Fail at link time if StoreNonPointer or UnsafeMutableNonPointer is
// instantiated with an object pointer type. Object pointer fields must go
// through the barrier-aware pointer-store paths; these non-template
// overloads shadow the templates for every <Class>Ptr type and call
// UnimplementedMethod(), which is declared but (per the comment above)
// never defined.
#define STORE_NON_POINTER_ILLEGAL_TYPE(type)                                   \
  template <typename ValueType>                                                \
  void StoreNonPointer(type##Ptr const* addr, ValueType value) const {         \
    UnimplementedMethod();                                                     \
  }                                                                            \
  type##Ptr* UnsafeMutableNonPointer(type##Ptr const* addr) const {            \
    UnimplementedMethod();                                                     \
    return nullptr;                                                            \
  }

  CLASS_LIST(STORE_NON_POINTER_ILLEGAL_TYPE);
  void UnimplementedMethod() const;
#undef STORE_NON_POINTER_ILLEGAL_TYPE
858
859 // Allocate an object and copy the body of 'orig'.
860 static ObjectPtr Clone(const Object& orig,
861 Heap::Space space,
862 bool load_with_relaxed_atomics = false);
863
864 // End of field mutator guards.
865
866 ObjectPtr ptr_; // The raw object reference.
867
868 protected:
869 // The first offset in an allocated object of the given type that contains a
870 // (possibly compressed) object pointer. Used to initialize object pointer
871 // fields to Object::null() instead of 0.
872 //
873 // Always returns an offset after the object header tags.
874 template <typename T>
875 DART_FORCE_INLINE static uword from_offset() {
876 return UntaggedObject::from_offset<typename T::UntaggedObjectType>();
877 }
878
879 // The last offset in an allocated object of the given type that contains a
880 // (possibly compressed) object pointer. Used to initialize object pointer
881 // fields to Object::null() instead of 0.
882 //
883 // Takes an optional argument that is the number of elements in the payload,
884 // which is ignored if the object never contains a payload.
885 //
886 // If there are no pointer fields in the object, then
887 // to_offset<T>() < from_offset<T>().
888 template <typename T>
889 DART_FORCE_INLINE static uword to_offset(intptr_t length = 0) {
890 return UntaggedObject::to_offset<typename T::UntaggedObjectType>(length);
891 }
892
893 void AddCommonObjectProperties(JSONObject* jsobj,
894 const char* protocol_type,
895 bool ref) const;
896
897 private:
898 static intptr_t NextFieldOffset() {
899 // Indicates this class cannot be extended by dart code.
900 return -kWordSize;
901 }
902
903 static void InitializeObject(uword address,
904 intptr_t id,
905 intptr_t size,
906 bool compressed,
907 uword ptr_field_start_offset,
908 uword ptr_field_end_offset);
909
  // Templates of InitializeObject that retrieve the appropriate values to pass
  // from the class.

  // For fixed-size objects of type T (no payload).
  template <typename T>
  DART_FORCE_INLINE static void InitializeObject(uword address) {
    return InitializeObject(address, T::kClassId, T::InstanceSize(),
                            T::ContainsCompressedPointers(),
                            Object::from_offset<T>(), Object::to_offset<T>());
  }
  // For variable-length objects of type T carrying |elements| elements.
  template <typename T>
  DART_FORCE_INLINE static void InitializeObject(uword address,
                                                 intptr_t elements) {
    return InitializeObject(address, T::kClassId, T::InstanceSize(elements),
                            T::ContainsCompressedPointers(),
                            Object::from_offset<T>(),
                            Object::to_offset<T>(elements));
  }
927
  // Additional versions that also take a class_id for types like Array, Map,
  // and Set that have more than one possible class id.

  // Fixed-size variant: |class_id| overrides T::kClassId.
  template <typename T>
  DART_FORCE_INLINE static void InitializeObjectVariant(uword address,
                                                        intptr_t class_id) {
    return InitializeObject(address, class_id, T::InstanceSize(),
                            T::ContainsCompressedPointers(),
                            Object::from_offset<T>(), Object::to_offset<T>());
  }
  // Variable-length variant with |elements| elements in the payload.
  template <typename T>
  DART_FORCE_INLINE static void InitializeObjectVariant(uword address,
                                                        intptr_t class_id,
                                                        intptr_t elements) {
    return InitializeObject(address, class_id, T::InstanceSize(elements),
                            T::ContainsCompressedPointers(),
                            Object::from_offset<T>(),
                            Object::to_offset<T>(elements));
  }
947
948 static void RegisterClass(const Class& cls,
949 const String& name,
950 const Library& lib);
951 static void RegisterPrivateClass(const Class& cls,
952 const String& name,
953 const Library& lib);
954
955 /* Initialize the handle based on the ptr in the presence of null. */
956 static void initializeHandle(Object* obj, ObjectPtr ptr) {
957 obj->setPtr(value: ptr, default_cid: kObjectCid);
958 }
959
960 static cpp_vtable builtin_vtables_[kNumPredefinedCids];
961
962 // The static values below are singletons shared between the different
963 // isolates. They are all allocated in the non-GC'd Dart::vm_isolate_.
964 static ObjectPtr null_;
965 static BoolPtr true_;
966 static BoolPtr false_;
967
968 static ClassPtr class_class_;
969 static ClassPtr dynamic_class_;
970 static ClassPtr void_class_;
971 static ClassPtr type_parameters_class_;
972 static ClassPtr type_arguments_class_;
973 static ClassPtr patch_class_class_;
974 static ClassPtr function_class_;
975 static ClassPtr closure_data_class_;
976 static ClassPtr ffi_trampoline_data_class_;
977 static ClassPtr field_class_;
978 static ClassPtr script_class_;
979 static ClassPtr library_class_;
980 static ClassPtr namespace_class_;
981 static ClassPtr kernel_program_info_class_;
982 static ClassPtr code_class_;
983 static ClassPtr instructions_class_;
984 static ClassPtr instructions_section_class_;
985 static ClassPtr instructions_table_class_;
986 static ClassPtr object_pool_class_;
987 static ClassPtr pc_descriptors_class_;
988 static ClassPtr code_source_map_class_;
989 static ClassPtr compressed_stackmaps_class_;
990 static ClassPtr var_descriptors_class_;
991 static ClassPtr exception_handlers_class_;
992 static ClassPtr context_class_;
993 static ClassPtr context_scope_class_;
994 static ClassPtr sentinel_class_;
995 static ClassPtr singletargetcache_class_;
996 static ClassPtr unlinkedcall_class_;
997 static ClassPtr monomorphicsmiablecall_class_;
998 static ClassPtr icdata_class_;
999 static ClassPtr megamorphic_cache_class_;
1000 static ClassPtr subtypetestcache_class_;
1001 static ClassPtr loadingunit_class_;
1002 static ClassPtr api_error_class_;
1003 static ClassPtr language_error_class_;
1004 static ClassPtr unhandled_exception_class_;
1005 static ClassPtr unwind_error_class_;
1006 static ClassPtr weak_serialization_reference_class_;
1007 static ClassPtr weak_array_class_;
1008
1009#define DECLARE_SHARED_READONLY_HANDLE(Type, name) static Type* name##_;
1010 SHARED_READONLY_HANDLES_LIST(DECLARE_SHARED_READONLY_HANDLE)
1011#undef DECLARE_SHARED_READONLY_HANDLE
1012
1013 friend void ClassTable::Register(const Class& cls);
1014 friend void UntaggedObject::Validate(IsolateGroup* isolate_group) const;
1015 friend class Closure;
1016 friend class InstanceDeserializationCluster;
1017 friend class ObjectGraphCopier; // For Object::InitializeObject
1018 friend class Simd128MessageDeserializationCluster;
1019 friend class OneByteString;
1020 friend class TwoByteString;
1021 friend class ExternalOneByteString;
1022 friend class ExternalTwoByteString;
1023 friend class Thread;
1024
1025#define REUSABLE_FRIEND_DECLARATION(name) \
1026 friend class Reusable##name##HandleScope;
1027 REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
1028#undef REUSABLE_FRIEND_DECLARATION
1029
1030 DISALLOW_ALLOCATION();
1031 DISALLOW_COPY_AND_ASSIGN(Object);
1032};
1033
1034// Used to declare setters and getters for untagged object fields that are
1035// defined with the WSR_COMPRESSED_POINTER_FIELD macro.
1036//
1037// In the precompiler, the getter transparently unwraps the
1038// WeakSerializationReference, if present, to get the wrapped value of the
1039// appropriate type, since a WeakSerializationReference object should be
1040// transparent to the parts of the precompiler that are not the serializer.
1041// Meanwhile, the setter takes an Object to allow the precompiler to set the
1042// field to a WeakSerializationReference.
1043//
1044// Since WeakSerializationReferences are only used during precompilation,
1045// this macro creates the normally expected getter and setter otherwise.
1046#if defined(DART_PRECOMPILER)
1047#define PRECOMPILER_WSR_FIELD_DECLARATION(Type, Name) \
1048 Type##Ptr Name() const; \
1049 void set_##Name(const Object& value) const { \
1050 untag()->set_##Name(value.ptr()); \
1051 }
1052#else
1053#define PRECOMPILER_WSR_FIELD_DECLARATION(Type, Name) \
1054 Type##Ptr Name() const { return untag()->Name(); } \
1055 void set_##Name(const Type& value) const;
1056#endif
1057
1058class PassiveObject : public Object {
1059 public:
1060 void operator=(ObjectPtr value) { ptr_ = value; }
1061 void operator^=(ObjectPtr value) { ptr_ = value; }
1062
1063 static PassiveObject& Handle(Zone* zone, ObjectPtr ptr) {
1064 PassiveObject* obj =
1065 reinterpret_cast<PassiveObject*>(VMHandles::AllocateHandle(zone));
1066 obj->ptr_ = ptr;
1067 obj->set_vtable(0);
1068 return *obj;
1069 }
1070 static PassiveObject& Handle(ObjectPtr ptr) {
1071 return Handle(zone: Thread::Current()->zone(), ptr);
1072 }
1073 static PassiveObject& Handle() {
1074 return Handle(zone: Thread::Current()->zone(), ptr: Object::null());
1075 }
1076 static PassiveObject& Handle(Zone* zone) {
1077 return Handle(zone, ptr: Object::null());
1078 }
1079 static PassiveObject& ZoneHandle(Zone* zone, ObjectPtr ptr) {
1080 PassiveObject* obj =
1081 reinterpret_cast<PassiveObject*>(VMHandles::AllocateZoneHandle(zone));
1082 obj->ptr_ = ptr;
1083 obj->set_vtable(0);
1084 return *obj;
1085 }
1086 static PassiveObject& ZoneHandle(ObjectPtr ptr) {
1087 return ZoneHandle(zone: Thread::Current()->zone(), ptr);
1088 }
1089 static PassiveObject& ZoneHandle() {
1090 return ZoneHandle(zone: Thread::Current()->zone(), ptr: Object::null());
1091 }
1092 static PassiveObject& ZoneHandle(Zone* zone) {
1093 return ZoneHandle(zone, ptr: Object::null());
1094 }
1095
1096 private:
1097 PassiveObject() : Object() {}
1098 DISALLOW_ALLOCATION();
1099 DISALLOW_COPY_AND_ASSIGN(PassiveObject);
1100};
1101
1102// A URIs array contains triplets of strings.
1103// The first string in the triplet is a type name (usually a class).
1104// The second string in the triplet is the URI of the type.
1105// The third string in the triplet is "print" if the triplet should be printed.
1106typedef ZoneGrowableHandlePtrArray<const String> URIs;
1107
// Nullability of a type: nullable (T?), non-nullable (T), or legacy
// (pre-null-safety).
// NOTE(review): the kNullabilityBitSize remark below suggests these values
// are persisted in snapshots -- do not renumber existing entries.
enum class Nullability : uint8_t {
  kNullable = 0,
  kNonNullable = 1,
  kLegacy = 2,
  // Adjust kNullabilityBitSize in app_snapshot.cc if adding new values.
};
1114
1115// Equality kind between types.
1116enum class TypeEquality {
1117 kCanonical = 0,
1118 kSyntactical = 1,
1119 kInSubtypeTest = 2,
1120};
1121
1122// The NNBDMode reflects the opted-in status of libraries.
1123// Note that the weak or strong checking mode is not reflected in NNBDMode.
1124enum class NNBDMode {
1125 // Status of the library:
1126 kLegacyLib = 0, // Library is legacy.
1127 kOptedInLib = 1, // Library is opted-in.
1128};
1129
1130// The NNBDCompiledMode reflects the mode in which constants of the library were
1131// compiled by CFE.
1132enum class NNBDCompiledMode {
1133 kWeak = 0,
1134 kStrong = 1,
1135 kAgnostic = 2,
1136 kInvalid = 3,
1137};
1138
1139class Class : public Object {
1140 public:
1141 enum InvocationDispatcherEntry {
1142 kInvocationDispatcherName,
1143 kInvocationDispatcherArgsDesc,
1144 kInvocationDispatcherFunction,
1145 kInvocationDispatcherEntrySize,
1146 };
1147
1148 bool HasCompressedPointers() const;
  // Instance size in bytes on the host (the VM running now).
  intptr_t host_instance_size() const {
    ASSERT(is_finalized() || is_prefinalized());
    return (untag()->host_instance_size_in_words_ * kCompressedWordSize);
  }
  // Instance size in bytes on the compilation target; differs from the host
  // size only in a DART_PRECOMPILER (cross-compiling) build.
  intptr_t target_instance_size() const {
    ASSERT(is_finalized() || is_prefinalized());
#if defined(DART_PRECOMPILER)
    return (untag()->target_instance_size_in_words_ *
            compiler::target::kCompressedWordSize);
#else
    return host_instance_size();
#endif  // defined(DART_PRECOMPILER)
  }
  // Static variants that read the sizes directly from a raw ClassPtr
  // without creating a handle.
  static intptr_t host_instance_size(ClassPtr clazz) {
    return (clazz->untag()->host_instance_size_in_words_ * kCompressedWordSize);
  }
  static intptr_t target_instance_size(ClassPtr clazz) {
#if defined(DART_PRECOMPILER)
    return (clazz->untag()->target_instance_size_in_words_ *
            compiler::target::kCompressedWordSize);
#else
    return host_instance_size(clazz);
#endif  // defined(DART_PRECOMPILER)
  }
1173 void set_instance_size(intptr_t host_value_in_bytes,
1174 intptr_t target_value_in_bytes) const {
1175 ASSERT(kCompressedWordSize != 0);
1176 set_instance_size_in_words(
1177 host_value: host_value_in_bytes / kCompressedWordSize,
1178 target_value: target_value_in_bytes / compiler::target::kCompressedWordSize);
1179 }
1180 void set_instance_size_in_words(intptr_t host_value,
1181 intptr_t target_value) const {
1182 ASSERT(
1183 Utils::IsAligned((host_value * kCompressedWordSize), kObjectAlignment));
1184 StoreNonPointer(addr: &untag()->host_instance_size_in_words_, value: host_value);
1185#if defined(DART_PRECOMPILER)
1186 ASSERT(
1187 Utils::IsAligned((target_value * compiler::target::kCompressedWordSize),
1188 compiler::target::kObjectAlignment));
1189 StoreNonPointer(&untag()->target_instance_size_in_words_, target_value);
1190#else
1191 // Could be different only during cross-compilation.
1192 ASSERT_EQUAL(host_value, target_value);
1193#endif // defined(DART_PRECOMPILER)
1194 }
1195
1196 intptr_t host_next_field_offset() const {
1197 return untag()->host_next_field_offset_in_words_ * kCompressedWordSize;
1198 }
1199 intptr_t target_next_field_offset() const {
1200#if defined(DART_PRECOMPILER)
1201 return untag()->target_next_field_offset_in_words_ *
1202 compiler::target::kCompressedWordSize;
1203#else
1204 return host_next_field_offset();
1205#endif // defined(DART_PRECOMPILER)
1206 }
1207 void set_next_field_offset(intptr_t host_value_in_bytes,
1208 intptr_t target_value_in_bytes) const {
1209 set_next_field_offset_in_words(
1210 host_value: host_value_in_bytes / kCompressedWordSize,
1211 target_value: target_value_in_bytes / compiler::target::kCompressedWordSize);
1212 }
1213 void set_next_field_offset_in_words(intptr_t host_value,
1214 intptr_t target_value) const {
1215 // Assert that the next field offset is either negative (ie, this object
1216 // can't be extended by dart code), or rounds up to the kObjectAligned
1217 // instance size.
1218 ASSERT((host_value < 0) ||
1219 ((host_value <= untag()->host_instance_size_in_words_) &&
1220 (host_value + (kObjectAlignment / kCompressedWordSize) >
1221 untag()->host_instance_size_in_words_)));
1222 StoreNonPointer(addr: &untag()->host_next_field_offset_in_words_, value: host_value);
1223#if defined(DART_PRECOMPILER)
1224 ASSERT((target_value < 0) ||
1225 ((target_value <= untag()->target_instance_size_in_words_) &&
1226 (target_value + (compiler::target::kObjectAlignment /
1227 compiler::target::kCompressedWordSize) >
1228 untag()->target_instance_size_in_words_)));
1229 StoreNonPointer(&untag()->target_next_field_offset_in_words_, target_value);
1230#else
1231 // Could be different only during cross-compilation.
1232 ASSERT_EQUAL(host_value, target_value);
1233#endif // defined(DART_PRECOMPILER)
1234 }
1235
1236 static bool is_valid_id(intptr_t value) {
1237 return UntaggedObject::ClassIdTag::is_valid(value);
1238 }
1239 intptr_t id() const { return untag()->id_; }
1240 void set_id(intptr_t value) const {
1241 ASSERT(value >= 0 && value < std::numeric_limits<classid_t>::max());
1242 StoreNonPointer(addr: &untag()->id_, value);
1243 }
1244 static intptr_t id_offset() { return OFFSET_OF(UntaggedClass, id_); }
1245
1246#if !defined(DART_PRECOMPILED_RUNTIME)
1247 // If the interface of this class has a single concrete implementation, either
1248 // via `extends` or by `implements`, returns its CID.
1249 // If it has no implementation, returns kIllegalCid.
1250 // If it has more than one implementation, returns kDynamicCid.
1251 intptr_t implementor_cid() const { return untag()->implementor_cid_; }
1252
1253 // Returns true if the implementor tracking state changes and so must be
1254 // propagated to this class's superclass and interfaces.
1255 bool NoteImplementor(const Class& implementor) const;
1256#endif
1257
1258 static intptr_t num_type_arguments_offset() {
1259 return OFFSET_OF(UntaggedClass, num_type_arguments_);
1260 }
1261
1262 StringPtr Name() const;
1263 StringPtr ScrubbedName() const;
1264 const char* ScrubbedNameCString() const;
1265 StringPtr UserVisibleName() const;
1266 const char* UserVisibleNameCString() const;
1267
1268 const char* NameCString(NameVisibility name_visibility) const;
1269
1270 // The mixin for this class if one exists. Otherwise, returns a raw pointer
1271 // to this class.
1272 ClassPtr Mixin() const;
1273
1274 // The NNBD mode of the library declaring this class.
1275 NNBDMode nnbd_mode() const;
1276
1277 bool IsInFullSnapshot() const;
1278
1279 virtual StringPtr DictionaryName() const { return Name(); }
1280
1281 ScriptPtr script() const { return untag()->script(); }
1282 void set_script(const Script& value) const;
1283
1284#if !defined(DART_PRECOMPILED_RUNTIME)
1285 KernelProgramInfoPtr KernelProgramInfo() const;
1286#endif
1287
1288 TokenPosition token_pos() const {
1289#if defined(DART_PRECOMPILED_RUNTIME)
1290 return TokenPosition::kNoSource;
1291#else
1292 return untag()->token_pos_;
1293#endif // defined(DART_PRECOMPILED_RUNTIME)
1294 }
1295
1296#if !defined(DART_PRECOMPILED_RUNTIME)
1297 void set_token_pos(TokenPosition value) const;
1298#endif // !defined(DART_PRECOMPILED_RUNTIME)
1299
1300 TokenPosition end_token_pos() const {
1301#if defined(DART_PRECOMPILED_RUNTIME)
1302 return TokenPosition::kNoSource;
1303#else
1304 return untag()->end_token_pos_;
1305#endif // defined(DART_PRECOMPILED_RUNTIME)
1306 }
1307
1308#if !defined(DART_PRECOMPILED_RUNTIME)
1309 void set_end_token_pos(TokenPosition value) const;
1310#endif // !defined(DART_PRECOMPILED_RUNTIME)
1311
1312 uint32_t Hash() const;
1313 static uint32_t Hash(ClassPtr);
1314
1315 int32_t SourceFingerprint() const;
1316
1317 // Return the Type with type arguments instantiated to bounds.
1318 TypePtr RareType() const;
1319
1320 // Return the non-nullable Type whose arguments are the type parameters
1321 // declared by this class.
1322 TypePtr DeclarationType() const;
1323
1324 static intptr_t declaration_type_offset() {
1325 return OFFSET_OF(UntaggedClass, declaration_type_);
1326 }
1327
1328 // Returns flattened instance type arguments vector for
1329 // instance of this class, parameterized with declared
1330 // type parameters of this class.
1331 TypeArgumentsPtr GetDeclarationInstanceTypeArguments() const;
1332
1333 // Returns flattened instance type arguments vector for
1334 // instance of this type, parameterized with given type arguments.
1335 //
1336 // Length of [type_arguments] should match number of type parameters
1337 // returned by [NumTypeParameters].
1338 TypeArgumentsPtr GetInstanceTypeArguments(Thread* thread,
1339 const TypeArguments& type_arguments,
1340 bool canonicalize = true) const;
1341
1342 LibraryPtr library() const { return untag()->library(); }
1343 void set_library(const Library& value) const;
1344
1345 // The formal type parameters and their bounds (no defaults), are specified as
1346 // an object of type TypeParameters.
1347 TypeParametersPtr type_parameters() const {
1348 ASSERT(is_declaration_loaded());
1349 return untag()->type_parameters();
1350 }
1351 void set_type_parameters(const TypeParameters& value) const;
1352 intptr_t NumTypeParameters(Thread* thread) const;
1353 intptr_t NumTypeParameters() const {
1354 return NumTypeParameters(thread: Thread::Current());
1355 }
1356
1357 // Return the type parameter declared at index.
1358 TypeParameterPtr TypeParameterAt(
1359 intptr_t index,
1360 Nullability nullability = Nullability::kNonNullable) const;
1361
1362 // Length of the flattened instance type arguments vector.
1363 // Includes type arguments of the super class.
1364 intptr_t NumTypeArguments() const;
1365
1366 // Return true if this class declares type parameters.
1367 bool IsGeneric() const {
1368 // If the declaration is not loaded, fall back onto NumTypeParameters.
1369 if (!is_declaration_loaded()) {
1370 return NumTypeParameters(thread: Thread::Current()) > 0;
1371 }
1372 return type_parameters() != Object::null();
1373 }
1374
1375 // Returns a canonicalized vector of the type parameters instantiated
1376 // to bounds. If non-generic, the empty type arguments vector is returned.
1377 TypeArgumentsPtr InstantiateToBounds(Thread* thread) const;
1378
1379 // If this class is parameterized, each instance has a type_arguments field.
1380 static constexpr intptr_t kNoTypeArguments = -1;
1381 intptr_t host_type_arguments_field_offset() const {
1382 ASSERT(is_type_finalized() || is_prefinalized());
1383 if (untag()->host_type_arguments_field_offset_in_words_ ==
1384 kNoTypeArguments) {
1385 return kNoTypeArguments;
1386 }
1387 return untag()->host_type_arguments_field_offset_in_words_ *
1388 kCompressedWordSize;
1389 }
1390 intptr_t target_type_arguments_field_offset() const {
1391#if defined(DART_PRECOMPILER)
1392 ASSERT(is_type_finalized() || is_prefinalized());
1393 if (untag()->target_type_arguments_field_offset_in_words_ ==
1394 compiler::target::Class::kNoTypeArguments) {
1395 return compiler::target::Class::kNoTypeArguments;
1396 }
1397 return untag()->target_type_arguments_field_offset_in_words_ *
1398 compiler::target::kCompressedWordSize;
1399#else
1400 return host_type_arguments_field_offset();
1401#endif // defined(DART_PRECOMPILER)
1402 }
1403 void set_type_arguments_field_offset(intptr_t host_value_in_bytes,
1404 intptr_t target_value_in_bytes) const {
1405 intptr_t host_value, target_value;
1406 if (host_value_in_bytes == kNoTypeArguments ||
1407 target_value_in_bytes == RTN::Class::kNoTypeArguments) {
1408 ASSERT(host_value_in_bytes == kNoTypeArguments &&
1409 target_value_in_bytes == RTN::Class::kNoTypeArguments);
1410 host_value = kNoTypeArguments;
1411 target_value = RTN::Class::kNoTypeArguments;
1412 } else {
1413 ASSERT(kCompressedWordSize != 0 && compiler::target::kCompressedWordSize);
1414 host_value = host_value_in_bytes / kCompressedWordSize;
1415 target_value =
1416 target_value_in_bytes / compiler::target::kCompressedWordSize;
1417 }
1418 set_type_arguments_field_offset_in_words(host_value, target_value);
1419 }
1420 void set_type_arguments_field_offset_in_words(intptr_t host_value,
1421 intptr_t target_value) const {
1422 StoreNonPointer(addr: &untag()->host_type_arguments_field_offset_in_words_,
1423 value: host_value);
1424#if defined(DART_PRECOMPILER)
1425 StoreNonPointer(&untag()->target_type_arguments_field_offset_in_words_,
1426 target_value);
1427#else
1428 // Could be different only during cross-compilation.
1429 ASSERT_EQUAL(host_value, target_value);
1430#endif // defined(DART_PRECOMPILER)
1431 }
1432 static intptr_t host_type_arguments_field_offset_in_words_offset() {
1433 return OFFSET_OF(UntaggedClass, host_type_arguments_field_offset_in_words_);
1434 }
1435
1436 // The super type of this class, Object type if not explicitly specified.
1437 TypePtr super_type() const {
1438 ASSERT(is_declaration_loaded());
1439 return untag()->super_type();
1440 }
1441 void set_super_type(const Type& value) const;
1442 static intptr_t super_type_offset() {
1443 return OFFSET_OF(UntaggedClass, super_type_);
1444 }
1445
1446 // Asserts that the class of the super type has been resolved.
1447 // If |class_table| is provided it will be used to resolve class id to the
1448 // actual class object, instead of using current class table on the isolate
1449 // group.
1450 ClassPtr SuperClass(ClassTable* class_table = nullptr) const;
1451
1452 // Interfaces is an array of Types.
1453 ArrayPtr interfaces() const {
1454 ASSERT(is_declaration_loaded());
1455 return untag()->interfaces();
1456 }
1457 void set_interfaces(const Array& value) const;
1458
1459 // Returns whether a path from [this] to [cls] can be found, where the first
1460 // element is a direct supertype of [this], each following element is a direct
1461 // supertype of the previous element and the final element has [cls] as its
1462 // type class. If [this] and [cls] are the same class, then the path is empty.
1463 //
1464 // If [path] is not nullptr, then the elements of the path are added to it.
1465 // This path can then be used to compute type arguments of [cls] given type
1466 // arguments for an instance of [this].
1467 //
1468 // Note: There may be multiple paths to [cls], but the result of applying each
1469 // path must be equal to the other results.
1470 bool FindInstantiationOf(Zone* zone,
1471 const Class& cls,
1472 GrowableArray<const Type*>* path,
1473 bool consider_only_super_classes = false) const;
1474 bool FindInstantiationOf(Zone* zone,
1475 const Class& cls,
1476 bool consider_only_super_classes = false) const {
1477 return FindInstantiationOf(zone, cls, /*path=*/path: nullptr,
1478 consider_only_super_classes);
1479 }
1480
1481 // Returns whether a path from [this] to [type] can be found, where the first
1482 // element is a direct supertype of [this], each following element is a direct
1483 // supertype of the previous element and the final element has the same type
1484 // class as [type]. If [this] is the type class of [type], then the path is
1485 // empty.
1486 //
1487 // If [path] is not nullptr, then the elements of the path are added to it.
1488 // This path can then be used to compute type arguments of [type]'s type
1489 // class given type arguments for an instance of [this].
1490 //
1491 // Note: There may be multiple paths to [type]'s type class, but the result of
1492 // applying each path must be equal to the other results.
1493 bool FindInstantiationOf(Zone* zone,
1494 const Type& type,
1495 GrowableArray<const Type*>* path,
1496 bool consider_only_super_classes = false) const;
1497 bool FindInstantiationOf(Zone* zone,
1498 const Type& type,
1499 bool consider_only_super_classes = false) const {
1500 return FindInstantiationOf(zone, type, /*path=*/path: nullptr,
1501 consider_only_super_classes);
1502 }
1503
1504 // If [this] is a subtype of a type with type class [cls], then this
1505 // returns [cls]<X_0, ..., X_n>, where n is the number of type arguments for
1506 // [cls] and where each type argument X_k is either instantiated or has free
1507 // class type parameters corresponding to the type parameters of [this].
1508 // Thus, given an instance of [this], the result can be instantiated
1509 // with the instance type arguments to get the type of the instance.
1510 //
1511 // If [this] is not a subtype of a type with type class [cls], returns null.
1512 TypePtr GetInstantiationOf(Zone* zone, const Class& cls) const;
1513
1514 // If [this] is a subtype of [type], then this returns [cls]<X_0, ..., X_n>,
1515 // where [cls] is the type class of [type], n is the number of type arguments
1516 // for [cls], and where each type argument X_k is either instantiated or has
1517 // free class type parameters corresponding to the type parameters of [this].
1518 // Thus, given an instance of [this], the result can be instantiated with the
1519 // instance type arguments to get the type of the instance.
1520 //
1521 // If [this] is not a subtype of a type with type class [cls], returns null.
1522 TypePtr GetInstantiationOf(Zone* zone, const Type& type) const;
1523
1524#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
1525 // Returns the list of classes directly implementing this class.
1526 GrowableObjectArrayPtr direct_implementors() const {
1527 DEBUG_ASSERT(
1528 IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
1529 return untag()->direct_implementors();
1530 }
1531 GrowableObjectArrayPtr direct_implementors_unsafe() const {
1532 return untag()->direct_implementors();
1533 }
1534#endif // !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
1535
1536#if !defined(DART_PRECOMPILED_RUNTIME)
1537 void set_direct_implementors(const GrowableObjectArray& implementors) const;
1538 void AddDirectImplementor(const Class& subclass, bool is_mixin) const;
1539#endif // !defined(DART_PRECOMPILED_RUNTIME)
1540
1541#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
1542 // Returns the list of classes having this class as direct superclass.
1543 GrowableObjectArrayPtr direct_subclasses() const {
1544 DEBUG_ASSERT(
1545 IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
1546 return direct_subclasses_unsafe();
1547 }
1548 GrowableObjectArrayPtr direct_subclasses_unsafe() const {
1549 return untag()->direct_subclasses();
1550 }
1551#endif // !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
1552
1553#if !defined(DART_PRECOMPILED_RUNTIME)
1554 void set_direct_subclasses(const GrowableObjectArray& subclasses) const;
1555 void AddDirectSubclass(const Class& subclass) const;
1556#endif // !defined(DART_PRECOMPILED_RUNTIME)
1557
1558 // Check if this class represents the class of null.
1559 bool IsNullClass() const { return id() == kNullCid; }
1560
1561 // Check if this class represents the 'dynamic' class.
1562 bool IsDynamicClass() const { return id() == kDynamicCid; }
1563
1564 // Check if this class represents the 'void' class.
1565 bool IsVoidClass() const { return id() == kVoidCid; }
1566
1567 // Check if this class represents the 'Never' class.
1568 bool IsNeverClass() const { return id() == kNeverCid; }
1569
1570 // Check if this class represents the 'Object' class.
1571 bool IsObjectClass() const { return id() == kInstanceCid; }
1572
1573 // Check if this class represents the 'Function' class.
1574 bool IsDartFunctionClass() const;
1575
1576 // Check if this class represents the 'Future' class.
1577 bool IsFutureClass() const;
1578
1579 // Check if this class represents the 'FutureOr' class.
1580 bool IsFutureOrClass() const { return id() == kFutureOrCid; }
1581
1582 // Check if this class represents the 'Closure' class.
1583 bool IsClosureClass() const { return id() == kClosureCid; }
1584 static bool IsClosureClass(ClassPtr cls) {
1585 return GetClassId(cls) == kClosureCid;
1586 }
1587
1588 // Check if this class represents the 'Record' class.
1589 bool IsRecordClass() const {
1590 return id() == kRecordCid;
1591 }
1592
1593 static bool IsInFullSnapshot(ClassPtr cls) {
1594 NoSafepointScope no_safepoint;
1595 return UntaggedLibrary::InFullSnapshotBit::decode(
1596 value: cls->untag()->library()->untag()->flags_);
1597 }
1598
  // Reads the class id directly from a raw ClassPtr without creating a
  // handle; guarded by a NoSafepointScope since no handle protects |cls|.
  static intptr_t GetClassId(ClassPtr cls) {
    NoSafepointScope no_safepoint;
    return cls->untag()->id_;
  }
1603
1604 // Returns true if the type specified by cls, type_arguments, and nullability
1605 // is a subtype of the other type.
1606 static bool IsSubtypeOf(
1607 const Class& cls,
1608 const TypeArguments& type_arguments,
1609 Nullability nullability,
1610 const AbstractType& other,
1611 Heap::Space space,
1612 FunctionTypeMapping* function_type_equivalence = nullptr);
1613
1614 // Check if this is the top level class.
1615 bool IsTopLevel() const;
1616
1617 bool IsPrivate() const;
1618
1619 DART_WARN_UNUSED_RESULT
1620 ErrorPtr VerifyEntryPoint() const;
1621
  // Returns an array of instance and static fields defined by this class.
  ArrayPtr fields() const {
    // We rely on the fact that any loads from the array are dependent loads
    // and avoid the load-acquire barrier here.
    return untag()->fields();
  }
  // Mutators for the fields array; see the definitions for locking details.
  void SetFields(const Array& value) const;
  void AddField(const Field& field) const;
  void AddFields(const GrowableArray<const Field*>& fields) const;

  // Index-based access into the fields array (declarations only).
  intptr_t FindFieldIndex(const Field& needle) const;
  FieldPtr FieldFromIndex(intptr_t idx) const;

  // If this is a dart:internal.ClassID class, then inject our own const
  // fields. Returns true if synthetic fields are injected and regular
  // field declarations should be ignored.
  bool InjectCIDFields() const;

  // Returns an array of all instance fields of this class and its superclasses
  // indexed by offset in words.
  // If |class_table| is provided it will be used to resolve super classes by
  // class id, instead of the current class_table stored in the isolate.
  ArrayPtr OffsetToFieldMap(ClassTable* class_table = nullptr) const;

  // Returns true if non-static fields are defined.
  bool HasInstanceFields() const;
1648
  // Returns the functions array without checking the program lock.
  ArrayPtr current_functions() const {
    // We rely on the fact that any loads from the array are dependent loads
    // and avoid the load-acquire barrier here.
    return untag()->functions();
  }
  // Like current_functions(), but asserts (debug mode only) that the current
  // thread holds the program lock for reading.
  ArrayPtr functions() const {
    DEBUG_ASSERT(
        IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
    return current_functions();
  }
  void SetFunctions(const Array& value) const;
  void AddFunction(const Function& function) const;
  intptr_t FindFunctionIndex(const Function& needle) const;
  FunctionPtr FunctionFromIndex(intptr_t idx) const;
  intptr_t FindImplicitClosureFunctionIndex(const Function& needle) const;
  FunctionPtr ImplicitClosureFunctionFromIndex(intptr_t idx) const;
1665
  // Member lookups. (NOTE(review): semantics below are inferred from the
  // declaration names — "ReadLocked" presumably requires the program lock
  // held for reading (cf. functions()), and "AllowPrivate" variants
  // presumably also match library-private names; confirm in definitions.)
  FunctionPtr LookupFunctionReadLocked(const String& name) const;
  FunctionPtr LookupDynamicFunctionUnsafe(const String& name) const;

  FunctionPtr LookupDynamicFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupStaticFunction(const String& name) const;
  FunctionPtr LookupStaticFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupConstructor(const String& name) const;
  FunctionPtr LookupConstructorAllowPrivate(const String& name) const;
  FunctionPtr LookupFactory(const String& name) const;
  FunctionPtr LookupFactoryAllowPrivate(const String& name) const;
  FunctionPtr LookupFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupGetterFunction(const String& name) const;
  FunctionPtr LookupSetterFunction(const String& name) const;
  FieldPtr LookupInstanceField(const String& name) const;
  FieldPtr LookupStaticField(const String& name) const;
  FieldPtr LookupField(const String& name) const;
  FieldPtr LookupFieldAllowPrivate(const String& name,
                                   bool instance_only = false) const;
  FieldPtr LookupInstanceFieldAllowPrivate(const String& name) const;
  FieldPtr LookupStaticFieldAllowPrivate(const String& name) const;

  // The methods above are more efficient than this generic one.
  InstancePtr LookupCanonicalInstance(Zone* zone, const Instance& value) const;

  InstancePtr InsertCanonicalConstant(Zone* zone,
                                      const Instance& constant) const;

  bool RequireCanonicalTypeErasureOfConstants(Zone* zone) const;
1694
1695 static intptr_t InstanceSize() {
1696 return RoundedAllocationSize(size: sizeof(UntaggedClass));
1697 }
1698
1699 // Returns true if any class implements this interface via `implements`.
1700 // Returns false if all possible implementations of this interface must be
1701 // instances of this class or its subclasses.
1702 bool is_implemented() const { return ImplementedBit::decode(value: state_bits()); }
1703 void set_is_implemented() const;
1704 void set_is_implemented_unsafe() const;
1705
1706 bool is_abstract() const { return AbstractBit::decode(value: state_bits()); }
1707 void set_is_abstract() const;
1708
1709 UntaggedClass::ClassLoadingState class_loading_state() const {
1710 return ClassLoadingBits::decode(value: state_bits());
1711 }
1712
1713 bool is_declaration_loaded() const {
1714 return class_loading_state() >= UntaggedClass::kDeclarationLoaded;
1715 }
1716 void set_is_declaration_loaded() const;
1717 void set_is_declaration_loaded_unsafe() const;
1718
1719 bool is_type_finalized() const {
1720 return class_loading_state() >= UntaggedClass::kTypeFinalized;
1721 }
1722 void set_is_type_finalized() const;
1723
1724 bool is_synthesized_class() const {
1725 return SynthesizedClassBit::decode(value: state_bits());
1726 }
1727 void set_is_synthesized_class() const;
1728 void set_is_synthesized_class_unsafe() const;
1729
1730 bool is_enum_class() const { return EnumBit::decode(value: state_bits()); }
1731 void set_is_enum_class() const;
1732
1733 bool is_finalized() const {
1734 return ClassFinalizedBits::decode(value: state_bits()) ==
1735 UntaggedClass::kFinalized ||
1736 ClassFinalizedBits::decode(value: state_bits()) ==
1737 UntaggedClass::kAllocateFinalized;
1738 }
1739 void set_is_finalized() const;
1740 void set_is_finalized_unsafe() const;
1741
1742 bool is_allocate_finalized() const {
1743 return ClassFinalizedBits::decode(value: state_bits()) ==
1744 UntaggedClass::kAllocateFinalized;
1745 }
1746 void set_is_allocate_finalized() const;
1747
1748 bool is_prefinalized() const {
1749 return ClassFinalizedBits::decode(value: state_bits()) ==
1750 UntaggedClass::kPreFinalized;
1751 }
1752
1753 void set_is_prefinalized() const;
1754
1755 bool is_const() const { return ConstBit::decode(value: state_bits()); }
1756 void set_is_const() const;
1757
1758 // Tests if this is a mixin application class which was desugared
1759 // to a normal class by kernel mixin transformation
1760 // (pkg/kernel/lib/transformations/mixin_full_resolution.dart).
1761 //
1762 // In such case, its mixed-in type was pulled into the end of
1763 // interfaces list.
1764 bool is_transformed_mixin_application() const {
1765 return TransformedMixinApplicationBit::decode(value: state_bits());
1766 }
1767 void set_is_transformed_mixin_application() const;
1768
1769 bool is_sealed() const { return SealedBit::decode(value: state_bits()); }
1770 void set_is_sealed() const;
1771
1772 bool is_mixin_class() const { return MixinClassBit::decode(value: state_bits()); }
1773 void set_is_mixin_class() const;
1774
1775 bool is_base_class() const { return BaseClassBit::decode(value: state_bits()); }
1776 void set_is_base_class() const;
1777
1778 bool is_interface_class() const {
1779 return InterfaceClassBit::decode(value: state_bits());
1780 }
1781 void set_is_interface_class() const;
1782
1783 bool is_final() const { return FinalBit::decode(value: state_bits()); }
1784 void set_is_final() const;
1785
1786 bool is_fields_marked_nullable() const {
1787 return FieldsMarkedNullableBit::decode(value: state_bits());
1788 }
1789 void set_is_fields_marked_nullable() const;
1790
1791 bool is_allocated() const { return IsAllocatedBit::decode(value: state_bits()); }
1792 void set_is_allocated(bool value) const;
1793 void set_is_allocated_unsafe(bool value) const;
1794
1795 bool is_loaded() const { return IsLoadedBit::decode(value: state_bits()); }
1796 void set_is_loaded(bool value) const;
1797
1798 uint16_t num_native_fields() const { return untag()->num_native_fields_; }
1799 void set_num_native_fields(uint16_t value) const {
1800 StoreNonPointer(addr: &untag()->num_native_fields_, value);
1801 }
1802 static uint16_t NumNativeFieldsOf(ClassPtr clazz) {
1803 return clazz->untag()->num_native_fields_;
1804 }
1805 static bool IsIsolateUnsendable(ClassPtr clazz) {
1806 return IsIsolateUnsendableBit::decode(value: clazz->untag()->state_bits_);
1807 }
1808
1809#if !defined(DART_PRECOMPILED_RUNTIME)
1810 CodePtr allocation_stub() const { return untag()->allocation_stub(); }
1811 void set_allocation_stub(const Code& value) const;
1812#endif // !defined(DART_PRECOMPILED_RUNTIME)
1813
1814 intptr_t kernel_offset() const {
1815#if defined(DART_PRECOMPILED_RUNTIME)
1816 return 0;
1817#else
1818 return untag()->kernel_offset_;
1819#endif
1820 }
1821
1822 void set_kernel_offset(intptr_t value) const {
1823#if defined(DART_PRECOMPILED_RUNTIME)
1824 UNREACHABLE();
1825#else
1826 ASSERT(value >= 0);
1827 StoreNonPointer(addr: &untag()->kernel_offset_, value);
1828#endif
1829 }
1830
1831 void DisableAllocationStub() const;
1832
1833 ArrayPtr constants() const;
1834 void set_constants(const Array& value) const;
1835
  intptr_t FindInvocationDispatcherFunctionIndex(const Function& needle) const;
  FunctionPtr InvocationDispatcherFunctionFromIndex(intptr_t idx) const;

  // Looks up (and, if |create_if_absent| is set, presumably creates) an
  // invocation dispatcher for |target_name| — confirm in definition.
  FunctionPtr GetInvocationDispatcher(const String& target_name,
                                      const Array& args_desc,
                                      UntaggedFunction::Kind kind,
                                      bool create_if_absent) const;

  FunctionPtr GetRecordFieldGetter(const String& getter_name) const;

  void Finalize() const;

  // Reflective invocation of static members on this class.
  ObjectPtr Invoke(const String& selector,
                   const Array& arguments,
                   const Array& argument_names,
                   bool respect_reflectable = true,
                   bool check_is_entrypoint = false) const;
  ObjectPtr InvokeGetter(const String& selector,
                         bool throw_nsm_if_absent,
                         bool respect_reflectable = true,
                         bool check_is_entrypoint = false) const;
  ObjectPtr InvokeSetter(const String& selector,
                         const Instance& argument,
                         bool respect_reflectable = true,
                         bool check_is_entrypoint = false) const;

  // Evaluate the given expression as if it appeared in a static method of this
  // class and return the resulting value, or an error object if evaluating the
  // expression fails. The method has the formal (type) parameters given in
  // (type_)param_names, and is invoked with the (type)argument values given in
  // (type_)param_values.
  ObjectPtr EvaluateCompiledExpression(
      const ExternalTypedData& kernel_buffer,
      const Array& type_definitions,
      const Array& param_values,
      const TypeArguments& type_param_values) const;

  // Load class declaration (super type, interfaces, type parameters and
  // number of type arguments) if it is not loaded yet.
  void EnsureDeclarationLoaded() const;
1876
  // Drive the class to the (allocate-)finalized state; an ErrorPtr is
  // returned, presumably non-null on failure — confirm in definitions.
  ErrorPtr EnsureIsFinalized(Thread* thread) const;
  ErrorPtr EnsureIsAllocateFinalized(Thread* thread) const;

  // Allocate a class used for VM internal objects.
  template <class FakeObject, class TargetFakeObject>
  static ClassPtr New(IsolateGroup* isolate_group, bool register_class = true);

  // Allocate instance classes.
  static ClassPtr New(const Library& lib,
                      const String& name,
                      const Script& script,
                      TokenPosition token_pos,
                      bool register_class = true);
  static ClassPtr NewNativeWrapper(const Library& library,
                                   const String& name,
                                   int num_fields);

  // Allocate the raw string classes.
  static ClassPtr NewStringClass(intptr_t class_id,
                                 IsolateGroup* isolate_group);

  // Allocate the raw TypedData classes.
  static ClassPtr NewTypedDataClass(intptr_t class_id,
                                    IsolateGroup* isolate_group);

  // Allocate the raw TypedDataView/ByteDataView classes.
  static ClassPtr NewTypedDataViewClass(intptr_t class_id,
                                        IsolateGroup* isolate_group);
  static ClassPtr NewUnmodifiableTypedDataViewClass(
      intptr_t class_id,
      IsolateGroup* isolate_group);

  // Allocate the raw ExternalTypedData classes.
  static ClassPtr NewExternalTypedDataClass(intptr_t class_id,
                                            IsolateGroup* isolate);

  // Allocate the raw Pointer classes.
  static ClassPtr NewPointerClass(intptr_t class_id,
                                  IsolateGroup* isolate_group);
1916
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Register code that has used CHA for optimization.
  // TODO(srdjan): Also register kind of CHA optimization (e.g.: leaf class,
  // leaf method, ...).
  void RegisterCHACode(const Code& code);

  void DisableCHAOptimizedCode(const Class& subclass);

  void DisableAllCHAOptimizedCode();

  void DisableCHAImplementorUsers() { DisableAllCHAOptimizedCode(); }

  // Return the list of code objects that were compiled using CHA of this class.
  // These code objects will be invalidated if new subclasses of this class
  // are finalized.
  WeakArrayPtr dependent_code() const;
  void set_dependent_code(const WeakArray& array) const;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  bool TraceAllocation(IsolateGroup* isolate_group) const;
  void SetTraceAllocation(bool trace_allocation) const;

  // Hot-reload support (declarations only; see definitions for details).
  void CopyStaticFieldValues(ProgramReloadContext* reload_context,
                             const Class& old_cls) const;
  void PatchFieldsAndFunctions() const;
  void MigrateImplicitStaticClosures(ProgramReloadContext* context,
                                     const Class& new_cls) const;
  void CopyCanonicalConstants(const Class& old_cls) const;
  void CopyDeclarationType(const Class& old_cls) const;
  void CheckReload(const Class& replacement,
                   ProgramReloadContext* context) const;

  void AddInvocationDispatcher(const String& target_name,
                               const Array& args_desc,
                               const Function& dispatcher) const;
1952
1953 static int32_t host_instance_size_in_words(const ClassPtr cls) {
1954 return cls->untag()->host_instance_size_in_words_;
1955 }
1956
1957 static int32_t target_instance_size_in_words(const ClassPtr cls) {
1958#if defined(DART_PRECOMPILER)
1959 return cls->untag()->target_instance_size_in_words_;
1960#else
1961 return host_instance_size_in_words(cls);
1962#endif // defined(DART_PRECOMPILER)
1963 }
1964
1965 static int32_t host_next_field_offset_in_words(const ClassPtr cls) {
1966 return cls->untag()->host_next_field_offset_in_words_;
1967 }
1968
1969 static int32_t target_next_field_offset_in_words(const ClassPtr cls) {
1970#if defined(DART_PRECOMPILER)
1971 return cls->untag()->target_next_field_offset_in_words_;
1972#else
1973 return host_next_field_offset_in_words(cls);
1974#endif // defined(DART_PRECOMPILER)
1975 }
1976
1977 static int32_t host_type_arguments_field_offset_in_words(const ClassPtr cls) {
1978 return cls->untag()->host_type_arguments_field_offset_in_words_;
1979 }
1980
1981 static int32_t target_type_arguments_field_offset_in_words(
1982 const ClassPtr cls) {
1983#if defined(DART_PRECOMPILER)
1984 return cls->untag()->target_type_arguments_field_offset_in_words_;
1985#else
1986 return host_type_arguments_field_offset_in_words(cls);
1987#endif // defined(DART_PRECOMPILER)
1988 }
1989
  static intptr_t UnboxedFieldSizeInBytesByCid(intptr_t cid);
  void MarkFieldBoxedDuringReload(ClassTable* class_table,
                                  const Field& field) const;

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  void SetUserVisibleNameInClassTable();
#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

 private:
  // Load-acquire so a reader that sees a non-null type also sees its
  // initialized contents.
  TypePtr declaration_type() const {
    return untag()->declaration_type<std::memory_order_acquire>();
  }

  // Caches the declaration type of this class.
  void set_declaration_type(const Type& type) const;

  // Load-acquire; pairs with the setter below (see its definition).
  TypeArgumentsPtr declaration_instance_type_arguments() const {
    return untag()
        ->declaration_instance_type_arguments<std::memory_order_acquire>();
  }
  void set_declaration_instance_type_arguments(
      const TypeArguments& value) const;

  bool CanReloadFinalized(const Class& replacement,
                          ProgramReloadContext* context) const;
  bool CanReloadPreFinalized(const Class& replacement,
                             ProgramReloadContext* context) const;

  // Tells whether instances need morphing for reload.
  bool RequiresInstanceMorphing(ClassTable* class_table,
                                const Class& replacement) const;

  template <class FakeInstance, class TargetFakeInstance>
  static ClassPtr NewCommon(intptr_t index);
2024
  // Filter used by the private Lookup* helpers below.
  enum MemberKind {
    kAny = 0,
    kStatic,
    kInstance,
    kInstanceAllowAbstract,
    kConstructor,
    kFactory,
  };
  // Bit positions/sizes within state_bits_; the BitField classes below are
  // defined in terms of these. Do not reorder: values are positional.
  enum StateBits {
    kConstBit = 0,
    kImplementedBit = 1,
    kClassFinalizedPos = 2,
    kClassFinalizedSize = 2,
    kClassLoadingPos = kClassFinalizedPos + kClassFinalizedSize,  // = 4
    kClassLoadingSize = 2,
    kAbstractBit = kClassLoadingPos + kClassLoadingSize,  // = 6
    kSynthesizedClassBit,
    kMixinAppAliasBit,
    kMixinTypeAppliedBit,
    kFieldsMarkedNullableBit,
    kEnumBit,
    kTransformedMixinApplicationBit,
    kIsAllocatedBit,
    kIsLoadedBit,
    kHasPragmaBit,
    kSealedBit,
    kMixinClassBit,
    kBaseClassBit,
    kInterfaceClassBit,
    kFinalBit,
    // Whether instances of the class cannot be sent across ports.
    //
    // Will be true iff
    //    - class is marked with `@pragma('vm:isolate-unsendable')
    //    - super class / super interface classes are marked as unsendable.
    //    - class has native fields.
    kIsIsolateUnsendableBit,
    // True if this class has `@pragma('vm:isolate-unsendable') annotation or
    // base class or implemented interfaces has this bit.
    kIsIsolateUnsendableDueToPragmaBit,
    // This class is a subtype of Future.
    kIsFutureSubtypeBit,
    // This class has a non-abstract subtype which is a subtype of Future.
    // It means that variable of static type based on this class may hold
    // a Future instance.
    kCanBeFutureBit,
  };
  // Typed BitField views over state_bits_, one per StateBits entry above.
  class ConstBit : public BitField<uint32_t, bool, kConstBit, 1> {};
  class ImplementedBit : public BitField<uint32_t, bool, kImplementedBit, 1> {};
  class ClassFinalizedBits : public BitField<uint32_t,
                                             UntaggedClass::ClassFinalizedState,
                                             kClassFinalizedPos,
                                             kClassFinalizedSize> {};
  class ClassLoadingBits : public BitField<uint32_t,
                                           UntaggedClass::ClassLoadingState,
                                           kClassLoadingPos,
                                           kClassLoadingSize> {};
  class AbstractBit : public BitField<uint32_t, bool, kAbstractBit, 1> {};
  class SynthesizedClassBit
      : public BitField<uint32_t, bool, kSynthesizedClassBit, 1> {};
  class FieldsMarkedNullableBit
      : public BitField<uint32_t, bool, kFieldsMarkedNullableBit, 1> {};
  class EnumBit : public BitField<uint32_t, bool, kEnumBit, 1> {};
  class TransformedMixinApplicationBit
      : public BitField<uint32_t, bool, kTransformedMixinApplicationBit, 1> {};
  class IsAllocatedBit : public BitField<uint32_t, bool, kIsAllocatedBit, 1> {};
  class IsLoadedBit : public BitField<uint32_t, bool, kIsLoadedBit, 1> {};
  class HasPragmaBit : public BitField<uint32_t, bool, kHasPragmaBit, 1> {};
  class SealedBit : public BitField<uint32_t, bool, kSealedBit, 1> {};
  class MixinClassBit : public BitField<uint32_t, bool, kMixinClassBit, 1> {};
  class BaseClassBit : public BitField<uint32_t, bool, kBaseClassBit, 1> {};
  class InterfaceClassBit
      : public BitField<uint32_t, bool, kInterfaceClassBit, 1> {};
  class FinalBit : public BitField<uint32_t, bool, kFinalBit, 1> {};
  class IsIsolateUnsendableBit
      : public BitField<uint32_t, bool, kIsIsolateUnsendableBit, 1> {};
  class IsIsolateUnsendableDueToPragmaBit
      : public BitField<uint32_t, bool, kIsIsolateUnsendableDueToPragmaBit, 1> {
  };
  class IsFutureSubtypeBit
      : public BitField<uint32_t, bool, kIsFutureSubtypeBit, 1> {};
  class CanBeFutureBit : public BitField<uint32_t, bool, kCanBeFutureBit, 1> {};
2107
2108 void set_name(const String& value) const;
2109 void set_user_name(const String& value) const;
2110 const char* GenerateUserVisibleName() const;
2111 void set_state_bits(intptr_t bits) const;
2112 void set_implementor_cid(intptr_t value) const;
2113
2114 FunctionPtr CreateInvocationDispatcher(const String& target_name,
2115 const Array& args_desc,
2116 UntaggedFunction::Kind kind) const;
2117
2118 FunctionPtr CreateRecordFieldGetter(const String& getter_name) const;
2119
2120 // Returns the bitmap of unboxed fields
2121 UnboxedFieldBitmap CalculateFieldOffsets() const;
2122
2123 // functions_hash_table is in use iff there are at least this many functions.
2124 static constexpr intptr_t kFunctionLookupHashThreshold = 16;
2125
2126 // Initial value for the cached number of type arguments.
2127 static constexpr intptr_t kUnknownNumTypeArguments = -1;
2128
2129 int16_t num_type_arguments() const {
2130 return LoadNonPointer<int16_t, std::memory_order_relaxed>(
2131 addr: &untag()->num_type_arguments_);
2132 }
2133
2134 uint32_t state_bits() const {
2135 // Ensure any following load instructions do not get performed before this
2136 // one.
2137 return LoadNonPointer<uint32_t, std::memory_order_acquire>(
2138 addr: &untag()->state_bits_);
2139 }
2140
2141 public:
2142 void set_num_type_arguments(intptr_t value) const;
2143 void set_num_type_arguments_unsafe(intptr_t value) const;
2144
2145 bool has_pragma() const { return HasPragmaBit::decode(value: state_bits()); }
2146 void set_has_pragma(bool value) const;
2147
2148 void set_is_isolate_unsendable(bool value) const;
2149 bool is_isolate_unsendable() const {
2150 ASSERT(is_finalized()); // This bit is initialized in class finalizer.
2151 return IsIsolateUnsendableBit::decode(value: state_bits());
2152 }
2153
2154 void set_is_isolate_unsendable_due_to_pragma(bool value) const;
2155 bool is_isolate_unsendable_due_to_pragma() const {
2156 return IsIsolateUnsendableDueToPragmaBit::decode(value: state_bits());
2157 }
2158
2159 void set_is_future_subtype(bool value) const;
2160 bool is_future_subtype() const {
2161 ASSERT(is_type_finalized());
2162 return IsFutureSubtypeBit::decode(value: state_bits());
2163 }
2164
2165 void set_can_be_future(bool value) const;
2166 bool can_be_future() const { return CanBeFutureBit::decode(value: state_bits()); }
2167
 private:
  void set_functions(const Array& value) const;
  void set_fields(const Array& value) const;
  void set_invocation_dispatcher_cache(const Array& cache) const;

  ArrayPtr invocation_dispatcher_cache() const;

  // Calculates number of type arguments of this class.
  // This includes type arguments of a superclass and takes overlapping
  // of type arguments into account.
  intptr_t ComputeNumTypeArguments() const;

  // Assigns empty array to all raw class array fields.
  void InitEmptyFields() const;

  // MemberKind-filtered lookup helpers backing the public Lookup* methods.
  static FunctionPtr CheckFunctionType(const Function& func, MemberKind kind);
  FunctionPtr LookupFunctionReadLocked(const String& name,
                                       MemberKind kind) const;
  FunctionPtr LookupFunctionAllowPrivate(const String& name,
                                         MemberKind kind) const;
  FieldPtr LookupField(const String& name, MemberKind kind) const;

  FunctionPtr LookupAccessorFunction(const char* prefix,
                                     intptr_t prefix_length,
                                     const String& name) const;

  // Allocate an instance class which has a VM implementation.
  template <class FakeInstance, class TargetFakeInstance>
  static ClassPtr New(intptr_t id,
                      IsolateGroup* isolate_group,
                      bool register_class = true,
                      bool is_abstract = false);

  // Helper that calls 'Class::New<Instance>(kIllegalCid)'.
  static ClassPtr NewInstanceClass();

  FINAL_HEAP_OBJECT_IMPLEMENTATION(Class, Object);
  friend class AbstractType;
  friend class Instance;
  friend class Object;
  friend class Type;
  friend class Intrinsifier;
  friend class ProgramWalker;
  friend class Precompiler;
  friend class ClassFinalizer;
2213};
2214
// Classification of type genericity according to type parameter owners,
// i.e. which owners' type parameters make a type count as generic.
enum Genericity {
  kAny,           // Consider type params of current class and functions.
  kCurrentClass,  // Consider type params of current class only.
  kFunctions,     // Consider type params of current and parent functions.
};
2221
2222// Wrapper of a [Class] with different [Script] and kernel binary.
2223//
2224// We use this as owner of [Field]/[Function] objects that were from a different
2225// script/kernel than the actual class object.
2226//
2227// * used for corelib patches that live in different .dart files than the
2228// library itself.
2229//
2230// * used for library parts that live in different .dart files than the library
2231// itself.
2232//
2233// * used in reload to make old [Function]/[Field] objects have the old script
2234// kernel data.
2235//
2236class PatchClass : public Object {
2237 public:
2238 ClassPtr wrapped_class() const { return untag()->wrapped_class(); }
2239 ScriptPtr script() const { return untag()->script(); }
2240
2241 intptr_t kernel_library_index() const {
2242#if !defined(DART_PRECOMPILED_RUNTIME)
2243 return untag()->kernel_library_index_;
2244#else
2245 return -1;
2246#endif
2247 }
2248 void set_kernel_library_index(intptr_t index) const {
2249 NOT_IN_PRECOMPILED(StoreNonPointer(&untag()->kernel_library_index_, index));
2250 }
2251
2252#if !defined(DART_PRECOMPILED_RUNTIME)
2253 KernelProgramInfoPtr kernel_program_info() const {
2254 return untag()->kernel_program_info();
2255 }
2256 void set_kernel_program_info(const KernelProgramInfo& info) const;
2257#endif
2258
2259 static intptr_t InstanceSize() {
2260 return RoundedAllocationSize(size: sizeof(UntaggedPatchClass));
2261 }
2262 static bool IsInFullSnapshot(PatchClassPtr cls) {
2263 NoSafepointScope no_safepoint;
2264 return Class::IsInFullSnapshot(cls: cls->untag()->wrapped_class());
2265 }
2266
2267 static PatchClassPtr New(const Class& wrapped_class,
2268 const KernelProgramInfo& info,
2269 const Script& source);
2270
2271 private:
2272 void set_wrapped_class(const Class& value) const;
2273 void set_script(const Script& value) const;
2274
2275 static PatchClassPtr New();
2276
2277 FINAL_HEAP_OBJECT_IMPLEMENTATION(PatchClass, Object);
2278 friend class Class;
2279};
2280
2281class SingleTargetCache : public Object {
2282 public:
2283 CodePtr target() const { return untag()->target(); }
2284 void set_target(const Code& target) const;
2285 static intptr_t target_offset() {
2286 return OFFSET_OF(UntaggedSingleTargetCache, target_);
2287 }
2288
2289#define DEFINE_NON_POINTER_FIELD_ACCESSORS(type, name) \
2290 type name() const { return untag()->name##_; } \
2291 void set_##name(type value) const { \
2292 StoreNonPointer(&untag()->name##_, value); \
2293 } \
2294 static intptr_t name##_offset() { \
2295 return OFFSET_OF(UntaggedSingleTargetCache, name##_); \
2296 }
2297
2298 DEFINE_NON_POINTER_FIELD_ACCESSORS(uword, entry_point);
2299 DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, lower_limit);
2300 DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, upper_limit);
2301#undef DEFINE_NON_POINTER_FIELD_ACCESSORS
2302
2303 static intptr_t InstanceSize() {
2304 return RoundedAllocationSize(size: sizeof(UntaggedSingleTargetCache));
2305 }
2306
2307 static SingleTargetCachePtr New();
2308
2309 private:
2310 FINAL_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache, Object);
2311 friend class Class;
2312};
2313
2314class MonomorphicSmiableCall : public Object {
2315 public:
2316 classid_t expected_cid() const { return untag()->expected_cid_; }
2317
2318 static intptr_t InstanceSize() {
2319 return RoundedAllocationSize(size: sizeof(UntaggedMonomorphicSmiableCall));
2320 }
2321
2322 static MonomorphicSmiableCallPtr New(classid_t expected_cid,
2323 const Code& target);
2324
2325 static intptr_t expected_cid_offset() {
2326 return OFFSET_OF(UntaggedMonomorphicSmiableCall, expected_cid_);
2327 }
2328
2329 static intptr_t entrypoint_offset() {
2330 return OFFSET_OF(UntaggedMonomorphicSmiableCall, entrypoint_);
2331 }
2332
2333 private:
2334 FINAL_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall, Object);
2335 friend class Class;
2336};
2337
class CallSiteData : public Object {
 public:
  StringPtr target_name() const { return untag()->target_name(); }
  ArrayPtr arguments_descriptor() const { return untag()->args_descriptor(); }

  // Argument-count queries derived from the arguments descriptor
  // (declarations only; see definitions).
  intptr_t TypeArgsLen() const;

  intptr_t CountWithTypeArgs() const;

  intptr_t CountWithoutTypeArgs() const;

  intptr_t SizeWithoutTypeArgs() const;

  intptr_t SizeWithTypeArgs() const;

  // Field offsets for use by generated code.
  static intptr_t target_name_offset() {
    return OFFSET_OF(UntaggedCallSiteData, target_name_);
  }

  static intptr_t arguments_descriptor_offset() {
    return OFFSET_OF(UntaggedCallSiteData, args_descriptor_);
  }

 private:
  void set_target_name(const String& value) const;
  void set_arguments_descriptor(const Array& value) const;

  HEAP_OBJECT_IMPLEMENTATION(CallSiteData, Object)

  friend class ICData;
  friend class MegamorphicCache;
};
2370
2371class UnlinkedCall : public CallSiteData {
2372 public:
2373 bool can_patch_to_monomorphic() const {
2374 return untag()->can_patch_to_monomorphic_;
2375 }
2376
2377 static intptr_t InstanceSize() {
2378 return RoundedAllocationSize(size: sizeof(UntaggedUnlinkedCall));
2379 }
2380
2381 uword Hash() const;
2382 bool Equals(const UnlinkedCall& other) const;
2383
2384 static UnlinkedCallPtr New();
2385
2386 private:
2387 friend class ICData; // For set_*() methods.
2388
2389 void set_can_patch_to_monomorphic(bool value) const;
2390
2391 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall, CallSiteData);
2392 friend class Class;
2393};
2394
2395// Object holding information about an IC: test classes and their
2396// corresponding targets. The owner of the ICData can be either the function
2397// or the original ICData object. In case of background compilation we
2398// copy the ICData in a child object, thus freezing it during background
2399// compilation. Code may contain only original ICData objects.
2400//
2401// ICData's backing store is an array that logically contains several valid
2402// entries followed by a sentinel entry.
2403//
2404// [<entry-0>, <...>, <entry-N>, <sentinel>]
2405//
2406// Each entry has the following form:
2407//
2408// [arg0?, arg1?, argN?, count, target-function/code, exactness?]
2409//
2410// The <entry-X> need to contain valid type feedback.
2411// The <sentinel> entry and must have kIllegalCid value for all
2412// members of the entry except for the last one (`exactness` if
2413// present, otherwise `target-function/code`) - which we use as a backref:
2414//
2415// * For empty ICData we use a cached/shared backing store. So there is no
2416// unique backref, we use kIllegalCid instead.
2417// * For non-empty ICData the backref in the backing store array will point to
2418// the ICData object.
2419//
2420// Updating the ICData happens under a lock to avoid phantom-reads. The backing
2421// is treated as an immutable Copy-on-Write data structure: Adding to the ICData
2422// makes a copy with length+1 which will be store-release'd so any reader can
2423// see it (and doesn't need to hold a lock).
2424class ICData : public CallSiteData {
2425 public:
2426 FunctionPtr Owner() const;
2427
2428 ICDataPtr Original() const;
2429
2430 void SetOriginal(const ICData& value) const;
2431
2432 bool IsOriginal() const { return Original() == this->ptr(); }
2433
2434 intptr_t NumArgsTested() const;
2435
2436 intptr_t deopt_id() const {
2437#if defined(DART_PRECOMPILED_RUNTIME)
2438 UNREACHABLE();
2439 return -1;
2440#else
2441 return untag()->deopt_id_;
2442#endif
2443 }
2444
2445 bool IsImmutable() const;
2446
2447#if !defined(DART_PRECOMPILED_RUNTIME)
2448 AbstractTypePtr receivers_static_type() const {
2449 return untag()->receivers_static_type();
2450 }
2451 bool is_tracking_exactness() const {
2452 return untag()->state_bits_.Read<TrackingExactnessBit>();
2453 }
2454#else
2455 bool is_tracking_exactness() const { return false; }
2456#endif
2457
2458// Note: only deopts with reasons before Unknown in this list are recorded in
2459// the ICData. All other reasons are used purely for informational messages
2460// printed during deoptimization itself.
2461#define DEOPT_REASONS(V) \
2462 V(BinarySmiOp) \
2463 V(BinaryInt64Op) \
2464 V(DoubleToSmi) \
2465 V(CheckSmi) \
2466 V(CheckClass) \
2467 V(Unknown) \
2468 V(PolymorphicInstanceCallTestFail) \
2469 V(UnaryInt64Op) \
2470 V(BinaryDoubleOp) \
2471 V(UnaryOp) \
2472 V(UnboxInteger) \
2473 V(Unbox) \
2474 V(CheckArrayBound) \
2475 V(AtCall) \
2476 V(GuardField) \
2477 V(TestCids) \
2478 V(NumReasons)
2479
2480 enum DeoptReasonId {
2481#define DEFINE_ENUM_LIST(name) kDeopt##name,
2482 DEOPT_REASONS(DEFINE_ENUM_LIST)
2483#undef DEFINE_ENUM_LIST
2484 };
2485
2486 static constexpr intptr_t kLastRecordedDeoptReason = kDeoptUnknown - 1;
2487
2488 enum DeoptFlags {
2489 // Deoptimization is caused by an optimistically hoisted instruction.
2490 kHoisted = 1 << 0,
2491
2492 // Deoptimization is caused by an optimistically generalized bounds check.
2493 kGeneralized = 1 << 1
2494 };
2495
2496 bool HasDeoptReasons() const { return DeoptReasons() != 0; }
2497 uint32_t DeoptReasons() const;
2498 void SetDeoptReasons(uint32_t reasons) const;
2499
2500 bool HasDeoptReason(ICData::DeoptReasonId reason) const;
2501 void AddDeoptReason(ICData::DeoptReasonId reason) const;
2502
2503 // Call site classification that is helpful for hot-reload. Call sites with
2504 // different `RebindRule` have to be rebound differently.
2505#define FOR_EACH_REBIND_RULE(V) \
2506 V(Instance) \
2507 V(NoRebind) \
2508 V(NSMDispatch) \
2509 V(Optimized) \
2510 V(Static) \
2511 V(Super)
2512
2513 enum RebindRule {
2514#define REBIND_ENUM_DEF(name) k##name,
2515 FOR_EACH_REBIND_RULE(REBIND_ENUM_DEF)
2516#undef REBIND_ENUM_DEF
2517 kNumRebindRules,
2518 };
2519 static const char* RebindRuleToCString(RebindRule r);
2520 static bool ParseRebindRule(const char* str, RebindRule* out);
2521 RebindRule rebind_rule() const;
2522
2523 void set_is_megamorphic(bool value) const {
2524 untag()->state_bits_.UpdateBool<MegamorphicBit, std::memory_order_release>(
2525 value);
2526 }
2527
2528 // The length of the array. This includes all sentinel entries including
2529 // the final one.
2530 intptr_t Length() const;
2531
2532 intptr_t NumberOfChecks() const;
2533
2534 // Discounts any checks with usage of zero.
2535 // Takes O(result)) time!
2536 intptr_t NumberOfUsedChecks() const;
2537
2538 bool NumberOfChecksIs(intptr_t n) const;
2539
2540 bool IsValidEntryIndex(intptr_t index) const {
2541 return 0 <= index && index < NumberOfChecks();
2542 }
2543
2544 static intptr_t InstanceSize() {
2545 return RoundedAllocationSize(size: sizeof(UntaggedICData));
2546 }
2547
2548 static intptr_t state_bits_offset() {
2549 return OFFSET_OF(UntaggedICData, state_bits_);
2550 }
2551
2552 static intptr_t NumArgsTestedShift() { return kNumArgsTestedPos; }
2553
2554 static intptr_t NumArgsTestedMask() {
2555 return ((1 << kNumArgsTestedSize) - 1) << kNumArgsTestedPos;
2556 }
2557
2558 static intptr_t entries_offset() {
2559 return OFFSET_OF(UntaggedICData, entries_);
2560 }
2561
2562 static intptr_t owner_offset() { return OFFSET_OF(UntaggedICData, owner_); }
2563
2564#if !defined(DART_PRECOMPILED_RUNTIME)
2565 static intptr_t receivers_static_type_offset() {
2566 return OFFSET_OF(UntaggedICData, receivers_static_type_);
2567 }
2568#endif
2569
2570 // NOTE: Can only be called during reload.
2571 void Clear(const CallSiteResetter& proof_of_reload) const {
2572 TruncateTo(num_checks: 0, proof_of_reload);
2573 }
2574
2575 // NOTE: Can only be called during reload.
2576 void TruncateTo(intptr_t num_checks,
2577 const CallSiteResetter& proof_of_reload) const;
2578
2579 // Clears the count for entry |index|.
2580 // NOTE: Can only be called during reload.
2581 void ClearCountAt(intptr_t index,
2582 const CallSiteResetter& proof_of_reload) const;
2583
2584 // Clear all entries with the sentinel value and reset the first entry
2585 // with the dummy target entry.
2586 // NOTE: Can only be called during reload.
2587 void ClearAndSetStaticTarget(const Function& func,
2588 const CallSiteResetter& proof_of_reload) const;
2589
2590 void DebugDump() const;
2591
2592 // Adding checks.
2593
2594 // Ensures there is a check for [class_ids].
2595 //
2596 // Calls [AddCheck] iff there is no existing check. Ensures test (and
2597 // potential update) will be performed under exclusive lock to guard against
2598 // multiple threads trying to add the same check.
2599 void EnsureHasCheck(const GrowableArray<intptr_t>& class_ids,
2600 const Function& target,
2601 intptr_t count = 1) const;
2602
2603 // Adds one more class test to ICData. Length of 'classes' must be equal to
2604 // the number of arguments tested. Use only for num_args_tested > 1.
2605 void AddCheck(const GrowableArray<intptr_t>& class_ids,
2606 const Function& target,
2607 intptr_t count = 1) const;
2608
2609 StaticTypeExactnessState GetExactnessAt(intptr_t count) const;
2610
2611 // Ensures there is a receiver check for [receiver_class_id].
2612 //
2613 // Calls [AddCheckReceiverCheck] iff there is no existing check. Ensures
2614 // test (and potential update) will be performed under exclusive lock to
2615 // guard against multiple threads trying to add the same check.
2616 void EnsureHasReceiverCheck(
2617 intptr_t receiver_class_id,
2618 const Function& target,
2619 intptr_t count = 1,
2620 StaticTypeExactnessState exactness =
2621 StaticTypeExactnessState::NotTracking()) const;
2622
2623 // Adds sorted so that Smi is the first class-id. Use only for
2624 // num_args_tested == 1.
2625 void AddReceiverCheck(intptr_t receiver_class_id,
2626 const Function& target,
2627 intptr_t count = 1,
2628 StaticTypeExactnessState exactness =
2629 StaticTypeExactnessState::NotTracking()) const;
2630
2631 // Retrieving checks.
2632
2633 void GetCheckAt(intptr_t index,
2634 GrowableArray<intptr_t>* class_ids,
2635 Function* target) const;
2636 void GetClassIdsAt(intptr_t index, GrowableArray<intptr_t>* class_ids) const;
2637
2638 // Only for 'num_args_checked == 1'.
2639 void GetOneClassCheckAt(intptr_t index,
2640 intptr_t* class_id,
2641 Function* target) const;
2642 // Only for 'num_args_checked == 1'.
2643 intptr_t GetCidAt(intptr_t index) const;
2644
2645 intptr_t GetReceiverClassIdAt(intptr_t index) const;
2646 intptr_t GetClassIdAt(intptr_t index, intptr_t arg_nr) const;
2647
2648 FunctionPtr GetTargetAt(intptr_t index) const;
2649
2650 void IncrementCountAt(intptr_t index, intptr_t value) const;
2651 void SetCountAt(intptr_t index, intptr_t value) const;
2652 intptr_t GetCountAt(intptr_t index) const;
2653 intptr_t AggregateCount() const;
2654
2655 // Returns this->untag() if num_args_tested == 1 and arg_nr == 1, otherwise
2656 // returns a new ICData object containing only unique arg_nr checks.
2657 // Returns only used entries.
2658 ICDataPtr AsUnaryClassChecksForArgNr(intptr_t arg_nr) const;
2659 ICDataPtr AsUnaryClassChecks() const { return AsUnaryClassChecksForArgNr(arg_nr: 0); }
2660
2661 // Returns ICData with aggregated receiver count, sorted by highest count.
2662 // Smi not first!! (the convention for ICData used in code generation is that
2663 // Smi check is first)
2664 // Used for printing and optimizations.
2665 ICDataPtr AsUnaryClassChecksSortedByCount() const;
2666
2667 UnlinkedCallPtr AsUnlinkedCall() const;
2668
2669 bool HasReceiverClassId(intptr_t class_id) const;
2670
2671 // Note: passing non-null receiver_type enables exactness tracking for
2672 // the receiver type. Receiver type is expected to be a fully
2673 // instantiated generic (but not a FutureOr).
2674 // See StaticTypeExactnessState for more information.
2675 static ICDataPtr New(
2676 const Function& owner,
2677 const String& target_name,
2678 const Array& arguments_descriptor,
2679 intptr_t deopt_id,
2680 intptr_t num_args_tested,
2681 RebindRule rebind_rule,
2682 const AbstractType& receiver_type = Object::null_abstract_type());
2683
2684 // Similar to [New] makes the ICData have an initial (cids, target) entry.
2685 static ICDataPtr NewWithCheck(
2686 const Function& owner,
2687 const String& target_name,
2688 const Array& arguments_descriptor,
2689 intptr_t deopt_id,
2690 intptr_t num_args_tested,
2691 RebindRule rebind_rule,
2692 GrowableArray<intptr_t>* cids,
2693 const Function& target,
2694 const AbstractType& receiver_type = Object::null_abstract_type());
2695
2696 static ICDataPtr NewForStaticCall(const Function& owner,
2697 const Function& target,
2698 const Array& arguments_descriptor,
2699 intptr_t deopt_id,
2700 intptr_t num_args_tested,
2701 RebindRule rebind_rule);
2702
2703 static ICDataPtr NewFrom(const ICData& from, intptr_t num_args_tested);
2704
2705 // Generates a new ICData with descriptor and data array copied (deep clone).
2706 static ICDataPtr Clone(const ICData& from);
2707
2708 // Gets the [ICData] from the [ICData::entries_] array (which stores a back
2709 // ref).
2710 //
2711 // May return `null` if the [ICData] is empty.
2712 static ICDataPtr ICDataOfEntriesArray(const Array& array);
2713
2714 static intptr_t TestEntryLengthFor(intptr_t num_args,
2715 bool tracking_exactness);
2716
2717 static intptr_t CountIndexFor(intptr_t num_args) { return num_args; }
2718 static intptr_t EntryPointIndexFor(intptr_t num_args) { return num_args; }
2719
2720 static intptr_t TargetIndexFor(intptr_t num_args) { return num_args + 1; }
2721 static intptr_t CodeIndexFor(intptr_t num_args) { return num_args + 1; }
2722
2723 static intptr_t ExactnessIndexFor(intptr_t num_args) { return num_args + 2; }
2724
2725 bool IsUsedAt(intptr_t i) const;
2726
2727 void PrintToJSONArray(const JSONArray& jsarray,
2728 TokenPosition token_pos) const;
2729
2730 // Initialize the preallocated empty ICData entry arrays.
2731 static void Init();
2732
2733 // Clear the preallocated empty ICData entry arrays.
2734 static void Cleanup();
2735
2736 // We cache ICData with 0, 1, 2 arguments tested without exactness
2737 // tracking and with 1 argument tested with exactness tracking.
2738 enum {
2739 kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx = 0,
2740 kCachedICDataMaxArgsTestedWithoutExactnessTracking = 2,
2741 kCachedICDataOneArgWithExactnessTrackingIdx =
2742 kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
2743 kCachedICDataMaxArgsTestedWithoutExactnessTracking + 1,
2744 kCachedICDataArrayCount = kCachedICDataOneArgWithExactnessTrackingIdx + 1,
2745 };
2746
2747 bool is_static_call() const;
2748
2749 intptr_t FindCheck(const GrowableArray<intptr_t>& cids) const;
2750
2751 ArrayPtr entries() const {
2752 return untag()->entries<std::memory_order_acquire>();
2753 }
2754
2755 bool receiver_cannot_be_smi() const {
2756 return untag()->state_bits_.Read<ReceiverCannotBeSmiBit>();
2757 }
2758
2759 void set_receiver_cannot_be_smi(bool value) const {
2760 untag()->state_bits_.UpdateBool<ReceiverCannotBeSmiBit>(value);
2761 }
2762
2763 uword Hash() const;
2764
2765 private:
2766 static ICDataPtr New();
2767
2768 // Grows the array and also sets the argument to the index that should be used
2769 // for the new entry.
2770 ArrayPtr Grow(intptr_t* index) const;
2771
2772 void set_deopt_id(intptr_t value) const;
2773 void set_entries(const Array& value) const;
2774 void set_owner(const Function& value) const;
2775 void set_rebind_rule(uint32_t rebind_rule) const;
2776 void clear_state_bits() const;
2777 void set_tracking_exactness(bool value) const {
2778 untag()->state_bits_.UpdateBool<TrackingExactnessBit>(value);
2779 }
2780
2781 // Does entry |index| contain the sentinel value?
2782 void SetNumArgsTested(intptr_t value) const;
2783 void SetReceiversStaticType(const AbstractType& type) const;
2784 DEBUG_ONLY(void AssertInvariantsAreSatisfied() const;)
2785
2786 static void SetTargetAtPos(const Array& data,
2787 intptr_t data_pos,
2788 intptr_t num_args_tested,
2789 const Function& target);
2790 void AddCheckInternal(const GrowableArray<intptr_t>& class_ids,
2791 const Function& target,
2792 intptr_t count) const;
2793 void AddReceiverCheckInternal(intptr_t receiver_class_id,
2794 const Function& target,
2795 intptr_t count,
2796 StaticTypeExactnessState exactness) const;
2797
2798 // This bit is set when a call site becomes megamorphic and starts using a
2799 // MegamorphicCache instead of ICData. It means that the entries in the
2800 // ICData are incomplete and the MegamorphicCache needs to also be consulted
2801 // to list the call site's observed receiver classes and targets.
2802 // In the compiler, this should only be read once by CallTargets to avoid the
2803 // compiler seeing an unstable set of feedback.
2804 bool is_megamorphic() const {
2805 // Ensure any following load instructions do not get performed before this
2806 // one.
2807 return untag()
2808 ->state_bits_.Read<MegamorphicBit, std::memory_order_acquire>();
2809 }
2810
2811 bool ValidateInterceptor(const Function& target) const;
2812
2813 enum {
2814 kNumArgsTestedPos = 0,
2815 kNumArgsTestedSize = 2,
2816 kTrackingExactnessPos = kNumArgsTestedPos + kNumArgsTestedSize,
2817 kTrackingExactnessSize = 1,
2818 kDeoptReasonPos = kTrackingExactnessPos + kTrackingExactnessSize,
2819 kDeoptReasonSize = kLastRecordedDeoptReason + 1,
2820 kRebindRulePos = kDeoptReasonPos + kDeoptReasonSize,
2821 kRebindRuleSize = 3,
2822 kMegamorphicPos = kRebindRulePos + kRebindRuleSize,
2823 kMegamorphicSize = 1,
2824 kReceiverCannotBeSmiPos = kMegamorphicPos + kMegamorphicSize,
2825 kReceiverCannotBeSmiSize = 1,
2826 };
2827
2828 COMPILE_ASSERT(kReceiverCannotBeSmiPos + kReceiverCannotBeSmiSize <=
2829 sizeof(UntaggedICData::state_bits_) * kBitsPerWord);
2830 COMPILE_ASSERT(kNumRebindRules <= (1 << kRebindRuleSize));
2831
2832 class NumArgsTestedBits : public BitField<uint32_t,
2833 uint32_t,
2834 kNumArgsTestedPos,
2835 kNumArgsTestedSize> {};
2836 class TrackingExactnessBit : public BitField<uint32_t,
2837 bool,
2838 kTrackingExactnessPos,
2839 kTrackingExactnessSize> {};
2840 class DeoptReasonBits : public BitField<uint32_t,
2841 uint32_t,
2842 ICData::kDeoptReasonPos,
2843 ICData::kDeoptReasonSize> {};
2844 class RebindRuleBits : public BitField<uint32_t,
2845 uint32_t,
2846 ICData::kRebindRulePos,
2847 ICData::kRebindRuleSize> {};
2848 class MegamorphicBit
2849 : public BitField<uint32_t, bool, kMegamorphicPos, kMegamorphicSize> {};
2850
2851 class ReceiverCannotBeSmiBit : public BitField<uint32_t,
2852 bool,
2853 kReceiverCannotBeSmiPos,
2854 kReceiverCannotBeSmiSize> {};
2855
2856#if defined(DEBUG)
2857 // Used in asserts to verify that a check is not added twice.
2858 bool HasCheck(const GrowableArray<intptr_t>& cids) const;
2859#endif // DEBUG
2860
2861 intptr_t TestEntryLength() const;
2862 static ArrayPtr NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
2863 bool tracking_exactness);
2864 static ArrayPtr CachedEmptyICDataArray(intptr_t num_args_tested,
2865 bool tracking_exactness);
2866 static bool IsCachedEmptyEntry(const Array& array);
2867 static ICDataPtr NewDescriptor(Zone* zone,
2868 const Function& owner,
2869 const String& target_name,
2870 const Array& arguments_descriptor,
2871 intptr_t deopt_id,
2872 intptr_t num_args_tested,
2873 RebindRule rebind_rule,
2874 const AbstractType& receiver_type);
2875
2876 static void WriteSentinel(const Array& data,
2877 intptr_t test_entry_length,
2878 const Object& back_ref);
2879
2880 // A cache of VM heap allocated preinitialized empty ic data entry arrays.
2881 static ArrayPtr cached_icdata_arrays_[kCachedICDataArrayCount];
2882
2883 FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, CallSiteData);
2884 friend class CallSiteResetter;
2885 friend class CallTargets;
2886 friend class Class;
2887 friend class VMDeserializationRoots;
2888 friend class ICDataTestTask;
2889 friend class VMSerializationRoots;
2890};
2891
2892// Often used constants for number of free function type parameters.
// Often used constants for number of free function type parameters.
enum {
  // No type parameters are considered free.
  kNoneFree = 0,

  // 'kCurrentAndEnclosingFree' is used when partially applying a signature
  // function to a set of type arguments. It indicates that the set of type
  // parameters declared by the current function and enclosing functions should
  // be considered free, and the current function type parameters should be
  // substituted as well.
  //
  // For instance, if the signature "<T>(T, R) => T" is instantiated with
  // function type arguments [int, String] and kCurrentAndEnclosingFree is
  // supplied, the result of the instantiation will be "(String, int) => int".
  kCurrentAndEnclosingFree = kMaxInt32 - 1,

  // Only parameters declared by enclosing functions are free.
  kAllFree = kMaxInt32,
};
2910
2911// Formatting configuration for Function::PrintName.
2912struct NameFormattingParams {
2913 Object::NameVisibility name_visibility;
2914 bool disambiguate_names;
2915
2916 // By default function name includes the name of the enclosing class if any.
2917 // However in some contexts this information is redundant and class name
2918 // is already known. In this case setting |include_class_name| to false
2919 // allows you to exclude this information from the formatted name.
2920 bool include_class_name = true;
2921
2922 // By default function name includes the name of the enclosing function if
2923 // any. However in some contexts this information is redundant and
2924 // the name of the enclosing function is already known. In this case
2925 // setting |include_parent_name| to false allows to exclude this information
2926 // from the formatted name.
2927 bool include_parent_name = true;
2928
2929 NameFormattingParams(Object::NameVisibility visibility,
2930 Object::NameDisambiguation name_disambiguation =
2931 Object::NameDisambiguation::kNo)
2932 : name_visibility(visibility),
2933 disambiguate_names(name_disambiguation ==
2934 Object::NameDisambiguation::kYes) {}
2935
2936 static NameFormattingParams DisambiguatedWithoutClassName(
2937 Object::NameVisibility visibility) {
2938 NameFormattingParams params(visibility, Object::NameDisambiguation::kYes);
2939 params.include_class_name = false;
2940 return params;
2941 }
2942
2943 static NameFormattingParams DisambiguatedUnqualified(
2944 Object::NameVisibility visibility) {
2945 NameFormattingParams params(visibility, Object::NameDisambiguation::kYes);
2946 params.include_class_name = false;
2947 params.include_parent_name = false;
2948 return params;
2949 }
2950};
2951
// The flavor of an FFI trampoline function
// (see Function::GetFfiTrampolineKind / SetFfiTrampolineKind).
enum class FfiTrampolineKind : uint8_t {
  // Presumably a Dart -> native call trampoline — confirm against users.
  kCall,
  // Presumably a synchronous native -> Dart callback — confirm against users.
  kSyncCallback,
  // Presumably an asynchronous native -> Dart callback — confirm against
  // users.
  kAsyncCallback,
};
2957
2958class Function : public Object {
2959 public:
2960 StringPtr name() const { return untag()->name(); }
2961 StringPtr UserVisibleName() const; // Same as scrubbed name.
2962 const char* UserVisibleNameCString() const;
2963
2964 const char* NameCString(NameVisibility name_visibility) const;
2965
2966 void PrintName(const NameFormattingParams& params,
2967 BaseTextBuffer* printer) const;
2968 StringPtr QualifiedScrubbedName() const;
2969 const char* QualifiedScrubbedNameCString() const;
2970 StringPtr QualifiedUserVisibleName() const;
2971 const char* QualifiedUserVisibleNameCString() const;
2972
2973 virtual StringPtr DictionaryName() const { return name(); }
2974
2975 StringPtr GetSource() const;
2976
2977 // Set the "C signature" for an FFI trampoline.
2978 // Can only be used on FFI trampolines.
2979 void SetFfiCSignature(const FunctionType& sig) const;
2980
2981 // Retrieves the "C signature" for an FFI trampoline.
2982 // Can only be used on FFI trampolines.
2983 FunctionTypePtr FfiCSignature() const;
2984
2985 bool FfiCSignatureContainsHandles() const;
2986 bool FfiCSignatureReturnsStruct() const;
2987
2988 // Can only be called on FFI trampolines.
2989 // -1 for Dart -> native calls.
2990 int32_t FfiCallbackId() const;
2991
2992 // Should be called when ffi trampoline function object is created.
2993 void AssignFfiCallbackId(int32_t callback_id) const;
2994
2995 // Can only be called on FFI trampolines.
2996 bool FfiIsLeaf() const;
2997
2998 // Can only be called on FFI trampolines.
2999 void SetFfiIsLeaf(bool is_leaf) const;
3000
3001 // Can only be called on FFI trampolines.
3002 // Null for Dart -> native calls.
3003 FunctionPtr FfiCallbackTarget() const;
3004
3005 // Can only be called on FFI trampolines.
3006 void SetFfiCallbackTarget(const Function& target) const;
3007
3008 // Can only be called on FFI trampolines.
3009 // Null for Dart -> native calls.
3010 InstancePtr FfiCallbackExceptionalReturn() const;
3011
3012 // Can only be called on FFI trampolines.
3013 void SetFfiCallbackExceptionalReturn(const Instance& value) const;
3014
3015 // Can only be called on FFI trampolines.
3016 FfiTrampolineKind GetFfiTrampolineKind() const;
3017
3018 // Can only be called on FFI trampolines.
3019 void SetFfiTrampolineKind(FfiTrampolineKind value) const;
3020
3021 // Return the signature of this function.
3022 PRECOMPILER_WSR_FIELD_DECLARATION(FunctionType, signature);
3023 void SetSignature(const FunctionType& value) const;
3024 static intptr_t signature_offset() {
3025 return OFFSET_OF(UntaggedFunction, signature_);
3026 }
3027
3028 // Build a string of the form '<T>(T, {B b, C c}) => R' representing the
3029 // internal signature of the given function. In this example, T is a type
3030 // parameter of this function and R is a type parameter of class C, the owner
3031 // of the function. B and C are not type parameters.
3032 StringPtr InternalSignature() const;
3033
3034 // Build a string of the form '<T>(T, {B b, C c}) => R' representing the
3035 // user visible signature of the given function. In this example, T is a type
3036 // parameter of this function and R is a type parameter of class C, the owner
3037 // of the function. B and C are not type parameters.
3038 // Implicit parameters are hidden.
3039 StringPtr UserVisibleSignature() const;
3040
3041 // Returns true if the signature of this function is instantiated, i.e. if it
3042 // does not involve generic parameter types or generic result type.
3043 // Note that function type parameters declared by this function do not make
3044 // its signature uninstantiated, only type parameters declared by parent
3045 // generic functions or class type parameters.
3046 bool HasInstantiatedSignature(
3047 Genericity genericity = kAny,
3048 intptr_t num_free_fun_type_params = kAllFree) const;
3049
3050 bool IsPrivate() const;
3051
3052 ClassPtr Owner() const;
3053 void set_owner(const Object& value) const;
3054 ScriptPtr script() const;
3055#if !defined(DART_PRECOMPILED_RUNTIME)
3056 KernelProgramInfoPtr KernelProgramInfo() const;
3057#endif
3058 ObjectPtr RawOwner() const { return untag()->owner(); }
3059
3060 // The NNBD mode of the library declaring this function.
3061 // TODO(alexmarkov): nnbd_mode() doesn't work for mixins.
3062 // It should be either removed or fixed.
3063 NNBDMode nnbd_mode() const { return Class::Handle(ptr: Owner()).nnbd_mode(); }
3064
3065 RegExpPtr regexp() const;
3066 intptr_t string_specialization_cid() const;
3067 bool is_sticky_specialization() const;
3068 void SetRegExpData(const RegExp& regexp,
3069 intptr_t string_specialization_cid,
3070 bool sticky) const;
3071
3072 StringPtr native_name() const;
3073 void set_native_name(const String& name) const;
3074
3075 AbstractTypePtr result_type() const {
3076 return signature()->untag()->result_type();
3077 }
3078
3079 // The parameters, starting with NumImplicitParameters() parameters which are
3080 // only visible to the VM, but not to Dart users.
3081 // Note that type checks exclude implicit parameters.
3082 AbstractTypePtr ParameterTypeAt(intptr_t index) const;
3083 ArrayPtr parameter_types() const {
3084 return signature()->untag()->parameter_types();
3085 }
3086
3087 // Outside of the AOT runtime, functions store the names for their positional
3088 // parameters, and delegate storage of the names for named parameters to
3089 // their signature. These methods handle fetching the name from and
3090 // setting the name to the correct location.
3091 StringPtr ParameterNameAt(intptr_t index) const;
3092 // Only valid for positional parameter indexes, as this should be called
3093 // explicitly on the signature for named parameters.
3094 void SetParameterNameAt(intptr_t index, const String& value) const;
3095 // Creates an appropriately sized array in the function to hold positional
3096 // parameter names, using the positional parameter count in the signature.
3097 // Uses same default space as Function::New.
3098 void CreateNameArray(Heap::Space space = Heap::kOld) const;
3099
3100 // Delegates to the signature, which stores the named parameter flags.
3101 bool IsRequiredAt(intptr_t index) const;
3102
3103 // The formal type parameters, their bounds, and defaults, are specified as an
3104 // object of type TypeParameters stored in the signature.
3105 TypeParametersPtr type_parameters() const {
3106 return signature()->untag()->type_parameters();
3107 }
3108
3109 // Returns the number of local type arguments for this function.
3110 intptr_t NumTypeParameters() const;
3111 // Return the cumulative number of type arguments in all parent functions.
3112 intptr_t NumParentTypeArguments() const;
3113 // Return the cumulative number of type arguments for this function, including
3114 // type arguments for all parent functions.
3115 intptr_t NumTypeArguments() const;
3116 // Return whether this function declares local type arguments.
3117 bool IsGeneric() const;
3118 // Returns whether any parent function of this function is generic.
3119 bool HasGenericParent() const { return NumParentTypeArguments() > 0; }
3120
3121 // Return the type parameter declared at index.
3122 TypeParameterPtr TypeParameterAt(
3123 intptr_t index,
3124 Nullability nullability = Nullability::kNonNullable) const;
3125
3126 // Not thread-safe; must be called in the main thread.
3127 // Sets function's code and code's function.
3128 void InstallOptimizedCode(const Code& code) const;
3129 void AttachCode(const Code& value) const;
3130 void SetInstructions(const Code& value) const;
3131 void SetInstructionsSafe(const Code& value) const;
3132 void ClearCode() const;
3133 void ClearCodeSafe() const;
3134
3135 // Disables optimized code and switches to unoptimized code.
3136 void SwitchToUnoptimizedCode() const;
3137
3138 // Ensures that the function has code. If there is no code it compiles the
3139 // unoptimized version of the code. If the code contains errors, it calls
3140 // Exceptions::PropagateError and does not return. Normally returns the
3141 // current code, whether it is optimized or unoptimized.
3142 CodePtr EnsureHasCode() const;
3143
3144 // Disables optimized code and switches to unoptimized code (or the lazy
3145 // compilation stub).
3146 void SwitchToLazyCompiledUnoptimizedCode() const;
3147
3148 // Compiles unoptimized code (if necessary) and attaches it to the function.
3149 void EnsureHasCompiledUnoptimizedCode() const;
3150
3151 // Return the most recently compiled and installed code for this function.
3152 // It is not the only Code object that points to this function.
3153 CodePtr CurrentCode() const { return CurrentCodeOf(function: ptr()); }
3154
3155 bool SafeToClosurize() const;
3156
3157 static CodePtr CurrentCodeOf(const FunctionPtr function) {
3158 return function->untag()->code();
3159 }
3160
3161 CodePtr unoptimized_code() const {
3162#if defined(DART_PRECOMPILED_RUNTIME)
3163 return static_cast<CodePtr>(Object::null());
3164#else
3165 return untag()->unoptimized_code();
3166#endif
3167 }
3168 void set_unoptimized_code(const Code& value) const;
3169 bool HasCode() const;
3170 static bool HasCode(FunctionPtr function);
3171
3172 static intptr_t code_offset() { return OFFSET_OF(UntaggedFunction, code_); }
3173
3174 uword entry_point() const {
3175 return EntryPointOf(function: ptr());
3176 }
3177 static uword EntryPointOf(const FunctionPtr function) {
3178 return function->untag()->entry_point_;
3179 }
3180
3181 static intptr_t entry_point_offset(
3182 CodeEntryKind entry_kind = CodeEntryKind::kNormal) {
3183 switch (entry_kind) {
3184 case CodeEntryKind::kNormal:
3185 return OFFSET_OF(UntaggedFunction, entry_point_);
3186 case CodeEntryKind::kUnchecked:
3187 return OFFSET_OF(UntaggedFunction, unchecked_entry_point_);
3188 default:
3189 UNREACHABLE();
3190 }
3191 }
3192
3193 static intptr_t unchecked_entry_point_offset() {
3194 return OFFSET_OF(UntaggedFunction, unchecked_entry_point_);
3195 }
3196
3197 virtual uword Hash() const;
3198
3199 // Returns true if there is at least one debugger breakpoint
3200 // set in this function.
3201 bool HasBreakpoint() const;
3202
3203 ContextScopePtr context_scope() const;
3204 void set_context_scope(const ContextScope& value) const;
3205
3206 struct AwaiterLink {
3207 // Context depth at which the `@pragma('vm:awaiter-link')` variable
3208 // is located.
3209 uint8_t depth = UntaggedClosureData::kNoAwaiterLinkDepth;
3210 // Context index at which the `@pragma('vm:awaiter-link')` variable
3211 // is located.
3212 uint8_t index = static_cast<uint8_t>(-1);
3213 };
3214
3215 AwaiterLink awaiter_link() const;
3216 void set_awaiter_link(AwaiterLink link) const;
3217 bool HasAwaiterLink() const {
3218 return IsClosureFunction() &&
3219 (awaiter_link().depth != UntaggedClosureData::kNoAwaiterLinkDepth);
3220 }
3221
3222 // Enclosing function of this local function.
3223 FunctionPtr parent_function() const;
3224
3225 using DefaultTypeArgumentsKind =
3226 UntaggedClosureData::DefaultTypeArgumentsKind;
3227
3228 // Returns a canonicalized vector of the type parameters instantiated
3229 // to bounds. If non-generic, the empty type arguments vector is returned.
3230 TypeArgumentsPtr InstantiateToBounds(
3231 Thread* thread,
3232 DefaultTypeArgumentsKind* kind_out = nullptr) const;
3233
3234 // Only usable for closure functions.
3235 DefaultTypeArgumentsKind default_type_arguments_kind() const;
3236 void set_default_type_arguments_kind(DefaultTypeArgumentsKind value) const;
3237
3238 // Enclosing outermost function of this local function.
3239 FunctionPtr GetOutermostFunction() const;
3240
3241 void set_extracted_method_closure(const Function& function) const;
3242 FunctionPtr extracted_method_closure() const;
3243
3244 void set_saved_args_desc(const Array& array) const;
3245 ArrayPtr saved_args_desc() const;
3246
3247 bool HasSavedArgumentsDescriptor() const {
3248 return IsInvokeFieldDispatcher() || IsNoSuchMethodDispatcher();
3249 }
3250
3251 void set_accessor_field(const Field& value) const;
3252 FieldPtr accessor_field() const;
3253
3254 bool IsRegularFunction() const {
3255 return kind() == UntaggedFunction::kRegularFunction;
3256 }
3257
3258 bool IsMethodExtractor() const {
3259 return kind() == UntaggedFunction::kMethodExtractor;
3260 }
3261
3262 bool IsNoSuchMethodDispatcher() const {
3263 return kind() == UntaggedFunction::kNoSuchMethodDispatcher;
3264 }
3265
3266 bool IsRecordFieldGetter() const {
3267 return kind() == UntaggedFunction::kRecordFieldGetter;
3268 }
3269
3270 bool IsInvokeFieldDispatcher() const {
3271 return kind() == UntaggedFunction::kInvokeFieldDispatcher;
3272 }
3273
3274 bool IsDynamicInvokeFieldDispatcher() const {
3275 return IsInvokeFieldDispatcher() &&
3276 IsDynamicInvocationForwarderName(name: name());
3277 }
3278
3279 // Performs all the checks that don't require the current thread first, to
3280 // avoid retrieving it unless they all pass. If you have a handle on the
3281 // current thread, call the version that takes one instead.
3282 bool IsDynamicClosureCallDispatcher() const {
3283 if (!IsDynamicInvokeFieldDispatcher()) return false;
3284 return IsDynamicClosureCallDispatcher(thread: Thread::Current());
3285 }
3286 bool IsDynamicClosureCallDispatcher(Thread* thread) const;
3287
3288 bool IsDynamicInvocationForwarder() const {
3289 return kind() == UntaggedFunction::kDynamicInvocationForwarder;
3290 }
3291
3292 bool IsImplicitGetterOrSetter() const {
3293 return kind() == UntaggedFunction::kImplicitGetter ||
3294 kind() == UntaggedFunction::kImplicitSetter ||
3295 kind() == UntaggedFunction::kImplicitStaticGetter;
3296 }
3297
3298 // Returns true iff an implicit closure function has been created
3299 // for this function.
3300 bool HasImplicitClosureFunction() const {
3301 return implicit_closure_function() != null();
3302 }
3303
3304 // Returns the closure function implicitly created for this function. If none
3305 // exists yet, create one and remember it. Implicit closure functions are
3306 // used in VM Closure instances that represent results of tear-off operations.
3307 FunctionPtr ImplicitClosureFunction() const;
3308 void DropUncompiledImplicitClosureFunction() const;
3309
3310 // Return the closure implicitly created for this function.
3311 // If none exists yet, create one and remember it.
3312 ClosurePtr ImplicitStaticClosure() const;
3313
3314 ClosurePtr ImplicitInstanceClosure(const Instance& receiver) const;
3315
3316 // Returns the target of the implicit closure or null if the target is now
3317 // invalid (e.g., mismatched argument shapes after a reload).
3318 FunctionPtr ImplicitClosureTarget(Zone* zone) const;
3319
3320 FunctionPtr ForwardingTarget() const;
3321 void SetForwardingTarget(const Function& target) const;
3322
  // Decodes this function's kind from the packed kind_tag_ bit field.
  UntaggedFunction::Kind kind() const {
    return untag()->kind_tag_.Read<KindBits>();
  }

  // Decodes the async modifier (none / async / sync* / async*) from
  // kind_tag_.
  UntaggedFunction::AsyncModifier modifier() const {
    return untag()->kind_tag_.Read<ModifierBits>();
  }
3330
3331 static const char* KindToCString(UntaggedFunction::Kind kind);
3332
3333 bool IsConstructor() const {
3334 return kind() == UntaggedFunction::kConstructor;
3335 }
3336 bool IsGenerativeConstructor() const {
3337 return IsConstructor() && !is_static();
3338 }
3339 bool IsImplicitConstructor() const;
3340 bool IsFactory() const { return IsConstructor() && is_static(); }
3341
3342 bool HasThisParameter() const {
3343 return IsDynamicFunction(/*allow_abstract=*/allow_abstract: true) ||
3344 IsGenerativeConstructor() || (IsFieldInitializer() && !is_static());
3345 }
3346
  // Whether this function is invoked via instance-side (dynamic) dispatch.
  // Static members never are; abstract members only when allow_abstract is
  // set. For the remaining functions the answer depends solely on kind().
  bool IsDynamicFunction(bool allow_abstract = false) const {
    if (is_static() || (!allow_abstract && is_abstract())) {
      return false;
    }
    switch (kind()) {
      // Kinds that are dispatched on a receiver.
      case UntaggedFunction::kRegularFunction:
      case UntaggedFunction::kGetterFunction:
      case UntaggedFunction::kSetterFunction:
      case UntaggedFunction::kImplicitGetter:
      case UntaggedFunction::kImplicitSetter:
      case UntaggedFunction::kMethodExtractor:
      case UntaggedFunction::kNoSuchMethodDispatcher:
      case UntaggedFunction::kInvokeFieldDispatcher:
      case UntaggedFunction::kDynamicInvocationForwarder:
      case UntaggedFunction::kRecordFieldGetter:
        return true;
      // Kinds that never use instance-side dispatch.
      case UntaggedFunction::kClosureFunction:
      case UntaggedFunction::kImplicitClosureFunction:
      case UntaggedFunction::kConstructor:
      case UntaggedFunction::kImplicitStaticGetter:
      case UntaggedFunction::kFieldInitializer:
      case UntaggedFunction::kIrregexpFunction:
        return false;
      default:
        UNREACHABLE();
        return false;
    }
  }
  // Whether this function is resolved class-side / top-level. Note this is
  // not simply is_static(): some static-flagged kinds (e.g. closures) are
  // excluded by the switch below.
  bool IsStaticFunction() const {
    if (!is_static()) {
      return false;
    }
    switch (kind()) {
      case UntaggedFunction::kRegularFunction:
      case UntaggedFunction::kGetterFunction:
      case UntaggedFunction::kSetterFunction:
      case UntaggedFunction::kImplicitGetter:
      case UntaggedFunction::kImplicitSetter:
      case UntaggedFunction::kImplicitStaticGetter:
      case UntaggedFunction::kFieldInitializer:
      case UntaggedFunction::kIrregexpFunction:
        return true;
      case UntaggedFunction::kClosureFunction:
      case UntaggedFunction::kImplicitClosureFunction:
      case UntaggedFunction::kConstructor:
      case UntaggedFunction::kMethodExtractor:
      case UntaggedFunction::kNoSuchMethodDispatcher:
      case UntaggedFunction::kInvokeFieldDispatcher:
      case UntaggedFunction::kDynamicInvocationForwarder:
      case UntaggedFunction::kRecordFieldGetter:
        return false;
      default:
        UNREACHABLE();
        return false;
    }
  }
3403
3404 bool NeedsTypeArgumentTypeChecks() const {
3405 return !(is_static() || (kind() == UntaggedFunction::kConstructor));
3406 }
3407
3408 bool NeedsArgumentTypeChecks() const {
3409 return !(is_static() || (kind() == UntaggedFunction::kConstructor));
3410 }
3411
3412 bool NeedsMonomorphicCheckedEntry(Zone* zone) const;
3413 bool HasDynamicCallers(Zone* zone) const;
3414 bool PrologueNeedsArgumentsDescriptor() const;
3415
3416 bool MayHaveUncheckedEntryPoint() const;
3417
  // Source position of the start of this function, or kNoSource in the
  // precompiled runtime (token positions are not retained there).
  TokenPosition token_pos() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    return TokenPosition::kNoSource;
#else
    return untag()->token_pos_;
#endif
  }
  void set_token_pos(TokenPosition value) const;

  // Source position just past the end of this function, or kNoSource in the
  // precompiled runtime.
  TokenPosition end_token_pos() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    return TokenPosition::kNoSource;
#else
    return untag()->end_token_pos_;
#endif
  }
3434 void set_end_token_pos(TokenPosition value) const {
3435#if defined(DART_PRECOMPILED_RUNTIME)
3436 UNREACHABLE();
3437#else
3438 StoreNonPointer(addr: &untag()->end_token_pos_, value);
3439#endif
3440 }
3441
#if !defined(PRODUCT) && \
    (defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME))
  // In precompiler/precompiled non-product builds the token_pos_ slot is
  // (de)serialized through TokenPosition to carry a line value — presumably
  // a line number; confirm against the precompiler before relying on this.
  int32_t line() const {
    return untag()->token_pos_.Serialize();
  }

  void set_line(int32_t line) const {
    StoreNonPointer(&untag()->token_pos_, TokenPosition::Deserialize(line));
  }
#endif
3452
3453 // Returns the size of the source for this function.
3454 intptr_t SourceSize() const;
3455
  // Raw access to the packed_fields_ bit container; unavailable in the
  // precompiled runtime.
  uint32_t packed_fields() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    return untag()->packed_fields_;
#endif
  }
  void set_packed_fields(uint32_t packed_fields) const;
3464
3465 // Returns the number of required positional parameters.
3466 intptr_t num_fixed_parameters() const;
3467 // Returns the number of optional parameters, whether positional or named.
3468 bool HasOptionalParameters() const;
3469 // Returns whether the function has optional named parameters.
3470 bool HasOptionalNamedParameters() const;
3471 // Returns whether the function has required named parameters.
3472 bool HasRequiredNamedParameters() const;
3473 // Returns whether the function has optional positional parameters.
3474 bool HasOptionalPositionalParameters() const;
3475 // Returns the number of optional parameters, or 0 if none.
3476 intptr_t NumOptionalParameters() const;
3477 // Returns the number of optional positional parameters, or 0 if none.
3478 intptr_t NumOptionalPositionalParameters() const;
3479 // Returns the number of optional named parameters, or 0 if none.
3480 intptr_t NumOptionalNamedParameters() const;
3481 // Returns the total number of both required and optional parameters.
3482 intptr_t NumParameters() const;
3483 // Returns the number of implicit parameters, e.g., this for instance methods.
3484 intptr_t NumImplicitParameters() const;
3485
3486 // Returns true if parameters of this function are copied into the frame
3487 // in the function prologue.
3488 bool MakesCopyOfParameters() const {
3489 return HasOptionalParameters() || IsSuspendableFunction();
3490 }
3491
#if defined(DART_PRECOMPILED_RUNTIME)
// In the precompiled runtime the JIT counters do not exist: the offset is
// unreachable, the getter reads as 0, and the setter must never be called.
#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name)                    \
  static intptr_t name##_offset() {                                            \
    UNREACHABLE();                                                             \
    return 0;                                                                  \
  }                                                                            \
  return_type name() const { return 0; }                                       \
                                                                               \
  void set_##name(type value) const { UNREACHABLE(); }
#else
// JIT build: relaxed atomic load/store accessors plus the field offset.
#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name)                    \
  static intptr_t name##_offset() {                                            \
    return OFFSET_OF(UntaggedFunction, name##_);                               \
  }                                                                            \
  return_type name() const {                                                   \
    return LoadNonPointer<type, std::memory_order_relaxed>(&untag()->name##_); \
  }                                                                            \
                                                                               \
  void set_##name(type value) const {                                          \
    StoreNonPointer<type, type, std::memory_order_relaxed>(&untag()->name##_,  \
                                                           value);             \
  }
#endif

  // Expands the accessors above once per JIT counter field.
  JIT_FUNCTION_COUNTERS(DEFINE_GETTERS_AND_SETTERS)

#undef DEFINE_GETTERS_AND_SETTERS
3519
  // Offset of this function in the kernel binary, or 0 in the precompiled
  // runtime where the kernel offset is not retained.
  intptr_t kernel_offset() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    return 0;
#else
    return untag()->kernel_offset_;
#endif
  }
3527
3528 void set_kernel_offset(intptr_t value) const {
3529#if defined(DART_PRECOMPILED_RUNTIME)
3530 UNREACHABLE();
3531#else
3532 ASSERT(value >= 0);
3533 StoreNonPointer(addr: &untag()->kernel_offset_, value);
3534#endif
3535 }
3536
3537 void InheritKernelOffsetFrom(const Function& src) const;
3538 void InheritKernelOffsetFrom(const Field& src) const;
3539
3540 static constexpr intptr_t kMaxInstructionCount = (1 << 16) - 1;
3541
3542 void SetOptimizedInstructionCountClamped(uintptr_t value) const {
3543 if (value > kMaxInstructionCount) value = kMaxInstructionCount;
3544 set_optimized_instruction_count(value);
3545 }
3546
3547 void SetOptimizedCallSiteCountClamped(uintptr_t value) const {
3548 if (value > kMaxInstructionCount) value = kMaxInstructionCount;
3549 set_optimized_call_site_count(value);
3550 }
3551
3552 void SetKernelLibraryAndEvalScript(
3553 const Script& script,
3554 const class KernelProgramInfo& kernel_program_info,
3555 intptr_t index) const;
3556
3557 intptr_t KernelLibraryOffset() const;
3558 intptr_t KernelLibraryIndex() const;
3559
3560 TypedDataViewPtr KernelLibrary() const;
3561
3562 bool IsOptimizable() const;
3563 void SetIsOptimizable(bool value) const;
3564
3565 // Whether this function must be optimized immediately and cannot be compiled
3566 // with the unoptimizing compiler. Such a function must be sure to not
3567 // deoptimize, since we won't generate deoptimization info or register
3568 // dependencies. It will be compiled into optimized code immediately when it's
3569 // run.
3570 bool ForceOptimize() const;
3571
3572 // Whether this function's |recognized_kind| requires optimization.
3573 bool RecognizedKindForceOptimize() const;
3574
3575 bool CanBeInlined() const;
3576
  // Decodes the recognized-method id (MethodRecognizer) from kind_tag_.
  MethodRecognizer::Kind recognized_kind() const {
    return untag()->kind_tag_.Read<RecognizedBits>();
  }
  void set_recognized_kind(MethodRecognizer::Kind value) const;

  // Whether this function was matched to a recognized method (anything other
  // than kUnknown).
  bool IsRecognized() const {
    return recognized_kind() != MethodRecognizer::kUnknown;
  }
3585
3586 bool HasOptimizedCode() const;
3587
3588 // Returns true if the argument counts are valid for calling this function.
3589 // Otherwise, it returns false and the reason (if error_message is not
3590 // nullptr).
3591 bool AreValidArgumentCounts(intptr_t num_type_arguments,
3592 intptr_t num_arguments,
3593 intptr_t num_named_arguments,
3594 String* error_message) const;
3595
3596 // Returns a TypeError if the provided arguments don't match the function
3597 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3598 //
3599 // If the function has a non-null receiver in the arguments, the instantiator
3600 // type arguments are retrieved from the receiver, otherwise the null type
3601 // arguments vector is used.
3602 //
3603 // If the function is generic, the appropriate function type arguments are
3604 // retrieved either from the arguments array or the receiver (if a closure).
3605 // If no function type arguments are available in either location, the bounds
3606 // of the function type parameters are instantiated and used as the function
3607 // type arguments.
3608 //
3609 // The local function type arguments (_not_ parent function type arguments)
3610 // are also checked against the bounds of the corresponding parameters to
3611 // ensure they are appropriate subtypes if the function is generic.
3612 ObjectPtr DoArgumentTypesMatch(const Array& args,
3613 const ArgumentsDescriptor& arg_names) const;
3614
3615 // Returns a TypeError if the provided arguments don't match the function
3616 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3617 //
3618 // If the function is generic, the appropriate function type arguments are
3619 // retrieved either from the arguments array or the receiver (if a closure).
3620 // If no function type arguments are available in either location, the bounds
3621 // of the function type parameters are instantiated and used as the function
3622 // type arguments.
3623 //
3624 // The local function type arguments (_not_ parent function type arguments)
3625 // are also checked against the bounds of the corresponding parameters to
3626 // ensure they are appropriate subtypes if the function is generic.
3627 ObjectPtr DoArgumentTypesMatch(
3628 const Array& args,
3629 const ArgumentsDescriptor& arg_names,
3630 const TypeArguments& instantiator_type_args) const;
3631
3632 // Returns a TypeError if the provided arguments don't match the function
3633 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3634 //
3635 // The local function type arguments (_not_ parent function type arguments)
3636 // are also checked against the bounds of the corresponding parameters to
3637 // ensure they are appropriate subtypes if the function is generic.
3638 ObjectPtr DoArgumentTypesMatch(const Array& args,
3639 const ArgumentsDescriptor& arg_names,
3640 const TypeArguments& instantiator_type_args,
3641 const TypeArguments& function_type_args) const;
3642
3643 // Returns true if the type argument count, total argument count and the names
3644 // of optional arguments are valid for calling this function.
3645 // Otherwise, it returns false and the reason (if error_message is not
3646 // nullptr).
3647 bool AreValidArguments(intptr_t num_type_arguments,
3648 intptr_t num_arguments,
3649 const Array& argument_names,
3650 String* error_message) const;
3651 bool AreValidArguments(const ArgumentsDescriptor& args_desc,
3652 String* error_message) const;
3653
3654 // Fully qualified name uniquely identifying the function under gdb and during
3655 // ast printing. The special ':' character, if present, is replaced by '_'.
3656 const char* ToFullyQualifiedCString() const;
3657
3658 const char* ToLibNamePrefixedQualifiedCString() const;
3659
3660 const char* ToQualifiedCString() const;
3661
3662 static constexpr intptr_t maximum_unboxed_parameter_count() {
3663 // Subtracts one that represents the return value
3664 return UntaggedFunction::UnboxedParameterBitmap::kCapacity - 1;
3665 }
3666
3667 void reset_unboxed_parameters_and_return() const {
3668#if !defined(DART_PRECOMPILED_RUNTIME)
3669 StoreNonPointer(addr: &untag()->unboxed_parameters_info_,
3670 value: UntaggedFunction::UnboxedParameterBitmap());
3671#endif // !defined(DART_PRECOMPILED_RUNTIME)
3672 }
3673
  // Marks parameter `index` (0-based, not counting the return-value slot) as
  // passed unboxed as an integer. JIT-only.
  void set_unboxed_integer_parameter_at(intptr_t index) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
    index++;  // position 0 is reserved for the return value
    // const_cast: mutates the bitmap in place through a const handle.
    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
        &untag()->unboxed_parameters_info_)
        ->SetUnboxedInteger(index);
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }

  // Marks parameter `index` (0-based, not counting the return-value slot) as
  // passed unboxed as a double. JIT-only.
  void set_unboxed_double_parameter_at(intptr_t index) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
    index++;  // position 0 is reserved for the return value
    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
        &untag()->unboxed_parameters_info_)
        ->SetUnboxedDouble(index);
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
3698
  // Marks the return value as produced unboxed as an integer. JIT-only.
  void set_unboxed_integer_return() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
        &untag()->unboxed_parameters_info_)
        ->SetUnboxedInteger(0);  // Position 0 holds the return value.
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }

  // Marks the return value as produced unboxed as a double. JIT-only.
  void set_unboxed_double_return() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
        &untag()->unboxed_parameters_info_)
        ->SetUnboxedDouble(0);  // Position 0 holds the return value.
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }

  // Marks the return value as produced unboxed as a record. JIT-only.
  void set_unboxed_record_return() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    const_cast<UntaggedFunction::UnboxedParameterBitmap*>(
        &untag()->unboxed_parameters_info_)
        ->SetUnboxedRecord(0);  // Position 0 holds the return value.
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
3730
3731 bool is_unboxed_parameter_at(intptr_t index) const {
3732#if !defined(DART_PRECOMPILED_RUNTIME)
3733 ASSERT(index >= 0);
3734 index++; // position 0 is reserved for the return value
3735 return untag()->unboxed_parameters_info_.IsUnboxed(position: index);
3736#else
3737 return false;
3738#endif // !defined(DART_PRECOMPILED_RUNTIME)
3739 }
3740
3741 bool is_unboxed_integer_parameter_at(intptr_t index) const {
3742#if !defined(DART_PRECOMPILED_RUNTIME)
3743 ASSERT(index >= 0);
3744 index++; // position 0 is reserved for the return value
3745 return untag()->unboxed_parameters_info_.IsUnboxedInteger(position: index);
3746#else
3747 return false;
3748#endif // !defined(DART_PRECOMPILED_RUNTIME)
3749 }
3750
3751 bool is_unboxed_double_parameter_at(intptr_t index) const {
3752#if !defined(DART_PRECOMPILED_RUNTIME)
3753 ASSERT(index >= 0);
3754 index++; // position 0 is reserved for the return value
3755 return untag()->unboxed_parameters_info_.IsUnboxedDouble(position: index);
3756#else
3757 return false;
3758#endif // !defined(DART_PRECOMPILED_RUNTIME)
3759 }
3760
3761 bool has_unboxed_return() const {
3762#if !defined(DART_PRECOMPILED_RUNTIME)
3763 return untag()->unboxed_parameters_info_.IsUnboxed(position: 0);
3764#else
3765 return false;
3766#endif // !defined(DART_PRECOMPILED_RUNTIME)
3767 }
3768
3769 bool has_unboxed_integer_return() const {
3770#if !defined(DART_PRECOMPILED_RUNTIME)
3771 return untag()->unboxed_parameters_info_.IsUnboxedInteger(position: 0);
3772#else
3773 return false;
3774#endif // !defined(DART_PRECOMPILED_RUNTIME)
3775 }
3776
3777 bool has_unboxed_double_return() const {
3778#if !defined(DART_PRECOMPILED_RUNTIME)
3779 return untag()->unboxed_parameters_info_.IsUnboxedDouble(position: 0);
3780#else
3781 return false;
3782#endif // !defined(DART_PRECOMPILED_RUNTIME)
3783 }
3784
3785 bool has_unboxed_record_return() const {
3786#if !defined(DART_PRECOMPILED_RUNTIME)
3787 return untag()->unboxed_parameters_info_.IsUnboxedRecord(position: 0);
3788#else
3789 return false;
3790#endif // !defined(DART_PRECOMPILED_RUNTIME)
3791 }
3792
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Whether any parameter is passed unboxed (JIT only; no AOT variant).
  bool HasUnboxedParameters() const {
    return untag()->unboxed_parameters_info_.HasUnboxedParameters();
  }
  // Alias of has_unboxed_return() for call sites preferring this name.
  bool HasUnboxedReturnValue() const { return has_unboxed_return(); }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
3799
  // Whether this function is compiler-generated dispatch or accessor glue:
  // an implicit getter/setter or one of the synthetic dispatcher kinds.
  bool IsDispatcherOrImplicitAccessor() const {
    switch (kind()) {
      case UntaggedFunction::kImplicitGetter:
      case UntaggedFunction::kImplicitSetter:
      case UntaggedFunction::kImplicitStaticGetter:
      case UntaggedFunction::kNoSuchMethodDispatcher:
      case UntaggedFunction::kInvokeFieldDispatcher:
      case UntaggedFunction::kDynamicInvocationForwarder:
        return true;
      default:
        return false;
    }
  }
3813
3814 // Returns true if this function represents an explicit getter function.
3815 bool IsGetterFunction() const {
3816 return kind() == UntaggedFunction::kGetterFunction;
3817 }
3818
3819 // Returns true if this function represents an implicit getter function.
3820 bool IsImplicitGetterFunction() const {
3821 return kind() == UntaggedFunction::kImplicitGetter;
3822 }
3823
3824 // Returns true if this function represents an implicit static getter
3825 // function.
3826 bool IsImplicitStaticGetterFunction() const {
3827 return kind() == UntaggedFunction::kImplicitStaticGetter;
3828 }
3829
3830 // Returns true if this function represents an explicit setter function.
3831 bool IsSetterFunction() const {
3832 return kind() == UntaggedFunction::kSetterFunction;
3833 }
3834
3835 // Returns true if this function represents an implicit setter function.
3836 bool IsImplicitSetterFunction() const {
3837 return kind() == UntaggedFunction::kImplicitSetter;
3838 }
3839
3840 // Returns true if this function represents an initializer for a static or
3841 // instance field. The function returns the initial value and the caller is
3842 // responsible for setting the field.
3843 bool IsFieldInitializer() const {
3844 return kind() == UntaggedFunction::kFieldInitializer;
3845 }
3846
3847 // Returns true if this function represents a (possibly implicit) closure
3848 // function.
3849 bool IsClosureFunction() const {
3850 UntaggedFunction::Kind k = kind();
3851 return (k == UntaggedFunction::kClosureFunction) ||
3852 (k == UntaggedFunction::kImplicitClosureFunction);
3853 }
3854
3855 // Returns true if this function represents a generated irregexp function.
3856 bool IsIrregexpFunction() const {
3857 return kind() == UntaggedFunction::kIrregexpFunction;
3858 }
3859
3860 // Returns true if this function represents an implicit closure function.
3861 bool IsImplicitClosureFunction() const {
3862 return kind() == UntaggedFunction::kImplicitClosureFunction;
3863 }
3864
3865 // Returns true if this function represents a non implicit closure function.
3866 bool IsNonImplicitClosureFunction() const {
3867 return IsClosureFunction() && !IsImplicitClosureFunction();
3868 }
3869
3870 // Returns true if this function represents an implicit static closure
3871 // function.
3872 bool IsImplicitStaticClosureFunction() const {
3873 return IsImplicitClosureFunction() && is_static();
3874 }
3875 static bool IsImplicitStaticClosureFunction(FunctionPtr func);
3876
3877 // Returns true if this function represents an implicit instance closure
3878 // function.
3879 bool IsImplicitInstanceClosureFunction() const {
3880 return IsImplicitClosureFunction() && !is_static();
3881 }
3882
3883 // Returns true if this function has a parent function.
3884 bool HasParent() const { return parent_function() != Function::null(); }
3885
3886 // Returns true if this function is a local function.
3887 bool IsLocalFunction() const {
3888 return !IsImplicitClosureFunction() && HasParent();
3889 }
3890
3891 // Returns true if this function represents an ffi trampoline.
3892 bool IsFfiTrampoline() const {
3893 return kind() == UntaggedFunction::kFfiTrampoline;
3894 }
3895 static bool IsFfiTrampoline(FunctionPtr function) {
3896 NoSafepointScope no_safepoint;
3897 return function->untag()->kind_tag_.Read<KindBits>() ==
3898 UntaggedFunction::kFfiTrampoline;
3899 }
3900
3901 // Returns true for functions which execution can be suspended
3902 // using Suspend/Resume stubs. Such functions have an artificial
3903 // :suspend_state local variable at the fixed location of the frame.
3904 bool IsSuspendableFunction() const {
3905 return modifier() != UntaggedFunction::kNoModifier;
3906 }
3907
3908 // Returns true if this function is marked with 'async' modifier.
3909 bool IsAsyncFunction() const {
3910 return modifier() == UntaggedFunction::kAsync;
3911 }
3912
3913 // Returns true if this function is marked with 'sync*' modifier.
3914 bool IsSyncGenerator() const {
3915 return modifier() == UntaggedFunction::kSyncGen;
3916 }
3917
3918 // Returns true if this function is marked with 'async*' modifier.
3919 bool IsAsyncGenerator() const {
3920 return modifier() == UntaggedFunction::kAsyncGen;
3921 }
3922
3923 bool IsTypedDataViewFactory() const {
3924 if (is_native() && kind() == UntaggedFunction::kConstructor) {
3925 // This is a native factory constructor.
3926 const Class& klass = Class::Handle(ptr: Owner());
3927 return IsTypedDataViewClassId(index: klass.id());
3928 }
3929 return false;
3930 }
3931
3932 bool IsUnmodifiableTypedDataViewFactory() const {
3933 if (is_native() && kind() == UntaggedFunction::kConstructor) {
3934 // This is a native factory constructor.
3935 const Class& klass = Class::Handle(ptr: Owner());
3936 return IsUnmodifiableTypedDataViewClassId(index: klass.id());
3937 }
3938 return false;
3939 }
3940
3941 DART_WARN_UNUSED_RESULT
3942 ErrorPtr VerifyCallEntryPoint() const;
3943
3944 DART_WARN_UNUSED_RESULT
3945 ErrorPtr VerifyClosurizedEntryPoint() const;
3946
3947 static intptr_t InstanceSize() {
3948 return RoundedAllocationSize(size: sizeof(UntaggedFunction));
3949 }
3950
3951 static FunctionPtr New(const FunctionType& signature,
3952 const String& name,
3953 UntaggedFunction::Kind kind,
3954 bool is_static,
3955 bool is_const,
3956 bool is_abstract,
3957 bool is_external,
3958 bool is_native,
3959 const Object& owner,
3960 TokenPosition token_pos,
3961 Heap::Space space = Heap::kOld);
3962
3963 // Allocates a new Function object representing a closure function
3964 // with given kind - kClosureFunction or kImplicitClosureFunction.
3965 static FunctionPtr NewClosureFunctionWithKind(UntaggedFunction::Kind kind,
3966 const String& name,
3967 const Function& parent,
3968 bool is_static,
3969 TokenPosition token_pos,
3970 const Object& owner);
3971
3972 // Allocates a new Function object representing a closure function.
3973 static FunctionPtr NewClosureFunction(const String& name,
3974 const Function& parent,
3975 TokenPosition token_pos);
3976
3977 // Allocates a new Function object representing an implicit closure function.
3978 static FunctionPtr NewImplicitClosureFunction(const String& name,
3979 const Function& parent,
3980 TokenPosition token_pos);
3981
3982 FunctionPtr CreateMethodExtractor(const String& getter_name) const;
3983 FunctionPtr GetMethodExtractor(const String& getter_name) const;
3984
3985 static bool IsDynamicInvocationForwarderName(const String& name);
3986 static bool IsDynamicInvocationForwarderName(StringPtr name);
3987
3988 static StringPtr DemangleDynamicInvocationForwarderName(const String& name);
3989
3990 static StringPtr CreateDynamicInvocationForwarderName(const String& name);
3991
3992#if !defined(DART_PRECOMPILED_RUNTIME)
3993 FunctionPtr CreateDynamicInvocationForwarder(
3994 const String& mangled_name) const;
3995
3996 FunctionPtr GetDynamicInvocationForwarder(const String& mangled_name,
3997 bool allow_add = true) const;
3998#endif
3999
4000 // Slow function, use in asserts to track changes in important library
4001 // functions.
4002 int32_t SourceFingerprint() const;
4003
4004 // Return false and report an error if the fingerprint does not match.
4005 bool CheckSourceFingerprint(int32_t fp, const char* kind = nullptr) const;
4006
4007 // Works with map [deopt-id] -> ICData.
4008 void SaveICDataMap(
4009 const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
4010 const Array& edge_counters_array,
4011 const Array& coverage_array) const;
4012 // Uses 'ic_data_array' to populate the table 'deopt_id_to_ic_data'. Clone
4013 // ic_data (array and descriptor) if 'clone_ic_data' is true.
4014 void RestoreICDataMap(ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
4015 bool clone_ic_data) const;
4016
4017 // ic_data_array attached to the function stores edge counters in the
4018 // first element, coverage data array in the second element and the rest
4019 // are ICData objects.
  struct ICDataArrayIndices {
    // Index of the edge-counter array within ic_data_array.
    static constexpr intptr_t kEdgeCounters = 0;
    // Index of the coverage-data array.
    static constexpr intptr_t kCoverageData = 1;
    // First index holding an ICData object; the remainder follow.
    static constexpr intptr_t kFirstICData = 2;
  };
4025
4026 ArrayPtr ic_data_array() const;
4027 void ClearICDataArray() const;
4028 ICDataPtr FindICData(intptr_t deopt_id) const;
4029
4030 // Coverage data array is a list of pairs:
4031 // element 2 * i + 0 is token position
4032 // element 2 * i + 1 is coverage hit (zero meaning code was not hit)
4033 ArrayPtr GetCoverageArray() const;
4034
4035 // Outputs this function's service ID to the provided JSON object.
4036 void AddFunctionServiceId(const JSONObject& obj) const;
4037
4038 // Sets deopt reason in all ICData-s with given deopt_id.
4039 void SetDeoptReasonForAll(intptr_t deopt_id, ICData::DeoptReasonId reason);
4040
4041 void set_modifier(UntaggedFunction::AsyncModifier value) const;
4042
4043// 'WasCompiled' is true if the function was compiled once in this
4044// VM instantiation. It is independent from presence of type feedback
4045// (ic_data_array) and code, which may be loaded from a snapshot.
4046// 'WasExecuted' is true if the usage counter has ever been positive.
4047// 'ProhibitsInstructionHoisting' is true if this function deoptimized before on
4048// a hoisted instruction.
4049// 'ProhibitsBoundsCheckGeneralization' is true if this function deoptimized
4050// before on a generalized bounds check.
#define STATE_BITS_LIST(V)                                                     \
  V(WasCompiled)                                                               \
  V(WasExecutedBit)                                                            \
  V(ProhibitsInstructionHoisting)                                              \
  V(ProhibitsBoundsCheckGeneralization)

  // One bit position per STATE_BITS_LIST entry.
  enum StateBits {
#define DECLARE_FLAG_POS(Name) k##Name##Pos,
    STATE_BITS_LIST(DECLARE_FLAG_POS)
#undef DECLARE_FLAG_POS
  };
  // A BitField class (Name##Bit) over the uint8_t state bits, one per entry.
#define DEFINE_FLAG_BIT(Name)                                                  \
  class Name##Bit : public BitField<uint8_t, bool, k##Name##Pos, 1> {};
  STATE_BITS_LIST(DEFINE_FLAG_BIT)
#undef DEFINE_FLAG_BIT

  // Set##Name(bool) / Name() accessors for each state bit, backed by
  // state_bits()/set_state_bits().
#define DEFINE_FLAG_ACCESSORS(Name)                                            \
  void Set##Name(bool value) const {                                           \
    set_state_bits(Name##Bit::update(value, state_bits()));                    \
  }                                                                            \
  bool Name() const { return Name##Bit::decode(state_bits()); }
  STATE_BITS_LIST(DEFINE_FLAG_ACCESSORS)
#undef DEFINE_FLAG_ACCESSORS
4074
4075 void SetUsageCounter(intptr_t value) const {
4076 if (usage_counter() > 0) {
4077 SetWasExecuted(true);
4078 }
4079 set_usage_counter(value);
4080 }
4081
4082 bool WasExecuted() const { return (usage_counter() > 0) || WasExecutedBit(); }
4083
4084 void SetWasExecuted(bool value) const { SetWasExecutedBit(value); }
4085
  // Offset of the data_ field within UntaggedFunction.
  static intptr_t data_offset() { return OFFSET_OF(UntaggedFunction, data_); }

  // Offset of the kind_tag_ field within UntaggedFunction.
  static intptr_t kind_tag_offset() {
    return OFFSET_OF(UntaggedFunction, kind_tag_);
  }
4091
4092 // static: Considered during class-side or top-level resolution rather than
4093 // instance-side resolution.
4094 // const: Valid target of a const constructor call.
4095 // abstract: Skipped during instance-side resolution.
4096 // reflectable: Enumerated by mirrors, invocable by mirrors. False for private
4097 // functions of dart: libraries.
4098 // debuggable: Valid location of a breakpoint. Synthetic code is not
4099 // debuggable.
4100 // visible: Frame is included in stack traces. Synthetic code such as
4101 // dispatchers is not visible. Synthetic code that can trigger
4102 // exceptions such as the outer async functions that create Futures
4103 // is visible.
4104 // intrinsic: Has a hand-written assembly prologue.
4105 // inlinable: Candidate for inlining. False for functions with features we
4106 // don't support during inlining (e.g., optional parameters),
4107 // functions which are too big, etc.
4108 // native: Bridge to C/C++ code.
4109 // external: Just a declaration that expects to be defined in another patch
4110 // file.
4111 // polymorphic_target: A polymorphic method.
4112 // has_pragma: Has a @pragma decoration.
4113 // no_such_method_forwarder: A stub method that just calls noSuchMethod.
4114
4115// Bits that are set when function is created, don't have to worry about
4116// concurrent updates.
4117#define FOR_EACH_FUNCTION_KIND_BIT(V) \
4118 V(Static, is_static) \
4119 V(Const, is_const) \
4120 V(Abstract, is_abstract) \
4121 V(Reflectable, is_reflectable) \
4122 V(Visible, is_visible) \
4123 V(Debuggable, is_debuggable) \
4124 V(Intrinsic, is_intrinsic) \
4125 V(Native, is_native) \
4126 V(External, is_external) \
4127 V(PolymorphicTarget, is_polymorphic_target) \
4128 V(HasPragma, has_pragma) \
4129 V(IsSynthetic, is_synthetic) \
4130 V(IsExtensionMember, is_extension_member) \
4131 V(IsRedirectingFactory, is_redirecting_factory)
4132// Bit that is updated after function is constructed, has to be updated in
4133// concurrent-safe manner.
4134#define FOR_EACH_FUNCTION_VOLATILE_KIND_BIT(V) V(Inlinable, is_inlinable)
4135
// Unsynchronized kind_tag_ bit accessors — only for bits set at construction
// time (see FOR_EACH_FUNCTION_KIND_BIT above).
#define DEFINE_ACCESSORS(name, accessor_name)                                  \
  void set_##accessor_name(bool value) const {                                 \
    untag()->kind_tag_.UpdateUnsynchronized<name##Bit>(value);                 \
  }                                                                            \
  bool accessor_name() const { return untag()->kind_tag_.Read<name##Bit>(); }
  FOR_EACH_FUNCTION_KIND_BIT(DEFINE_ACCESSORS)
#undef DEFINE_ACCESSORS

  // Raw-pointer variant of is_visible() usable without a handle.
  static bool is_visible(FunctionPtr f) {
    return f.untag()->kind_tag_.Read<VisibleBit>();
  }

// Synchronized (UpdateBool) accessors — for bits updated after construction
// in a concurrent-safe manner (see FOR_EACH_FUNCTION_VOLATILE_KIND_BIT).
#define DEFINE_ACCESSORS(name, accessor_name)                                  \
  void set_##accessor_name(bool value) const {                                 \
    untag()->kind_tag_.UpdateBool<name##Bit>(value);                           \
  }                                                                            \
  bool accessor_name() const { return untag()->kind_tag_.Read<name##Bit>(); }
  FOR_EACH_FUNCTION_VOLATILE_KIND_BIT(DEFINE_ACCESSORS)
#undef DEFINE_ACCESSORS
4155
  // optimizable: Candidate for going through the optimizing compiler. False for
  //              some functions known to be execute infrequently and functions
  //              which have been de-optimized too many times.
  bool is_optimizable() const {
#if defined(DART_PRECOMPILED_RUNTIME)
    // No optimizing compiler exists at runtime in precompiled mode.
    return false;
#else
    return untag()->packed_fields_.Read<UntaggedFunction::PackedOptimizable>();
#endif
  }
  void set_is_optimizable(bool value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
    // The flag is meaningless without a JIT; callers must not reach here.
    UNREACHABLE();
#else
    untag()->packed_fields_.UpdateBool<UntaggedFunction::PackedOptimizable>(
        value);
#endif
  }
4174
  // Layout of the packed kind_tag_ bit field: the multi-bit fields (function
  // kind, recognized-method id, async modifier) come first, followed by one
  // bit per flag declared in the kind-bit macro lists above. The enum order
  // determines bit positions, so do not reorder entries.
  enum KindTagBits {
    kKindTagPos = 0,
    kKindTagSize = 5,
    kRecognizedTagPos = kKindTagPos + kKindTagSize,
    kRecognizedTagSize = 9,
    kModifierPos = kRecognizedTagPos + kRecognizedTagSize,
    kModifierSize = 2,
    kLastModifierBitPos = kModifierPos + (kModifierSize - 1),
// Single bit sized fields start here.
#define DECLARE_BIT(name, _) k##name##Bit,
    FOR_EACH_FUNCTION_KIND_BIT(DECLARE_BIT)
    FOR_EACH_FUNCTION_VOLATILE_KIND_BIT(DECLARE_BIT)
#undef DECLARE_BIT
    kNumTagBits
  };

  // Every recognized-method id and the complete set of tag bits must fit in
  // the kind_tag_ storage.
  COMPILE_ASSERT(MethodRecognizer::kNumRecognizedMethods <
                 (1 << kRecognizedTagSize));
  COMPILE_ASSERT(kNumTagBits <=
                 (kBitsPerByte *
                  sizeof(decltype(UntaggedFunction::kind_tag_))));

  // Every function kind must be representable in the kind sub-field.
#define ASSERT_FUNCTION_KIND_IN_RANGE(Name) \
  COMPILE_ASSERT(UntaggedFunction::k##Name < (1 << kKindTagSize));
  FOR_EACH_RAW_FUNCTION_KIND(ASSERT_FUNCTION_KIND_IN_RANGE)
#undef ASSERT_FUNCTION_KIND_IN_RANGE

  class KindBits : public BitField<uint32_t,
                                   UntaggedFunction::Kind,
                                   kKindTagPos,
                                   kKindTagSize> {};

  class RecognizedBits : public BitField<uint32_t,
                                         MethodRecognizer::Kind,
                                         kRecognizedTagPos,
                                         kRecognizedTagSize> {};
  class ModifierBits : public BitField<uint32_t,
                                       UntaggedFunction::AsyncModifier,
                                       kModifierPos,
                                       kModifierSize> {};

  // One single-bit BitField accessor class per flag (e.g. StaticBit).
#define DEFINE_BIT(name, _) \
  class name##Bit : public BitField<uint32_t, bool, k##name##Bit, 1> {};
  FOR_EACH_FUNCTION_KIND_BIT(DEFINE_BIT)
  FOR_EACH_FUNCTION_VOLATILE_KIND_BIT(DEFINE_BIT)
#undef DEFINE_BIT
4221
 private:
  // NOTE(review): these two enums appear to enumerate slots in the data()
  // payload for eval functions and native functions respectively, with
  // kLength as the slot count — confirm against the users in object.cc.
  enum class EvalFunctionData {
    kScript,
    kKernelProgramInfo,
    kKernelLibraryIndex,
    kLength,
  };
  enum NativeFunctionData {
    kNativeName,
    kTearOff,
    kLength,
  };
  // Given the provided defaults type arguments, determines which
  // DefaultTypeArgumentsKind applies.
  DefaultTypeArgumentsKind DefaultTypeArgumentsKindFor(
      const TypeArguments& defaults) const;

  void set_ic_data_array(const Array& value) const;
  void set_name(const String& value) const;
  void set_kind(UntaggedFunction::Kind value) const;
  void set_parent_function(const Function& value) const;
  FunctionPtr implicit_closure_function() const;
  void set_implicit_closure_function(const Function& value) const;
  ClosurePtr implicit_static_closure() const;
  void set_implicit_static_closure(const Closure& closure) const;
  ScriptPtr eval_script() const;
  void set_eval_script(const Script& value) const;
  void set_num_optional_parameters(intptr_t value) const; // Encoded value.
  void set_kind_tag(uint32_t value) const;
  bool is_eval_function() const;

#if !defined(DART_PRECOMPILED_RUNTIME)
  ArrayPtr positional_parameter_names() const {
    return untag()->positional_parameter_names();
  }
  void set_positional_parameter_names(const Array& value) const;
#endif

  // Acquire-load of the kind-specific data object attached to this function.
  ObjectPtr data() const { return untag()->data<std::memory_order_acquire>(); }
  void set_data(const Object& value) const;

  static FunctionPtr New(Heap::Space space = Heap::kOld);

  FINAL_HEAP_OBJECT_IMPLEMENTATION(Function, Object);
  friend class Class;
  friend class Parser; // For set_eval_script.
  // UntaggedFunction::VisitFunctionPointers accesses the private constructor of
  // Function.
  friend class UntaggedFunction;
  friend class ClassFinalizer; // To reset parent_function.
  friend class Type; // To adjust parent_function.
  friend class Precompiler; // To access closure data.
  friend class ProgramVisitor; // For set_parameter_types/names.
};
4276
4277class ClosureData : public Object {
4278 public:
4279 static intptr_t InstanceSize() {
4280 return RoundedAllocationSize(size: sizeof(UntaggedClosureData));
4281 }
4282
4283 static intptr_t packed_fields_offset() {
4284 return OFFSET_OF(UntaggedClosureData, packed_fields_);
4285 }
4286
4287 using DefaultTypeArgumentsKind =
4288 UntaggedClosureData::DefaultTypeArgumentsKind;
4289 using PackedDefaultTypeArgumentsKind =
4290 UntaggedClosureData::PackedDefaultTypeArgumentsKind;
4291
4292 static constexpr uint8_t kNoAwaiterLinkDepth =
4293 UntaggedClosureData::kNoAwaiterLinkDepth;
4294
4295 private:
4296 ContextScopePtr context_scope() const { return untag()->context_scope(); }
4297 void set_context_scope(const ContextScope& value) const;
4298
4299 void set_packed_fields(uint32_t value) const {
4300 untag()->packed_fields_ = value;
4301 }
4302
4303 Function::AwaiterLink awaiter_link() const;
4304 void set_awaiter_link(Function::AwaiterLink link) const;
4305
4306 // Enclosing function of this local function.
4307 PRECOMPILER_WSR_FIELD_DECLARATION(Function, parent_function)
4308
4309 ClosurePtr implicit_static_closure() const {
4310 return untag()->closure<std::memory_order_acquire>();
4311 }
4312 void set_implicit_static_closure(const Closure& closure) const;
4313
4314 DefaultTypeArgumentsKind default_type_arguments_kind() const;
4315 void set_default_type_arguments_kind(DefaultTypeArgumentsKind value) const;
4316
4317 static ClosureDataPtr New();
4318
4319 FINAL_HEAP_OBJECT_IMPLEMENTATION(ClosureData, Object);
4320 friend class Class;
4321 friend class Function;
4322 friend class Precompiler; // To wrap parent functions in WSRs.
4323};
4324
// The kind of access an entry-point check permits: everything, nothing, only
// the getter, only the setter, or only invocation. Consumed by e.g.
// Field::VerifyEntryPoint(). NOTE(review): presumably mirrors the options of
// the `@pragma('vm:entry-point', ...)` annotation — confirm.
enum class EntryPointPragma {
  kAlways,
  kNever,
  kGetterOnly,
  kSetterOnly,
  kCallOnly
};
4332
4333class FfiTrampolineData : public Object {
4334 public:
4335 static intptr_t InstanceSize() {
4336 return RoundedAllocationSize(size: sizeof(UntaggedFfiTrampolineData));
4337 }
4338
4339 private:
4340 FunctionTypePtr c_signature() const { return untag()->c_signature(); }
4341 void set_c_signature(const FunctionType& value) const;
4342
4343 FunctionPtr callback_target() const { return untag()->callback_target(); }
4344 void set_callback_target(const Function& value) const;
4345
4346 InstancePtr callback_exceptional_return() const {
4347 return untag()->callback_exceptional_return();
4348 }
4349 void set_callback_exceptional_return(const Instance& value) const;
4350
4351 FfiTrampolineKind trampoline_kind() const {
4352 return static_cast<FfiTrampolineKind>(untag()->trampoline_kind_);
4353 }
4354 void set_trampoline_kind(FfiTrampolineKind kind) const;
4355
4356 int32_t callback_id() const { return untag()->callback_id_; }
4357 void set_callback_id(int32_t value) const;
4358
4359 bool is_leaf() const { return untag()->is_leaf_; }
4360 void set_is_leaf(bool value) const;
4361
4362 static FfiTrampolineDataPtr New();
4363
4364 FINAL_HEAP_OBJECT_IMPLEMENTATION(FfiTrampolineData, Object);
4365 friend class Class;
4366 friend class Function;
4367};
4368
4369class Field : public Object {
4370 public:
4371 // The field that this field was cloned from, or this field itself if it isn't
4372 // a clone. The purpose of cloning is that the fields the background compiler
4373 // sees are consistent.
4374 FieldPtr Original() const;
4375
4376 // Set the original field that this field was cloned from.
4377 void SetOriginal(const Field& value) const;
4378
4379 // Returns whether this field is an original or a clone.
4380 bool IsOriginal() const {
4381 if (IsNull()) {
4382 return true;
4383 }
4384 NoSafepointScope no_safepoint;
4385 return !untag()->owner()->IsField();
4386 }
4387
4388 // Returns a field cloned from 'this'. 'this' is set as the
4389 // original field of result.
4390 FieldPtr CloneFromOriginal() const;
4391
4392 StringPtr name() const { return untag()->name(); }
4393 StringPtr UserVisibleName() const; // Same as scrubbed name.
4394 const char* UserVisibleNameCString() const;
4395 virtual StringPtr DictionaryName() const { return name(); }
4396
4397 uint16_t kind_bits() const {
4398 return LoadNonPointer<uint16_t, std::memory_order_acquire>(
4399 addr: &untag()->kind_bits_);
4400 }
4401
4402 bool is_static() const { return StaticBit::decode(value: kind_bits()); }
4403 bool is_instance() const { return !is_static(); }
4404 bool is_final() const { return FinalBit::decode(value: kind_bits()); }
4405 bool is_const() const { return ConstBit::decode(value: kind_bits()); }
4406 bool is_late() const { return IsLateBit::decode(value: kind_bits()); }
4407 bool is_extension_member() const {
4408 return IsExtensionMemberBit::decode(value: kind_bits());
4409 }
4410 bool needs_load_guard() const {
4411 return NeedsLoadGuardBit::decode(value: kind_bits());
4412 }
4413 bool is_reflectable() const { return ReflectableBit::decode(value: kind_bits()); }
4414 void set_is_reflectable(bool value) const {
4415 ASSERT(IsOriginal());
4416 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4417 set_kind_bits(ReflectableBit::update(value, original: untag()->kind_bits_));
4418 }
4419
4420 bool initializer_changed_after_initialization() const {
4421 return InitializerChangedAfterInitializationBit::decode(value: kind_bits());
4422 }
4423 void set_initializer_changed_after_initialization(bool value) const {
4424 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4425 set_kind_bits(InitializerChangedAfterInitializationBit::update(
4426 value, original: untag()->kind_bits_));
4427 }
4428
4429 bool has_pragma() const { return HasPragmaBit::decode(value: kind_bits()); }
4430 void set_has_pragma(bool value) const {
4431 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4432 set_kind_bits(HasPragmaBit::update(value, original: untag()->kind_bits_));
4433 }
4434
4435 bool is_covariant() const { return CovariantBit::decode(value: kind_bits()); }
4436 void set_is_covariant(bool value) const {
4437 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4438 set_kind_bits(CovariantBit::update(value, original: untag()->kind_bits_));
4439 }
4440
4441 bool is_generic_covariant_impl() const {
4442 return GenericCovariantImplBit::decode(value: kind_bits());
4443 }
4444 void set_is_generic_covariant_impl(bool value) const {
4445 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4446 set_kind_bits(GenericCovariantImplBit::update(value, original: untag()->kind_bits_));
4447 }
4448
4449 intptr_t kernel_offset() const {
4450#if defined(DART_PRECOMPILED_RUNTIME)
4451 return 0;
4452#else
4453 return untag()->kernel_offset_;
4454#endif
4455 }
4456
4457 void set_kernel_offset(intptr_t value) const {
4458#if defined(DART_PRECOMPILED_RUNTIME)
4459 UNREACHABLE();
4460#else
4461 ASSERT(value >= 0);
4462 StoreNonPointer(addr: &untag()->kernel_offset_, value);
4463#endif
4464 }
4465
4466 void InheritKernelOffsetFrom(const Field& src) const;
4467
4468 TypedDataViewPtr KernelLibrary() const;
4469 intptr_t KernelLibraryOffset() const;
4470 intptr_t KernelLibraryIndex() const;
4471
4472 // Called during class finalization.
4473 inline void SetOffset(intptr_t host_offset_in_bytes,
4474 intptr_t target_offset_in_bytes) const;
4475
4476 inline intptr_t HostOffset() const;
4477 static intptr_t host_offset_or_field_id_offset() {
4478 return OFFSET_OF(UntaggedField, host_offset_or_field_id_);
4479 }
4480
4481 inline intptr_t TargetOffset() const;
4482 static inline intptr_t TargetOffsetOf(FieldPtr field);
4483
4484 ObjectPtr StaticConstFieldValue() const;
4485 void SetStaticConstFieldValue(const Instance& value,
4486 bool assert_initializing_store = true) const;
4487
4488 inline ObjectPtr StaticValue() const;
4489 void SetStaticValue(const Object& value) const;
4490
4491 inline intptr_t field_id() const;
4492 inline void set_field_id(intptr_t field_id) const;
4493 inline void set_field_id_unsafe(intptr_t field_id) const;
4494
4495 ClassPtr Owner() const;
4496 ScriptPtr Script() const;
4497#if !defined(DART_PRECOMPILED_RUNTIME)
4498 KernelProgramInfoPtr KernelProgramInfo() const;
4499#endif
4500 ObjectPtr RawOwner() const;
4501
4502 uint32_t Hash() const;
4503
4504 AbstractTypePtr type() const { return untag()->type(); }
4505 // Used by class finalizer, otherwise initialized in constructor.
4506 void SetFieldType(const AbstractType& value) const;
4507 void SetFieldTypeSafe(const AbstractType& value) const;
4508
4509 DART_WARN_UNUSED_RESULT
4510 ErrorPtr VerifyEntryPoint(EntryPointPragma kind) const;
4511
4512 static intptr_t InstanceSize() {
4513 return RoundedAllocationSize(size: sizeof(UntaggedField));
4514 }
4515
4516 static FieldPtr New(const String& name,
4517 bool is_static,
4518 bool is_final,
4519 bool is_const,
4520 bool is_reflectable,
4521 bool is_late,
4522 const Object& owner,
4523 const AbstractType& type,
4524 TokenPosition token_pos,
4525 TokenPosition end_token_pos);
4526
4527 static FieldPtr NewTopLevel(const String& name,
4528 bool is_final,
4529 bool is_const,
4530 bool is_late,
4531 const Object& owner,
4532 TokenPosition token_pos,
4533 TokenPosition end_token_pos);
4534
4535 // Allocate new field object, clone values from this field. The
4536 // original is specified.
4537 FieldPtr Clone(const Field& original) const;
4538
4539 static intptr_t kind_bits_offset() {
4540 return OFFSET_OF(UntaggedField, kind_bits_);
4541 }
4542
4543 TokenPosition token_pos() const { return untag()->token_pos_; }
4544 TokenPosition end_token_pos() const { return untag()->end_token_pos_; }
4545
4546 int32_t SourceFingerprint() const;
4547
4548 StringPtr InitializingExpression() const;
4549
4550 bool has_nontrivial_initializer() const {
4551 return HasNontrivialInitializerBit::decode(value: kind_bits());
4552 }
4553 // Called by parser after allocating field.
4554 void set_has_nontrivial_initializer_unsafe(
4555 bool has_nontrivial_initializer) const {
4556 ASSERT(IsOriginal());
4557 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4558 set_kind_bits(HasNontrivialInitializerBit::update(
4559 value: has_nontrivial_initializer, original: untag()->kind_bits_));
4560 }
4561 void set_has_nontrivial_initializer(bool has_nontrivial_initializer) const {
4562 DEBUG_ASSERT(
4563 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4564 set_has_nontrivial_initializer_unsafe(has_nontrivial_initializer);
4565 }
4566
4567 bool has_initializer() const {
4568 return HasInitializerBit::decode(value: kind_bits());
4569 }
4570 // Called by parser after allocating field.
4571 void set_has_initializer_unsafe(bool has_initializer) const {
4572 ASSERT(IsOriginal());
4573 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4574 set_kind_bits(
4575 HasInitializerBit::update(value: has_initializer, original: untag()->kind_bits_));
4576 }
4577 void set_has_initializer(bool has_initializer) const {
4578 DEBUG_ASSERT(
4579 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4580 set_has_initializer_unsafe(has_initializer);
4581 }
4582
4583 bool has_trivial_initializer() const {
4584 return has_initializer() && !has_nontrivial_initializer();
4585 }
4586
4587 StaticTypeExactnessState static_type_exactness_state() const {
4588 return StaticTypeExactnessState::Decode(
4589 value: LoadNonPointer<int8_t, std::memory_order_relaxed>(
4590 addr: &untag()->static_type_exactness_state_));
4591 }
4592
4593 void set_static_type_exactness_state(StaticTypeExactnessState state) const {
4594 DEBUG_ASSERT(
4595 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4596 set_static_type_exactness_state_unsafe(state);
4597 }
4598
4599 void set_static_type_exactness_state_unsafe(
4600 StaticTypeExactnessState state) const {
4601 StoreNonPointer<int8_t, int8_t, std::memory_order_relaxed>(
4602 addr: &untag()->static_type_exactness_state_, value: state.Encode());
4603 }
4604
4605 static intptr_t static_type_exactness_state_offset() {
4606 return OFFSET_OF(UntaggedField, static_type_exactness_state_);
4607 }
4608
4609 // Return class id that any non-null value read from this field is guaranteed
4610 // to have or kDynamicCid if such class id is not known.
4611 // Stores to this field must update this information hence the name.
4612 intptr_t guarded_cid() const;
4613
4614 void set_guarded_cid(intptr_t cid) const {
4615 DEBUG_ASSERT(
4616 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4617 set_guarded_cid_unsafe(cid);
4618 }
4619 void set_guarded_cid_unsafe(intptr_t cid) const {
4620 StoreNonPointer<ClassIdTagType, ClassIdTagType, std::memory_order_relaxed>(
4621 addr: &untag()->guarded_cid_, value: cid);
4622 }
4623 static intptr_t guarded_cid_offset() {
4624 return OFFSET_OF(UntaggedField, guarded_cid_);
4625 }
4626 // Return the list length that any list stored in this field is guaranteed
4627 // to have. If length is kUnknownFixedLength the length has not
4628 // been determined. If length is kNoFixedLength this field has multiple
4629 // list lengths associated with it and cannot be predicted.
4630 intptr_t guarded_list_length() const;
4631 void set_guarded_list_length_unsafe(intptr_t list_length) const;
4632 void set_guarded_list_length(intptr_t list_length) const {
4633 DEBUG_ASSERT(
4634 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4635 set_guarded_list_length_unsafe(list_length);
4636 }
4637 static intptr_t guarded_list_length_offset() {
4638 return OFFSET_OF(UntaggedField, guarded_list_length_);
4639 }
4640 intptr_t guarded_list_length_in_object_offset() const;
4641 void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const;
4642 void set_guarded_list_length_in_object_offset(intptr_t offset) const {
4643 DEBUG_ASSERT(
4644 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4645 set_guarded_list_length_in_object_offset_unsafe(offset);
4646 }
4647 static intptr_t guarded_list_length_in_object_offset_offset() {
4648 return OFFSET_OF(UntaggedField, guarded_list_length_in_object_offset_);
4649 }
4650
4651 bool needs_length_check() const {
4652 const bool r = guarded_list_length() >= Field::kUnknownFixedLength;
4653 ASSERT(!r || is_final());
4654 return r;
4655 }
4656
4657 bool NeedsSetter() const;
4658 bool NeedsGetter() const;
4659
4660 bool NeedsInitializationCheckOnLoad() const {
4661 return needs_load_guard() || (is_late() && !has_trivial_initializer());
4662 }
4663
4664 const char* GuardedPropertiesAsCString() const;
4665
4666 bool is_unboxed() const {
4667 return UnboxedBit::decode(value: kind_bits());
4668 }
4669
4670 // Field unboxing decisions are based either on static types (JIT) or
4671 // inferred types (AOT). See the callers of this function.
4672 void set_is_unboxed_unsafe(bool b) const {
4673 set_kind_bits(UnboxedBit::update(value: b, original: untag()->kind_bits_));
4674 }
4675
4676 void set_is_unboxed(bool b) const {
4677 DEBUG_ASSERT(
4678 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4679 set_is_unboxed_unsafe(b);
4680 }
4681
4682 enum {
4683 kUnknownLengthOffset = -1,
4684 kUnknownFixedLength = -1,
4685 kNoFixedLength = -2,
4686 };
4687 void set_is_late(bool value) const {
4688 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4689 set_kind_bits(IsLateBit::update(value, original: untag()->kind_bits_));
4690 }
4691 void set_is_extension_member(bool value) const {
4692 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4693 set_kind_bits(IsExtensionMemberBit::update(value, original: untag()->kind_bits_));
4694 }
4695 void set_needs_load_guard(bool value) const {
4696 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4697 set_kind_bits(NeedsLoadGuardBit::update(value, original: untag()->kind_bits_));
4698 }
4699 // Returns false if any value read from this field is guaranteed to be
4700 // not null.
4701 // Internally we is_nullable_ field contains either kNullCid (nullable) or
4702 // kIllegalCid (non-nullable) instead of boolean. This is done to simplify
4703 // guarding sequence in the generated code.
4704 bool is_nullable() const;
4705 void set_is_nullable(bool val) const {
4706 DEBUG_ASSERT(
4707 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
4708 set_is_nullable_unsafe(val);
4709 }
4710 bool is_nullable_unsafe() const {
4711 return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>(
4712 addr: &untag()->is_nullable_) == kNullCid;
4713 }
4714 void set_is_nullable_unsafe(bool val) const {
4715 StoreNonPointer<ClassIdTagType, ClassIdTagType, std::memory_order_relaxed>(
4716 addr: &untag()->is_nullable_, value: val ? kNullCid : kIllegalCid);
4717 }
4718 static intptr_t is_nullable_offset() {
4719 return OFFSET_OF(UntaggedField, is_nullable_);
4720 }
4721
4722 // Record store of the given value into this field. May trigger
4723 // deoptimization of dependent optimized code.
4724 void RecordStore(const Object& value) const;
4725
4726 void InitializeGuardedListLengthInObjectOffset(bool unsafe = false) const;
4727
4728 // Return the list of optimized code objects that were optimized under
4729 // assumptions about guarded class id and nullability of this field.
4730 // These code objects must be deoptimized when field's properties change.
4731 // Code objects are held weakly via an indirection through WeakProperty.
4732 WeakArrayPtr dependent_code() const;
4733 void set_dependent_code(const WeakArray& array) const;
4734
4735 // Add the given code object to the list of dependent ones.
4736 void RegisterDependentCode(const Code& code) const;
4737
4738 // Deoptimize all dependent code objects.
4739 void DeoptimizeDependentCode(bool are_mutators_stopped = false) const;
4740
4741 // Used by background compiler to check consistency of field copy with its
4742 // original.
4743 bool IsConsistentWith(const Field& field) const;
4744
4745 bool IsUninitialized() const;
4746
4747 // Run initializer and set field value.
4748 DART_WARN_UNUSED_RESULT ErrorPtr
4749 InitializeInstance(const Instance& instance) const;
4750 DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const;
4751
4752 // Run initializer only.
4753 DART_WARN_UNUSED_RESULT ObjectPtr EvaluateInitializer() const;
4754
4755 FunctionPtr EnsureInitializerFunction() const;
4756 FunctionPtr InitializerFunction() const {
4757 return untag()->initializer_function<std::memory_order_acquire>();
4758 }
4759 void SetInitializerFunction(const Function& initializer) const;
4760 bool HasInitializerFunction() const;
4761 static intptr_t initializer_function_offset() {
4762 return OFFSET_OF(UntaggedField, initializer_function_);
4763 }
4764
4765 // For static fields only. Constructs a closure that gets/sets the
4766 // field value.
4767 InstancePtr GetterClosure() const;
4768 InstancePtr SetterClosure() const;
4769 InstancePtr AccessorClosure(bool make_setter) const;
4770
4771 // Constructs getter and setter names for fields and vice versa.
4772 static StringPtr GetterName(const String& field_name);
4773 static StringPtr GetterSymbol(const String& field_name);
4774 // Returns String::null() if getter symbol does not exist.
4775 static StringPtr LookupGetterSymbol(const String& field_name);
4776 static StringPtr SetterName(const String& field_name);
4777 static StringPtr SetterSymbol(const String& field_name);
4778 // Returns String::null() if setter symbol does not exist.
4779 static StringPtr LookupSetterSymbol(const String& field_name);
4780 static StringPtr NameFromGetter(const String& getter_name);
4781 static StringPtr NameFromSetter(const String& setter_name);
4782 static StringPtr NameFromInit(const String& init_name);
4783 static bool IsGetterName(const String& function_name);
4784 static bool IsSetterName(const String& function_name);
4785 static bool IsInitName(const String& function_name);
4786
4787 private:
4788 static void InitializeNew(const Field& result,
4789 const String& name,
4790 bool is_static,
4791 bool is_final,
4792 bool is_const,
4793 bool is_reflectable,
4794 bool is_late,
4795 const Object& owner,
4796 TokenPosition token_pos,
4797 TokenPosition end_token_pos);
4798 friend class StoreFieldInstr; // Generated code access to bit field.
4799
4800 enum {
4801 kConstBit = 0,
4802 kStaticBit,
4803 kFinalBit,
4804 kHasNontrivialInitializerBit,
4805 kUnboxedBit,
4806 kReflectableBit,
4807 kInitializerChangedAfterInitializationBit,
4808 kHasPragmaBit,
4809 kCovariantBit,
4810 kGenericCovariantImplBit,
4811 kIsLateBit,
4812 kIsExtensionMemberBit,
4813 kNeedsLoadGuardBit,
4814 kHasInitializerBit,
4815 };
4816 class ConstBit : public BitField<uint16_t, bool, kConstBit, 1> {};
4817 class StaticBit : public BitField<uint16_t, bool, kStaticBit, 1> {};
4818 class FinalBit : public BitField<uint16_t, bool, kFinalBit, 1> {};
4819 class HasNontrivialInitializerBit
4820 : public BitField<uint16_t, bool, kHasNontrivialInitializerBit, 1> {};
4821 class UnboxedBit : public BitField<uint16_t, bool, kUnboxedBit, 1> {};
4822 class ReflectableBit : public BitField<uint16_t, bool, kReflectableBit, 1> {};
4823 class InitializerChangedAfterInitializationBit
4824 : public BitField<uint16_t,
4825 bool,
4826 kInitializerChangedAfterInitializationBit,
4827 1> {};
4828 class HasPragmaBit : public BitField<uint16_t, bool, kHasPragmaBit, 1> {};
4829 class CovariantBit : public BitField<uint16_t, bool, kCovariantBit, 1> {};
4830 class GenericCovariantImplBit
4831 : public BitField<uint16_t, bool, kGenericCovariantImplBit, 1> {};
4832 class IsLateBit : public BitField<uint16_t, bool, kIsLateBit, 1> {};
4833 class IsExtensionMemberBit
4834 : public BitField<uint16_t, bool, kIsExtensionMemberBit, 1> {};
4835 class NeedsLoadGuardBit
4836 : public BitField<uint16_t, bool, kNeedsLoadGuardBit, 1> {};
4837 class HasInitializerBit
4838 : public BitField<uint16_t, bool, kHasInitializerBit, 1> {};
4839
4840 // Force this field's guard to be dynamic and deoptimize dependent code.
4841 void ForceDynamicGuardedCidAndLength() const;
4842
4843 void set_name(const String& value) const;
4844 void set_is_static(bool is_static) const {
4845 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4846 set_kind_bits(StaticBit::update(value: is_static, original: untag()->kind_bits_));
4847 }
4848 void set_is_final(bool is_final) const {
4849 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4850 set_kind_bits(FinalBit::update(value: is_final, original: untag()->kind_bits_));
4851 }
4852 void set_is_const(bool value) const {
4853 // TODO(36097): Once concurrent access is possible ensure updates are safe.
4854 set_kind_bits(ConstBit::update(value, original: untag()->kind_bits_));
4855 }
4856 void set_owner(const Object& value) const { untag()->set_owner(value.ptr()); }
4857 void set_token_pos(TokenPosition token_pos) const {
4858 StoreNonPointer(addr: &untag()->token_pos_, value: token_pos);
4859 }
4860 void set_end_token_pos(TokenPosition token_pos) const {
4861 StoreNonPointer(addr: &untag()->end_token_pos_, value: token_pos);
4862 }
4863 void set_kind_bits(uint16_t value) const {
4864 StoreNonPointer<uint16_t, uint16_t, std::memory_order_release>(
4865 addr: &untag()->kind_bits_, value);
4866 }
4867
4868 static FieldPtr New();
4869
4870 FINAL_HEAP_OBJECT_IMPLEMENTATION(Field, Object);
4871 friend class Class;
4872 friend class UntaggedField;
4873 friend class FieldSerializationCluster;
4874 friend class FieldDeserializationCluster;
4875};
4876
4877class Script : public Object {
4878 public:
4879 StringPtr url() const { return untag()->url(); }
4880 void set_url(const String& value) const;
4881
4882 // The actual url which was loaded from disk, if provided by the embedder.
4883 StringPtr resolved_url() const;
4884 bool HasSource() const;
4885 StringPtr Source() const;
4886 bool IsPartOfDartColonLibrary() const;
4887
4888 GrowableObjectArrayPtr GenerateLineNumberArray() const;
4889
4890 intptr_t line_offset() const { return 0; }
4891 intptr_t col_offset() const { return 0; }
4892 // Returns the max real token position for this script, or kNoSource
4893 // if there is no line starts information.
4894 TokenPosition MaxPosition() const;
4895
4896 // The load time in milliseconds since epoch.
4897 int64_t load_timestamp() const { return untag()->load_timestamp_; }
4898
4899#if !defined(DART_PRECOMPILED_RUNTIME)
4900 // Initializes thie script object from a kernel file.
4901 void InitializeFromKernel(const KernelProgramInfo& info,
4902 intptr_t script_index,
4903 const TypedData& line_starts,
4904 const TypedDataView& constant_coverage) const;
4905#endif
4906
4907 // The index of this script into the [KernelProgramInfo] object's source
4908 // table.
4909 intptr_t kernel_script_index() const { return untag()->kernel_script_index_; }
4910
4911 static intptr_t line_starts_offset() {
4912 return OFFSET_OF(UntaggedScript, line_starts_);
4913 }
4914
4915 TypedDataPtr line_starts() const;
4916
4917#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
4918 TypedDataViewPtr constant_coverage() const;
4919#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
4920
4921 LibraryPtr FindLibrary() const;
4922 StringPtr GetLine(intptr_t line_number, Heap::Space space = Heap::kNew) const;
4923 StringPtr GetSnippet(intptr_t from_line,
4924 intptr_t from_column,
4925 intptr_t to_line,
4926 intptr_t to_column) const;
4927
4928 // For real token positions when line starts are available, returns whether or
4929 // not a GetTokenLocation call would succeed. Returns true for non-real token
4930 // positions or if there is no line starts information.
4931 bool IsValidTokenPosition(TokenPosition token_pos) const;
4932
4933 // Returns whether a line and column could be computed for the given token
4934 // position and, if so, sets *line and *column (if not nullptr).
4935 bool GetTokenLocation(const TokenPosition& token_pos,
4936 intptr_t* line,
4937 intptr_t* column = nullptr) const;
4938
4939 // Returns the length of the token at the given position. If the length cannot
4940 // be determined, returns a negative value.
4941 intptr_t GetTokenLength(const TokenPosition& token_pos) const;
4942
4943 // Returns whether any tokens were found for the given line. When found,
4944 // *first_token_index and *last_token_index are set to the first and
4945 // last token on the line, respectively.
4946 bool TokenRangeAtLine(intptr_t line_number,
4947 TokenPosition* first_token_index,
4948 TokenPosition* last_token_index) const;
4949
4950 static intptr_t InstanceSize() {
4951 return RoundedAllocationSize(size: sizeof(UntaggedScript));
4952 }
4953
4954 static ScriptPtr New(const String& url, const String& source);
4955
4956 static ScriptPtr New(const String& url,
4957 const String& resolved_url,
4958 const String& source);
4959
4960#if !defined(DART_PRECOMPILED_RUNTIME)
4961 void LoadSourceFromKernel(const uint8_t* kernel_buffer,
4962 intptr_t kernel_buffer_len) const;
4963#endif // !defined(DART_PRECOMPILED_RUNTIME)
4964
4965 void CollectTokenPositionsFor() const;
4966 ArrayPtr CollectConstConstructorCoverageFrom() const;
4967
4968 private:
  // The KernelProgramInfo of the kernel component this script belongs to.
  KernelProgramInfoPtr kernel_program_info() const {
    return untag()->kernel_program_info();
  }
4972
4973 void set_debug_positions(const Array& value) const;
4974
4975#if !defined(DART_PRECOMPILED_RUNTIME)
4976 bool HasCachedMaxPosition() const;
4977
4978 void SetHasCachedMaxPosition(bool value) const;
4979 void SetCachedMaxPosition(intptr_t value) const;
4980#endif // !defined(DART_PRECOMPILED_RUNTIME)
4981
4982 void set_resolved_url(const String& value) const;
4983 void set_source(const String& value) const;
4984 void set_load_timestamp(int64_t value) const;
4985 ArrayPtr debug_positions() const;
4986
4987 FINAL_HEAP_OBJECT_IMPLEMENTATION(Script, Object);
4988 friend class Class;
4989 friend class Precompiler;
4990};
4991
// Iterates over the non-null entries of a library's top-level dictionary
// (classes, functions, fields, ...), skipping empty slots.
class DictionaryIterator : public ValueObject {
 public:
  explicit DictionaryIterator(const Library& library);

  bool HasNext() const { return next_ix_ < size_; }

  // Returns next non-null raw object.
  ObjectPtr GetNext();

 private:
  // Advances next_ix_ past null (empty) dictionary slots.
  void MoveToNextObject();

  const Array& array_;
  const int size_; // Number of elements to iterate over.
  int next_ix_; // Index of next element.

  friend class ClassDictionaryIterator;
  DISALLOW_COPY_AND_ASSIGN(DictionaryIterator);
};
5011
// Iterates over the classes in a library's dictionary, optionally including
// the library's top-level class at the end of the iteration.
class ClassDictionaryIterator : public DictionaryIterator {
 public:
  enum IterationKind {
    // TODO(hausner): fix call sites that use kIteratePrivate. There is only
    // one top-level class per library left, not an array to iterate over.
    kIteratePrivate,
    kNoIteratePrivate
  };

  ClassDictionaryIterator(const Library& library,
                          IterationKind kind = kNoIteratePrivate);

  // True while dictionary entries remain or the top-level class has not yet
  // been returned.
  bool HasNext() const {
    return (next_ix_ < size_) || !toplevel_class_.IsNull();
  }

  // Returns a non-null raw class.
  ClassPtr GetNextClass();

 private:
  // Advances next_ix_ to the next class entry in the dictionary.
  void MoveToNextClass();

  // Pending top-level class, cleared once returned by GetNextClass().
  Class& toplevel_class_;

  DISALLOW_COPY_AND_ASSIGN(ClassDictionaryIterator);
};
5038
5039class Library : public Object {
5040 public:
5041 StringPtr name() const { return untag()->name(); }
5042 void SetName(const String& name) const;
5043
5044 StringPtr url() const { return untag()->url(); }
5045 static StringPtr UrlOf(LibraryPtr lib) { return lib->untag()->url(); }
5046 StringPtr private_key() const { return untag()->private_key(); }
5047 bool LoadNotStarted() const {
5048 return untag()->load_state_ == UntaggedLibrary::kAllocated;
5049 }
5050 bool LoadRequested() const {
5051 return untag()->load_state_ == UntaggedLibrary::kLoadRequested;
5052 }
5053 bool LoadInProgress() const {
5054 return untag()->load_state_ == UntaggedLibrary::kLoadInProgress;
5055 }
5056 void SetLoadRequested() const;
5057 void SetLoadInProgress() const;
5058 bool Loaded() const {
5059 return untag()->load_state_ == UntaggedLibrary::kLoaded;
5060 }
5061 void SetLoaded() const;
5062
5063 LoadingUnitPtr loading_unit() const { return untag()->loading_unit(); }
5064 void set_loading_unit(const LoadingUnit& value) const;
5065
5066 static intptr_t InstanceSize() {
5067 return RoundedAllocationSize(size: sizeof(UntaggedLibrary));
5068 }
5069
5070 static LibraryPtr New(const String& url);
5071
5072 ObjectPtr Invoke(const String& selector,
5073 const Array& arguments,
5074 const Array& argument_names,
5075 bool respect_reflectable = true,
5076 bool check_is_entrypoint = false) const;
5077 ObjectPtr InvokeGetter(const String& selector,
5078 bool throw_nsm_if_absent,
5079 bool respect_reflectable = true,
5080 bool check_is_entrypoint = false) const;
5081 ObjectPtr InvokeSetter(const String& selector,
5082 const Instance& argument,
5083 bool respect_reflectable = true,
5084 bool check_is_entrypoint = false) const;
5085
5086 // Evaluate the given expression as if it appeared in an top-level method of
5087 // this library and return the resulting value, or an error object if
5088 // evaluating the expression fails. The method has the formal (type)
5089 // parameters given in (type_)param_names, and is invoked with the (type)
5090 // argument values given in (type_)param_values.
5091 ObjectPtr EvaluateCompiledExpression(
5092 const ExternalTypedData& kernel_buffer,
5093 const Array& type_definitions,
5094 const Array& param_values,
5095 const TypeArguments& type_param_values) const;
5096
5097 // Library scope name dictionary.
5098 //
5099 // TODO(turnidge): The Lookup functions are not consistent in how
5100 // they deal with private names. Go through and make them a bit
5101 // more regular.
5102 void AddClass(const Class& cls) const;
5103 void AddObject(const Object& obj, const String& name) const;
5104 ObjectPtr LookupReExport(
5105 const String& name,
5106 ZoneGrowableArray<intptr_t>* visited = nullptr) const;
5107 ObjectPtr LookupObjectAllowPrivate(const String& name) const;
5108 ObjectPtr LookupLocalOrReExportObject(const String& name) const;
5109 ObjectPtr LookupImportedObject(const String& name) const;
5110 ClassPtr LookupClass(const String& name) const;
5111 ClassPtr LookupClassAllowPrivate(const String& name) const;
5112 ClassPtr SlowLookupClassAllowMultiPartPrivate(const String& name) const;
5113 ClassPtr LookupLocalClass(const String& name) const;
5114 FieldPtr LookupFieldAllowPrivate(const String& name) const;
5115 FieldPtr LookupLocalField(const String& name) const;
5116 FunctionPtr LookupFunctionAllowPrivate(const String& name) const;
5117 FunctionPtr LookupLocalFunction(const String& name) const;
5118 LibraryPrefixPtr LookupLocalLibraryPrefix(const String& name) const;
5119
5120 // Look up a Script based on a url. If 'useResolvedUri' is not provided or is
5121 // false, 'url' should have a 'dart:' scheme for Dart core libraries,
5122 // a 'package:' scheme for packages, and 'file:' scheme otherwise.
5123 //
5124 // If 'useResolvedUri' is true, 'url' should have a 'org-dartlang-sdk:' scheme
5125 // for Dart core libraries and a 'file:' scheme otherwise.
5126 ScriptPtr LookupScript(const String& url, bool useResolvedUri = false) const;
5127 ArrayPtr LoadedScripts() const;
5128
5129 // Resolve name in the scope of this library. First check the cache
5130 // of already resolved names for this library. Then look in the
5131 // local dictionary for the unmangled name N, the getter name get:N
5132 // and setter name set:N.
5133 // If the local dictionary contains no entry for these names,
5134 // look in the scopes of all libraries that are imported
5135 // without a library prefix.
5136 ObjectPtr ResolveName(const String& name) const;
5137
5138 void AddAnonymousClass(const Class& cls) const;
5139
5140 void AddExport(const Namespace& ns) const;
5141
5142 void AddMetadata(const Object& declaration, intptr_t kernel_offset) const;
5143 ObjectPtr GetMetadata(const Object& declaration) const;
5144
5145 // Tries to finds a @pragma annotation on [object].
5146 //
5147 // If successful returns `true`. If an error happens during constant
5148 // evaluation, returns `false.
5149 //
5150 // If [only_core] is true, then the annotations on the object will only
5151 // be inspected if it is part of a core library.
5152 //
5153 // If [multiple] is true, then sets [options] to an GrowableObjectArray
5154 // containing all results and [options] may not be nullptr.
5155 //
5156 // WARNING: If the isolate received an [UnwindError] this function will not
5157 // return and rather unwinds until the enclosing setjmp() handler.
5158 static bool FindPragma(Thread* T,
5159 bool only_core,
5160 const Object& object,
5161 const String& pragma_name,
5162 bool multiple = false,
5163 Object* options = nullptr);
5164
5165 ClassPtr toplevel_class() const { return untag()->toplevel_class(); }
5166 void set_toplevel_class(const Class& value) const;
5167
5168 GrowableObjectArrayPtr used_scripts() const {
5169 return untag()->used_scripts();
5170 }
5171
5172 // Library imports.
5173 ArrayPtr imports() const { return untag()->imports(); }
5174 ArrayPtr exports() const { return untag()->exports(); }
5175 void AddImport(const Namespace& ns) const;
5176 intptr_t num_imports() const { return untag()->num_imports_; }
5177 NamespacePtr ImportAt(intptr_t index) const;
5178 LibraryPtr ImportLibraryAt(intptr_t index) const;
5179
5180 ArrayPtr dependencies() const { return untag()->dependencies(); }
5181 void set_dependencies(const Array& deps) const;
5182
5183 void DropDependenciesAndCaches() const;
5184
5185 // Resolving native methods for script loaded in the library.
5186 Dart_NativeEntryResolver native_entry_resolver() const {
5187 return LoadNonPointer<Dart_NativeEntryResolver, std::memory_order_relaxed>(
5188 addr: &untag()->native_entry_resolver_);
5189 }
5190 void set_native_entry_resolver(Dart_NativeEntryResolver value) const {
5191 StoreNonPointer<Dart_NativeEntryResolver, Dart_NativeEntryResolver,
5192 std::memory_order_relaxed>(addr: &untag()->native_entry_resolver_,
5193 value);
5194 }
5195 Dart_NativeEntrySymbol native_entry_symbol_resolver() const {
5196 return LoadNonPointer<Dart_NativeEntrySymbol, std::memory_order_relaxed>(
5197 addr: &untag()->native_entry_symbol_resolver_);
5198 }
5199 void set_native_entry_symbol_resolver(
5200 Dart_NativeEntrySymbol native_symbol_resolver) const {
5201 StoreNonPointer<Dart_NativeEntrySymbol, Dart_NativeEntrySymbol,
5202 std::memory_order_relaxed>(
5203 addr: &untag()->native_entry_symbol_resolver_, value: native_symbol_resolver);
5204 }
5205
5206 // Resolver for FFI native function pointers.
5207 Dart_FfiNativeResolver ffi_native_resolver() const {
5208 return LoadNonPointer<Dart_FfiNativeResolver, std::memory_order_relaxed>(
5209 addr: &untag()->ffi_native_resolver_);
5210 }
5211 void set_ffi_native_resolver(Dart_FfiNativeResolver value) const {
5212 StoreNonPointer<Dart_FfiNativeResolver, Dart_FfiNativeResolver,
5213 std::memory_order_relaxed>(addr: &untag()->ffi_native_resolver_,
5214 value);
5215 }
5216
5217 bool is_in_fullsnapshot() const {
5218 return UntaggedLibrary::InFullSnapshotBit::decode(value: untag()->flags_);
5219 }
5220 void set_is_in_fullsnapshot(bool value) const {
5221 set_flags(
5222 UntaggedLibrary::InFullSnapshotBit::update(value, original: untag()->flags_));
5223 }
5224
5225 bool is_nnbd() const {
5226 return UntaggedLibrary::NnbdBit::decode(value: untag()->flags_);
5227 }
5228 void set_is_nnbd(bool value) const {
5229 set_flags(UntaggedLibrary::NnbdBit::update(value, original: untag()->flags_));
5230 }
5231
5232 NNBDMode nnbd_mode() const {
5233 return is_nnbd() ? NNBDMode::kOptedInLib : NNBDMode::kLegacyLib;
5234 }
5235
5236 NNBDCompiledMode nnbd_compiled_mode() const {
5237 return static_cast<NNBDCompiledMode>(
5238 UntaggedLibrary::NnbdCompiledModeBits::decode(value: untag()->flags_));
5239 }
5240 void set_nnbd_compiled_mode(NNBDCompiledMode value) const {
5241 set_flags(UntaggedLibrary::NnbdCompiledModeBits::update(
5242 value: static_cast<uint8_t>(value), original: untag()->flags_));
5243 }
5244
5245 StringPtr PrivateName(const String& name) const;
5246
5247 intptr_t index() const { return untag()->index_; }
5248 void set_index(intptr_t value) const {
5249 ASSERT((value == -1) ||
5250 ((value >= 0) && (value < std::numeric_limits<classid_t>::max())));
5251 StoreNonPointer(addr: &untag()->index_, value);
5252 }
5253
5254 void Register(Thread* thread) const;
5255 static void RegisterLibraries(Thread* thread,
5256 const GrowableObjectArray& libs);
5257
5258 bool IsDebuggable() const {
5259 return UntaggedLibrary::DebuggableBit::decode(value: untag()->flags_);
5260 }
5261 void set_debuggable(bool value) const {
5262 set_flags(UntaggedLibrary::DebuggableBit::update(value, original: untag()->flags_));
5263 }
5264
5265 bool is_dart_scheme() const {
5266 return UntaggedLibrary::DartSchemeBit::decode(value: untag()->flags_);
5267 }
5268 void set_is_dart_scheme(bool value) const {
5269 set_flags(UntaggedLibrary::DartSchemeBit::update(value, original: untag()->flags_));
5270 }
5271
5272 // Includes 'dart:async', 'dart:typed_data', etc.
5273 bool IsAnyCoreLibrary() const;
5274
5275 inline intptr_t UrlHash() const;
5276
5277#if !defined(DART_PRECOMPILED_RUNTIME)
5278 KernelProgramInfoPtr kernel_program_info() const {
5279 return untag()->kernel_program_info();
5280 }
5281 void set_kernel_program_info(const KernelProgramInfo& info) const;
5282 TypedDataViewPtr KernelLibrary() const;
5283 intptr_t KernelLibraryOffset() const;
5284#endif
5285
5286 intptr_t kernel_library_index() const {
5287#if defined(DART_PRECOMPILED_RUNTIME)
5288 return 0;
5289#else
5290 return untag()->kernel_library_index_;
5291#endif
5292 }
5293
5294 void set_kernel_library_index(intptr_t value) const {
5295#if defined(DART_PRECOMPILED_RUNTIME)
5296 UNREACHABLE();
5297#else
5298 ASSERT(value >= 0);
5299 StoreNonPointer(addr: &untag()->kernel_library_index_, value);
5300#endif
5301 }
5302
5303 static LibraryPtr LookupLibrary(Thread* thread, const String& url);
5304 static LibraryPtr GetLibrary(intptr_t index);
5305
5306 static void InitCoreLibrary(IsolateGroup* isolate_group);
5307 static void InitNativeWrappersLibrary(IsolateGroup* isolate_group,
5308 bool is_kernel_file);
5309
5310 static LibraryPtr AsyncLibrary();
5311 static LibraryPtr ConvertLibrary();
5312 static LibraryPtr CoreLibrary();
5313 static LibraryPtr CollectionLibrary();
5314 static LibraryPtr DeveloperLibrary();
5315 static LibraryPtr FfiLibrary();
5316 static LibraryPtr InternalLibrary();
5317 static LibraryPtr IsolateLibrary();
5318 static LibraryPtr MathLibrary();
5319#if !defined(DART_PRECOMPILED_RUNTIME)
5320 static LibraryPtr MirrorsLibrary();
5321#endif
5322 static LibraryPtr NativeWrappersLibrary();
5323 static LibraryPtr TypedDataLibrary();
5324 static LibraryPtr VMServiceLibrary();
5325
5326 // Eagerly compile all classes and functions in the library.
5327 static ErrorPtr CompileAll(bool ignore_error = false);
5328#if !defined(DART_PRECOMPILED_RUNTIME)
5329 // Finalize all classes in all libraries.
5330 static ErrorPtr FinalizeAllClasses();
5331#endif
5332
5333#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
5334 // Checks function fingerprints. Prints mismatches and aborts if
5335 // mismatch found.
5336 static void CheckFunctionFingerprints();
5337#endif // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME).
5338
5339 static bool IsPrivate(const String& name);
5340
5341 // Construct the full name of a corelib member.
5342 static const String& PrivateCoreLibName(const String& member);
5343
5344 // Returns true if [name] matches full name of corelib [member].
5345 static bool IsPrivateCoreLibName(const String& name, const String& member);
5346
5347 // Lookup class in the core lib which also contains various VM
5348 // helper methods and classes. Allow look up of private classes.
5349 static ClassPtr LookupCoreClass(const String& class_name);
5350
5351 // Return Function::null() if function does not exist in libs.
5352 static FunctionPtr GetFunction(const GrowableArray<Library*>& libs,
5353 const char* class_name,
5354 const char* function_name);
5355
5356 // Character used to indicate a private identifier.
5357 static const char kPrivateIdentifierStart = '_';
5358
5359 // Character used to separate private identifiers from
5360 // the library-specific key.
5361 static const char kPrivateKeySeparator = '@';
5362
5363 void CheckReload(const Library& replacement,
5364 ProgramReloadContext* context) const;
5365
5366 // Returns a closure of top level function 'name' in the exported namespace
5367 // of this library. If a top level function 'name' does not exist we look
5368 // for a top level getter 'name' that returns a closure.
5369 ObjectPtr GetFunctionClosure(const String& name) const;
5370
5371 // Ensures that all top-level functions and variables (fields) are loaded.
5372 void EnsureTopLevelClassIsFinalized() const;
5373
5374 private:
5375 static constexpr int kInitialImportsCapacity = 4;
5376 static constexpr int kImportsCapacityIncrement = 8;
5377
5378 static LibraryPtr New();
5379
5380 // These methods are only used by the Precompiler to obfuscate
5381 // the name and url.
5382 void set_name(const String& name) const;
5383 void set_url(const String& url) const;
5384 void set_private_key(const String& key) const;
5385
5386 void set_num_imports(intptr_t value) const;
5387 void set_flags(uint8_t flags) const;
5388 bool HasExports() const;
5389 ArrayPtr loaded_scripts() const { return untag()->loaded_scripts(); }
5390 ArrayPtr metadata() const {
5391 DEBUG_ASSERT(
5392 IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
5393 return untag()->metadata();
5394 }
5395 void set_metadata(const Array& value) const;
5396 ArrayPtr dictionary() const { return untag()->dictionary(); }
5397 void InitClassDictionary() const;
5398
5399 ArrayPtr resolved_names() const { return untag()->resolved_names(); }
5400 bool LookupResolvedNamesCache(const String& name, Object* obj) const;
5401 void AddToResolvedNamesCache(const String& name, const Object& obj) const;
5402 void InitResolvedNamesCache() const;
5403 void ClearResolvedNamesCache() const;
5404 void InvalidateResolvedName(const String& name) const;
5405 void InvalidateResolvedNamesCache() const;
5406
5407 ArrayPtr exported_names() const { return untag()->exported_names(); }
5408 bool LookupExportedNamesCache(const String& name, Object* obj) const;
5409 void AddToExportedNamesCache(const String& name, const Object& obj) const;
5410 void InitExportedNamesCache() const;
5411 void ClearExportedNamesCache() const;
5412 static void InvalidateExportedNamesCaches();
5413
5414 void InitImportList() const;
5415 void RehashDictionary(const Array& old_dict, intptr_t new_dict_size) const;
5416 static LibraryPtr NewLibraryHelper(const String& url, bool import_core_lib);
5417 ObjectPtr LookupEntry(const String& name, intptr_t* index) const;
5418 ObjectPtr LookupLocalObjectAllowPrivate(const String& name) const;
5419 ObjectPtr LookupLocalObject(const String& name) const;
5420
5421 void AllocatePrivateKey() const;
5422
5423 FINAL_HEAP_OBJECT_IMPLEMENTATION(Library, Object);
5424
5425 friend class Bootstrap;
5426 friend class Class;
5427 friend class Debugger;
5428 friend class DictionaryIterator;
5429 friend class Isolate;
5430 friend class LibraryDeserializationCluster;
5431 friend class Namespace;
5432 friend class Object;
5433 friend class Precompiler;
5434};
5435
5436// A Namespace contains the names in a library dictionary, filtered by
5437// the show/hide combinators.
5438class Namespace : public Object {
5439 public:
5440 LibraryPtr target() const { return untag()->target(); }
5441 ArrayPtr show_names() const { return untag()->show_names(); }
5442 ArrayPtr hide_names() const { return untag()->hide_names(); }
5443 LibraryPtr owner() const { return untag()->owner(); }
5444
5445 static intptr_t InstanceSize() {
5446 return RoundedAllocationSize(size: sizeof(UntaggedNamespace));
5447 }
5448
5449 bool HidesName(const String& name) const;
5450 ObjectPtr Lookup(const String& name,
5451 ZoneGrowableArray<intptr_t>* trail = nullptr) const;
5452
5453 static NamespacePtr New(const Library& library,
5454 const Array& show_names,
5455 const Array& hide_names,
5456 const Library& owner);
5457
5458 private:
5459 static NamespacePtr New();
5460
5461 FINAL_HEAP_OBJECT_IMPLEMENTATION(Namespace, Object);
5462 friend class Class;
5463 friend class Precompiler;
5464};
5465
5466class KernelProgramInfo : public Object {
5467 public:
5468 static KernelProgramInfoPtr New(const TypedDataBase& kernel_component,
5469 const TypedDataView& string_data,
5470 const TypedDataView& metadata_payload,
5471 const TypedDataView& metadata_mappings,
5472 const TypedDataView& constants_table,
5473 const TypedData& string_offsets,
5474 const TypedData& canonical_names,
5475 const Array& scripts,
5476 const Array& libraries_cache,
5477 const Array& classes_cache);
5478
5479 static intptr_t InstanceSize() {
5480 return RoundedAllocationSize(size: sizeof(UntaggedKernelProgramInfo));
5481 }
5482
5483 TypedDataPtr string_offsets() const { return untag()->string_offsets(); }
5484
5485 TypedDataBasePtr kernel_component() const {
5486 return untag()->kernel_component();
5487 }
5488 TypedDataViewPtr string_data() const { return untag()->string_data(); }
5489
5490 TypedDataPtr canonical_names() const { return untag()->canonical_names(); }
5491
5492 TypedDataViewPtr metadata_payloads() const {
5493 return untag()->metadata_payloads();
5494 }
5495
5496 TypedDataViewPtr metadata_mappings() const {
5497 return untag()->metadata_mappings();
5498 }
5499
5500 intptr_t KernelLibraryStartOffset(intptr_t library_index) const;
5501 intptr_t KernelLibraryEndOffset(intptr_t library_index) const;
5502 TypedDataViewPtr KernelLibrary(intptr_t library_index) const;
5503
5504 TypedDataViewPtr constants_table() const {
5505 return untag()->constants_table();
5506 }
5507
5508 void set_constants_table(const TypedDataView& value) const;
5509
5510 ArrayPtr scripts() const { return untag()->scripts(); }
5511 void set_scripts(const Array& scripts) const;
5512
5513 ArrayPtr constants() const { return untag()->constants(); }
5514 void set_constants(const Array& constants) const;
5515
5516 ScriptPtr ScriptAt(intptr_t index) const;
5517
5518 ArrayPtr libraries_cache() const { return untag()->libraries_cache(); }
5519 void set_libraries_cache(const Array& cache) const;
5520 LibraryPtr LookupLibrary(Thread* thread, const Smi& name_index) const;
5521 LibraryPtr InsertLibrary(Thread* thread,
5522 const Smi& name_index,
5523 const Library& lib) const;
5524
5525 ArrayPtr classes_cache() const { return untag()->classes_cache(); }
5526 void set_classes_cache(const Array& cache) const;
5527 ClassPtr LookupClass(Thread* thread, const Smi& name_index) const;
5528 ClassPtr InsertClass(Thread* thread,
5529 const Smi& name_index,
5530 const Class& klass) const;
5531
5532 private:
5533 static KernelProgramInfoPtr New();
5534
5535 FINAL_HEAP_OBJECT_IMPLEMENTATION(KernelProgramInfo, Object);
5536 friend class Class;
5537};
5538
// ObjectPool contains constants, immediates and addresses referenced by
// generated code and deoptimization infos. Each entry has a type associated
// with it which is stored inline after all the entries.
5542class ObjectPool : public Object {
5543 public:
5544 using EntryType = compiler::ObjectPoolBuilderEntry::EntryType;
5545 using Patchability = compiler::ObjectPoolBuilderEntry::Patchability;
5546 using TypeBits = compiler::ObjectPoolBuilderEntry::TypeBits;
5547 using PatchableBit = compiler::ObjectPoolBuilderEntry::PatchableBit;
5548
5549 struct Entry {
5550 Entry() : raw_value_(), type_() {}
5551 explicit Entry(const Object* obj)
5552 : obj_(obj), type_(EntryType::kTaggedObject) {}
5553 Entry(uword value, EntryType info) : raw_value_(value), type_(info) {}
5554 union {
5555 const Object* obj_;
5556 uword raw_value_;
5557 };
5558 EntryType type_;
5559 };
5560
5561 intptr_t Length() const { return untag()->length_; }
5562 void SetLength(intptr_t value) const {
5563 StoreNonPointer(addr: &untag()->length_, value);
5564 }
5565
5566 static intptr_t length_offset() {
5567 return OFFSET_OF(UntaggedObjectPool, length_);
5568 }
5569 static intptr_t data_offset() {
5570 return OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data);
5571 }
5572 static intptr_t element_offset(intptr_t index) {
5573 return OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data) +
5574 sizeof(UntaggedObjectPool::Entry) * index;
5575 }
5576
5577 struct ArrayTraits {
5578 static intptr_t elements_start_offset() {
5579 return ObjectPool::data_offset();
5580 }
5581
5582 static constexpr intptr_t kElementSize = sizeof(UntaggedObjectPool::Entry);
5583 };
5584
5585 EntryType TypeAt(intptr_t index) const {
5586 ASSERT((index >= 0) && (index <= Length()));
5587 return TypeBits::decode(value: untag()->entry_bits()[index]);
5588 }
5589
5590 Patchability PatchableAt(intptr_t index) const {
5591 ASSERT((index >= 0) && (index <= Length()));
5592 return PatchableBit::decode(value: untag()->entry_bits()[index]);
5593 }
5594
5595 static uint8_t EncodeBits(EntryType type, Patchability patchable) {
5596 return PatchableBit::encode(value: patchable) | TypeBits::encode(value: type);
5597 }
5598
5599 void SetTypeAt(intptr_t index, EntryType type, Patchability patchable) const {
5600 ASSERT(index >= 0 && index <= Length());
5601 const uint8_t bits = EncodeBits(type, patchable);
5602 StoreNonPointer(addr: &untag()->entry_bits()[index], value: bits);
5603 }
5604
5605 template <std::memory_order order = std::memory_order_relaxed>
5606 ObjectPtr ObjectAt(intptr_t index) const {
5607 ASSERT(TypeAt(index) == EntryType::kTaggedObject);
5608 return LoadPointer<ObjectPtr, order>(&(EntryAddr(index)->raw_obj_));
5609 }
5610
5611 template <std::memory_order order = std::memory_order_relaxed>
5612 void SetObjectAt(intptr_t index, const Object& obj) const {
5613 ASSERT((TypeAt(index) == EntryType::kTaggedObject) ||
5614 (TypeAt(index) == EntryType::kImmediate && obj.IsSmi()));
5615 StorePointer<ObjectPtr, order>(&EntryAddr(index)->raw_obj_, obj.ptr());
5616 }
5617
5618 uword RawValueAt(intptr_t index) const {
5619 ASSERT(TypeAt(index) != EntryType::kTaggedObject);
5620 return EntryAddr(index)->raw_value_;
5621 }
5622 void SetRawValueAt(intptr_t index, uword raw_value) const {
5623 ASSERT(TypeAt(index) != EntryType::kTaggedObject);
5624 StoreNonPointer(addr: &EntryAddr(index)->raw_value_, value: raw_value);
5625 }
5626
5627 static intptr_t InstanceSize() {
5628 ASSERT(sizeof(UntaggedObjectPool) ==
5629 OFFSET_OF_RETURNED_VALUE(UntaggedObjectPool, data));
5630 return 0;
5631 }
5632
5633 static constexpr intptr_t kBytesPerElement =
5634 sizeof(UntaggedObjectPool::Entry) + sizeof(uint8_t);
5635 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
5636
5637 static intptr_t InstanceSize(intptr_t len) {
5638 // Ensure that variable length data is not adding to the object length.
5639 ASSERT(sizeof(UntaggedObjectPool) ==
5640 (sizeof(UntaggedObject) + (1 * kWordSize)));
5641 ASSERT(0 <= len && len <= kMaxElements);
5642 return RoundedAllocationSize(size: sizeof(UntaggedObjectPool) +
5643 (len * kBytesPerElement));
5644 }
5645
5646 static ObjectPoolPtr NewFromBuilder(
5647 const compiler::ObjectPoolBuilder& builder);
5648 static ObjectPoolPtr New(intptr_t len);
5649
5650 void CopyInto(compiler::ObjectPoolBuilder* builder) const;
5651
5652 // Returns the pool index from the offset relative to a tagged ObjectPoolPtr,
5653 // adjusting for the tag-bit.
5654 static intptr_t IndexFromOffset(intptr_t offset) {
5655 ASSERT(
5656 Utils::IsAligned(offset + kHeapObjectTag, compiler::target::kWordSize));
5657#if defined(DART_PRECOMPILER)
5658 return (offset + kHeapObjectTag -
5659 compiler::target::ObjectPool::element_offset(0)) /
5660 compiler::target::kWordSize;
5661#else
5662 return (offset + kHeapObjectTag - element_offset(index: 0)) / kWordSize;
5663#endif
5664 }
5665
5666 static intptr_t OffsetFromIndex(intptr_t index) {
5667 return element_offset(index) - kHeapObjectTag;
5668 }
5669
5670 void DebugPrint() const;
5671
5672 private:
5673 UntaggedObjectPool::Entry const* EntryAddr(intptr_t index) const {
5674 ASSERT((index >= 0) && (index < Length()));
5675 return &untag()->data()[index];
5676 }
5677
5678 FINAL_HEAP_OBJECT_IMPLEMENTATION(ObjectPool, Object);
5679 friend class Class;
5680 friend class Object;
5681 friend class UntaggedObjectPool;
5682};
5683
5684class Instructions : public Object {
5685 public:
5686 enum {
5687 kSizePos = 0,
5688 kSizeSize = 31,
5689 kFlagsPos = kSizePos + kSizeSize,
5690 kFlagsSize = 1, // Currently, only flag is single entry flag.
5691 };
5692
5693 class SizeBits : public BitField<uint32_t, uint32_t, kSizePos, kSizeSize> {};
5694 class FlagsBits : public BitField<uint32_t, bool, kFlagsPos, kFlagsSize> {};
5695
5696 // Excludes HeaderSize().
5697 intptr_t Size() const { return SizeBits::decode(value: untag()->size_and_flags_); }
5698 static intptr_t Size(const InstructionsPtr instr) {
5699 return SizeBits::decode(value: instr->untag()->size_and_flags_);
5700 }
5701
5702 bool HasMonomorphicEntry() const {
5703 return FlagsBits::decode(value: untag()->size_and_flags_);
5704 }
5705 static bool HasMonomorphicEntry(const InstructionsPtr instr) {
5706 return FlagsBits::decode(value: instr->untag()->size_and_flags_);
5707 }
5708
5709 uword PayloadStart() const { return PayloadStart(instr: ptr()); }
5710 uword MonomorphicEntryPoint() const { return MonomorphicEntryPoint(instr: ptr()); }
5711 uword EntryPoint() const { return EntryPoint(instr: ptr()); }
5712 static uword PayloadStart(const InstructionsPtr instr) {
5713 return reinterpret_cast<uword>(instr->untag()) + HeaderSize();
5714 }
5715
5716// Note: We keep the checked entrypoint offsets even (emitting NOPs if
5717// necessary) to allow them to be seen as Smis by the GC.
5718#if defined(TARGET_ARCH_IA32)
5719 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 6;
5720 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 36;
5721 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 0;
5722 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 0;
5723#elif defined(TARGET_ARCH_X64)
5724 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 8;
5725 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 42;
5726 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 8;
5727 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 22;
5728#elif defined(TARGET_ARCH_ARM)
5729 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 0;
5730 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 44;
5731 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 0;
5732 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 16;
5733#elif defined(TARGET_ARCH_ARM64)
5734 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 8;
5735 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 52;
5736 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 8;
5737 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 24;
5738#elif defined(TARGET_ARCH_RISCV32)
5739 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 6;
5740 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 44;
5741 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 6;
5742 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 18;
5743#elif defined(TARGET_ARCH_RISCV64)
5744 static constexpr intptr_t kMonomorphicEntryOffsetJIT = 6;
5745 static constexpr intptr_t kPolymorphicEntryOffsetJIT = 44;
5746 static constexpr intptr_t kMonomorphicEntryOffsetAOT = 6;
5747 static constexpr intptr_t kPolymorphicEntryOffsetAOT = 18;
5748#else
5749#error Missing entry offsets for current architecture
5750#endif
5751
  // Address of the monomorphic (checked-receiver) entry point; equals
  // PayloadStart() when there is no separate monomorphic entry.
  static uword MonomorphicEntryPoint(const InstructionsPtr instr) {
    uword entry = PayloadStart(instr);
    if (HasMonomorphicEntry(instr)) {
      // JIT and AOT prologues have different checked-entry sizes.
      entry += !FLAG_precompiled_mode ? kMonomorphicEntryOffsetJIT
                                      : kMonomorphicEntryOffsetAOT;
    }
    return entry;
  }

  // Address of the polymorphic (unchecked) entry point.
  static uword EntryPoint(const InstructionsPtr instr) {
    uword entry = PayloadStart(instr);
    if (HasMonomorphicEntry(instr)) {
      entry += !FLAG_precompiled_mode ? kPolymorphicEntryOffsetJIT
                                      : kPolymorphicEntryOffsetAOT;
    }
    return entry;
  }
5769
5770 static constexpr intptr_t kMaxElements =
5771 (kMaxInt32 - (sizeof(UntaggedInstructions) + sizeof(UntaggedObject) +
5772 (2 * kObjectStartAlignment)));
5773
5774 // Currently, we align bare instruction payloads on 4 byte boundaries.
5775 //
5776 // If we later decide to align on larger boundaries to put entries at the
5777 // start of cache lines, make sure to account for entry points that are
5778 // _not_ at the start of the payload.
5779 static constexpr intptr_t kBarePayloadAlignment = 4;
5780
5781 // When instructions reside in the heap we align the payloads on word
5782 // boundaries.
5783 static constexpr intptr_t kNonBarePayloadAlignment = kWordSize;
5784
5785 // In the precompiled runtime when running in bare instructions mode,
5786 // Instructions objects don't exist, just their bare payloads, so we
5787 // mark them as unreachable in that case.
5788
5789 static intptr_t HeaderSize() {
5790#if defined(DART_PRECOMPILED_RUNTIME)
5791 UNREACHABLE();
5792#endif
5793 return Utils::RoundUp(x: sizeof(UntaggedInstructions),
5794 alignment: kNonBarePayloadAlignment);
5795 }
5796
  // Variable-length object: the fixed-size answer is 0; use
  // InstanceSize(intptr_t) for the real allocation size.
  static intptr_t InstanceSize() {
    ASSERT_EQUAL(sizeof(UntaggedInstructions),
                 OFFSET_OF_RETURNED_VALUE(UntaggedInstructions, data));
    return 0;
  }
5802
5803 static intptr_t InstanceSize(intptr_t size) {
5804#if defined(DART_PRECOMPILED_RUNTIME)
5805 UNREACHABLE();
5806#endif
5807 return RoundedAllocationSize(size: HeaderSize() + size);
5808 }
5809
5810 static InstructionsPtr FromPayloadStart(uword payload_start) {
5811#if defined(DART_PRECOMPILED_RUNTIME)
5812 UNREACHABLE();
5813#endif
5814 return static_cast<InstructionsPtr>(payload_start - HeaderSize() +
5815 kHeapObjectTag);
5816 }
5817
5818 bool Equals(const Instructions& other) const {
5819 return Equals(a: ptr(), b: other.ptr());
5820 }
5821
5822 static bool Equals(InstructionsPtr a, InstructionsPtr b) {
5823 // This method should only be called on non-null Instructions objects.
5824 ASSERT_EQUAL(a->GetClassId(), kInstructionsCid);
5825 ASSERT_EQUAL(b->GetClassId(), kInstructionsCid);
5826 // Don't include the object header tags wholesale in the comparison,
5827 // because the GC tags may differ in JIT mode. In fact, we can skip checking
5828 // the object header entirely, as we're guaranteed that the cids match,
5829 // because there are no subclasses for the Instructions class, and the sizes
5830 // should match if the content size encoded in size_and_flags_ matches.
5831 if (a->untag()->size_and_flags_ != b->untag()->size_and_flags_) {
5832 return false;
5833 }
5834 NoSafepointScope no_safepoint;
5835 return memcmp(s1: a->untag()->data(), s2: b->untag()->data(), n: Size(instr: a)) == 0;
5836 }
5837
5838 uint32_t Hash() const { return Hash(instr: ptr()); }
5839
5840 static uint32_t Hash(const InstructionsPtr instr) {
5841 return HashBytes(bytes: reinterpret_cast<const uint8_t*>(PayloadStart(instr)),
5842 size: Size(instr));
5843 }
5844
5845 CodeStatistics* stats() const;
5846 void set_stats(CodeStatistics* stats) const;
5847
5848 private:
5849 friend struct RelocatorTestHelper;
5850
5851 void SetSize(intptr_t value) const {
5852 ASSERT(value >= 0);
5853 StoreNonPointer(addr: &untag()->size_and_flags_,
5854 value: SizeBits::update(value, original: untag()->size_and_flags_));
5855 }
5856
5857 void SetHasMonomorphicEntry(bool value) const {
5858 StoreNonPointer(addr: &untag()->size_and_flags_,
5859 value: FlagsBits::update(value, original: untag()->size_and_flags_));
5860 }
5861
5862 // New is a private method as RawInstruction and RawCode objects should
5863 // only be created using the Code::FinalizeCode method. This method creates
5864 // the RawInstruction and RawCode objects, sets up the pointer offsets
5865 // and links the two in a GC safe manner.
5866 static InstructionsPtr New(intptr_t size, bool has_monomorphic_entry);
5867
5868 FINAL_HEAP_OBJECT_IMPLEMENTATION(Instructions, Object);
5869 friend class Class;
5870 friend class Code;
5871 friend class AssemblyImageWriter;
5872 friend class BlobImageWriter;
5873 friend class ImageWriter;
5874};
5875
5876// An InstructionsSection contains extra information about serialized AOT
5877// snapshots.
5878//
5879// To avoid changing the embedder to return more information about an AOT
5880// snapshot and possibly disturbing existing clients of that interface, we
5881// serialize a single InstructionsSection object at the start of any text
5882// segments. In bare instructions mode, it also has the benefit of providing
5883// memory accounting for the instructions payloads and avoiding special casing
5884// Images with bare instructions payloads in the GC. Otherwise, it is empty
5885// and the Instructions objects come after it in the Image.
5886class InstructionsSection : public Object {
5887 public:
5888 // Excludes HeaderSize().
5889 static intptr_t Size(const InstructionsSectionPtr instr) {
5890 return instr->untag()->payload_length_;
5891 }
5892 static intptr_t InstanceSize() {
5893 ASSERT(sizeof(UntaggedInstructionsSection) ==
5894 OFFSET_OF_RETURNED_VALUE(UntaggedInstructionsSection, data));
5895 return 0;
5896 }
5897
5898 static intptr_t InstanceSize(intptr_t size) {
5899 return Utils::RoundUp(x: HeaderSize() + size, alignment: kObjectAlignment);
5900 }
5901
5902 static intptr_t HeaderSize() {
5903 return Utils::RoundUp(x: sizeof(UntaggedInstructionsSection),
5904 alignment: Instructions::kBarePayloadAlignment);
5905 }
5906
5907 // There are no public instance methods for the InstructionsSection class, as
5908 // all access to the contents is handled by methods on the Image class.
5909
5910 private:
5911 // Note there are no New() methods for InstructionsSection. Instead, the
5912 // serializer writes the UntaggedInstructionsSection object manually at the
5913 // start of instructions Images in precompiled snapshots.
5914
5915 FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection, Object);
5916 friend class Class;
5917};
5918
5919// Table which maps ranges of machine code to [Code] or
5920// [CompressedStackMaps] objects.
5921// Used in AOT in bare instructions mode.
5922class InstructionsTable : public Object {
5923 public:
5924 static intptr_t InstanceSize() { return sizeof(UntaggedInstructionsTable); }
5925
5926 static InstructionsTablePtr New(intptr_t length,
5927 uword start_pc,
5928 uword end_pc,
5929 uword rodata);
5930
5931 void SetCodeAt(intptr_t index, CodePtr code) const;
5932
5933 bool ContainsPc(uword pc) const { return ContainsPc(table: ptr(), pc); }
5934 static bool ContainsPc(InstructionsTablePtr table, uword pc);
5935
5936 static CodePtr FindCode(InstructionsTablePtr table, uword pc);
5937
5938 static const UntaggedCompressedStackMaps::Payload*
5939 FindStackMap(InstructionsTablePtr table, uword pc, uword* start_pc);
5940
5941 static const UntaggedCompressedStackMaps::Payload* GetCanonicalStackMap(
5942 InstructionsTablePtr table);
5943
5944 const UntaggedInstructionsTable::Data* rodata() const {
5945 return ptr()->untag()->rodata_;
5946 }
5947
5948 // Returns start address of the instructions entry with given index.
5949 uword PayloadStartAt(intptr_t index) const {
5950 return InstructionsTable::PayloadStartAt(table: this->ptr(), index);
5951 }
5952 static uword PayloadStartAt(InstructionsTablePtr table, intptr_t index);
5953
5954 // Returns entry point of the instructions with given index.
5955 uword EntryPointAt(intptr_t index) const;
5956
5957 private:
5958 uword start_pc() const { return InstructionsTable::start_pc(table: this->ptr()); }
5959 static uword start_pc(InstructionsTablePtr table) {
5960 return table->untag()->start_pc_;
5961 }
5962
5963 uword end_pc() const { return InstructionsTable::end_pc(table: this->ptr()); }
5964 static uword end_pc(InstructionsTablePtr table) {
5965 return table->untag()->end_pc_;
5966 }
5967
5968 ArrayPtr code_objects() const { return untag()->code_objects_; }
5969
5970 void set_length(intptr_t value) const;
5971 void set_start_pc(uword value) const;
5972 void set_end_pc(uword value) const;
5973 void set_code_objects(const Array& value) const;
5974 void set_rodata(uword rodata) const;
5975
5976 uint32_t ConvertPcToOffset(uword pc) const {
5977 return InstructionsTable::ConvertPcToOffset(table: this->ptr(), pc);
5978 }
5979 static uint32_t ConvertPcToOffset(InstructionsTablePtr table, uword pc);
5980
5981 static intptr_t FindEntry(InstructionsTablePtr table,
5982 uword pc,
5983 intptr_t start_index = 0);
5984
5985 FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable, Object);
5986 friend class Class;
5987 friend class Deserializer;
5988};
5989
5990class LocalVarDescriptors : public Object {
5991 public:
5992 intptr_t Length() const;
5993
5994 StringPtr GetName(intptr_t var_index) const;
5995
5996 void SetVar(intptr_t var_index,
5997 const String& name,
5998 UntaggedLocalVarDescriptors::VarInfo* info) const;
5999
6000 void GetInfo(intptr_t var_index,
6001 UntaggedLocalVarDescriptors::VarInfo* info) const;
6002
6003 static constexpr intptr_t kBytesPerElement =
6004 sizeof(UntaggedLocalVarDescriptors::VarInfo);
6005 static constexpr intptr_t kMaxElements =
6006 UntaggedLocalVarDescriptors::kMaxIndex;
6007
6008 static intptr_t InstanceSize() {
6009 ASSERT(sizeof(UntaggedLocalVarDescriptors) ==
6010 OFFSET_OF_RETURNED_VALUE(UntaggedLocalVarDescriptors, names));
6011 return 0;
6012 }
6013 static intptr_t InstanceSize(intptr_t len) {
6014 ASSERT(0 <= len && len <= kMaxElements);
6015 return RoundedAllocationSize(
6016 size: sizeof(UntaggedLocalVarDescriptors) +
6017 (len * kWordSize) // RawStrings for names.
6018 + (len * sizeof(UntaggedLocalVarDescriptors::VarInfo)));
6019 }
6020
6021 static LocalVarDescriptorsPtr New(intptr_t num_variables);
6022
6023 static const char* KindToCString(
6024 UntaggedLocalVarDescriptors::VarInfoKind kind);
6025
6026 private:
6027 FINAL_HEAP_OBJECT_IMPLEMENTATION(LocalVarDescriptors, Object);
6028 friend class Class;
6029 friend class Object;
6030};
6031
6032class PcDescriptors : public Object {
6033 public:
6034 static constexpr intptr_t kBytesPerElement = 1;
6035 static constexpr intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
6036
6037 static intptr_t HeaderSize() { return sizeof(UntaggedPcDescriptors); }
6038 static intptr_t UnroundedSize(PcDescriptorsPtr desc) {
6039 return UnroundedSize(len: desc->untag()->length_);
6040 }
6041 static intptr_t UnroundedSize(intptr_t len) { return HeaderSize() + len; }
6042 static intptr_t InstanceSize() {
6043 ASSERT_EQUAL(sizeof(UntaggedPcDescriptors),
6044 OFFSET_OF_RETURNED_VALUE(UntaggedPcDescriptors, data));
6045 return 0;
6046 }
6047 static intptr_t InstanceSize(intptr_t len) {
6048 ASSERT(0 <= len && len <= kMaxElements);
6049 return RoundedAllocationSize(size: UnroundedSize(len));
6050 }
6051
6052 static PcDescriptorsPtr New(const void* delta_encoded_data, intptr_t size);
6053
6054 // Verify (assert) assumptions about pc descriptors in debug mode.
6055 void Verify(const Function& function) const;
6056
6057 static void PrintHeaderString();
6058
6059 void PrintToJSONObject(JSONObject* jsobj, bool ref) const;
6060
6061 // We would have a VisitPointers function here to traverse the
6062 // pc descriptors table to visit objects if any in the table.
6063 // Note: never return a reference to a UntaggedPcDescriptors::PcDescriptorRec
6064 // as the object can move.
6065 class Iterator : public ValueObject {
6066 public:
6067 Iterator(const PcDescriptors& descriptors, intptr_t kind_mask)
6068 : descriptors_(descriptors),
6069 kind_mask_(kind_mask),
6070 byte_index_(0),
6071 cur_pc_offset_(0),
6072 cur_kind_(0),
6073 cur_deopt_id_(0),
6074 cur_token_pos_(0),
6075 cur_try_index_(0),
6076 cur_yield_index_(UntaggedPcDescriptors::kInvalidYieldIndex) {}
6077
6078 bool MoveNext() {
6079 NoSafepointScope scope;
6080 ReadStream stream(descriptors_.untag()->data(), descriptors_.Length(),
6081 byte_index_);
6082 // Moves to record that matches kind_mask_.
6083 while (byte_index_ < descriptors_.Length()) {
6084 const int32_t kind_and_metadata = stream.ReadSLEB128<int32_t>();
6085 cur_kind_ = UntaggedPcDescriptors::KindAndMetadata::DecodeKind(
6086 kind_and_metadata);
6087 cur_try_index_ = UntaggedPcDescriptors::KindAndMetadata::DecodeTryIndex(
6088 kind_and_metadata);
6089 cur_yield_index_ =
6090 UntaggedPcDescriptors::KindAndMetadata::DecodeYieldIndex(
6091 kind_and_metadata);
6092
6093 cur_pc_offset_ += stream.ReadSLEB128();
6094
6095 if (!FLAG_precompiled_mode) {
6096 cur_deopt_id_ += stream.ReadSLEB128();
6097 cur_token_pos_ = Utils::AddWithWrapAround(
6098 a: cur_token_pos_, b: stream.ReadSLEB128<int32_t>());
6099 }
6100 byte_index_ = stream.Position();
6101
6102 if ((cur_kind_ & kind_mask_) != 0) {
6103 return true; // Current is valid.
6104 }
6105 }
6106 return false;
6107 }
6108
6109 uword PcOffset() const { return cur_pc_offset_; }
6110 intptr_t DeoptId() const { return cur_deopt_id_; }
6111 TokenPosition TokenPos() const {
6112 return TokenPosition::Deserialize(value: cur_token_pos_);
6113 }
6114 intptr_t TryIndex() const { return cur_try_index_; }
6115 intptr_t YieldIndex() const { return cur_yield_index_; }
6116 UntaggedPcDescriptors::Kind Kind() const {
6117 return static_cast<UntaggedPcDescriptors::Kind>(cur_kind_);
6118 }
6119
6120 private:
6121 friend class PcDescriptors;
6122
6123 // For nested iterations, starting at element after.
6124 explicit Iterator(const Iterator& iter)
6125 : ValueObject(),
6126 descriptors_(iter.descriptors_),
6127 kind_mask_(iter.kind_mask_),
6128 byte_index_(iter.byte_index_),
6129 cur_pc_offset_(iter.cur_pc_offset_),
6130 cur_kind_(iter.cur_kind_),
6131 cur_deopt_id_(iter.cur_deopt_id_),
6132 cur_token_pos_(iter.cur_token_pos_),
6133 cur_try_index_(iter.cur_try_index_),
6134 cur_yield_index_(iter.cur_yield_index_) {}
6135
6136 const PcDescriptors& descriptors_;
6137 const intptr_t kind_mask_;
6138 intptr_t byte_index_;
6139
6140 intptr_t cur_pc_offset_;
6141 intptr_t cur_kind_;
6142 intptr_t cur_deopt_id_;
6143 int32_t cur_token_pos_;
6144 intptr_t cur_try_index_;
6145 intptr_t cur_yield_index_;
6146 };
6147
6148 intptr_t Length() const;
6149 bool Equals(const PcDescriptors& other) const {
6150 if (Length() != other.Length()) {
6151 return false;
6152 }
6153 NoSafepointScope no_safepoint;
6154 return memcmp(s1: untag(), s2: other.untag(), n: InstanceSize(len: Length())) == 0;
6155 }
6156
6157 private:
6158 static const char* KindAsStr(UntaggedPcDescriptors::Kind kind);
6159
6160 static PcDescriptorsPtr New(intptr_t length);
6161
6162 void SetLength(intptr_t value) const;
6163 void CopyData(const void* bytes, intptr_t size);
6164
6165 FINAL_HEAP_OBJECT_IMPLEMENTATION(PcDescriptors, Object);
6166 friend class Class;
6167 friend class Object;
6168};
6169
6170class CodeSourceMap : public Object {
6171 public:
6172 static constexpr intptr_t kBytesPerElement = 1;
6173 static constexpr intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
6174
6175 static intptr_t HeaderSize() { return sizeof(UntaggedCodeSourceMap); }
6176 static intptr_t UnroundedSize(CodeSourceMapPtr map) {
6177 return UnroundedSize(len: map->untag()->length_);
6178 }
6179 static intptr_t UnroundedSize(intptr_t len) { return HeaderSize() + len; }
6180 static intptr_t InstanceSize() {
6181 ASSERT_EQUAL(sizeof(UntaggedCodeSourceMap),
6182 OFFSET_OF_RETURNED_VALUE(UntaggedCodeSourceMap, data));
6183 return 0;
6184 }
6185 static intptr_t InstanceSize(intptr_t len) {
6186 ASSERT(0 <= len && len <= kMaxElements);
6187 return RoundedAllocationSize(size: UnroundedSize(len));
6188 }
6189
6190 static CodeSourceMapPtr New(intptr_t length);
6191
6192 intptr_t Length() const { return untag()->length_; }
6193 uint8_t* Data() const { return UnsafeMutableNonPointer(addr: &untag()->data()[0]); }
6194
6195 bool Equals(const CodeSourceMap& other) const {
6196 if (Length() != other.Length()) {
6197 return false;
6198 }
6199 NoSafepointScope no_safepoint;
6200 return memcmp(s1: untag(), s2: other.untag(), n: InstanceSize(len: Length())) == 0;
6201 }
6202
6203 uint32_t Hash() const {
6204 NoSafepointScope no_safepoint;
6205 return HashBytes(bytes: Data(), size: Length());
6206 }
6207
6208 void PrintToJSONObject(JSONObject* jsobj, bool ref) const;
6209
6210 private:
6211 void SetLength(intptr_t value) const;
6212
6213 FINAL_HEAP_OBJECT_IMPLEMENTATION(CodeSourceMap, Object);
6214 friend class Class;
6215 friend class Object;
6216};
6217
6218class CompressedStackMaps : public Object {
6219 public:
6220 uintptr_t payload_size() const { return PayloadSizeOf(raw: ptr()); }
6221 static uintptr_t PayloadSizeOf(const CompressedStackMapsPtr raw) {
6222 return UntaggedCompressedStackMaps::SizeField::decode(
6223 value: raw->untag()->payload()->flags_and_size());
6224 }
6225
6226 const uint8_t* data() const { return ptr()->untag()->payload()->data(); }
6227
6228 // Methods to allow use with PointerKeyValueTrait to create sets of CSMs.
6229 bool Equals(const CompressedStackMaps& other) const {
6230 // All of the table flags and payload size must match.
6231 if (untag()->payload()->flags_and_size() !=
6232 other.untag()->payload()->flags_and_size()) {
6233 return false;
6234 }
6235 NoSafepointScope no_safepoint;
6236 return memcmp(s1: untag(), s2: other.untag(), n: InstanceSize(length: payload_size())) == 0;
6237 }
6238 uword Hash() const;
6239
6240 static intptr_t HeaderSize() {
6241 return sizeof(UntaggedCompressedStackMaps) +
6242 sizeof(UntaggedCompressedStackMaps::Payload::FlagsAndSizeHeader);
6243 }
6244 static intptr_t UnroundedSize(CompressedStackMapsPtr maps) {
6245 return UnroundedSize(length: CompressedStackMaps::PayloadSizeOf(raw: maps));
6246 }
6247 static intptr_t UnroundedSize(intptr_t length) {
6248 return HeaderSize() + length;
6249 }
6250 static intptr_t InstanceSize() { return 0; }
6251 static intptr_t InstanceSize(intptr_t length) {
6252 return RoundedAllocationSize(size: UnroundedSize(length));
6253 }
6254
6255 bool UsesGlobalTable() const { return UsesGlobalTable(raw: ptr()); }
6256 static bool UsesGlobalTable(const CompressedStackMapsPtr raw) {
6257 return UntaggedCompressedStackMaps::UsesTableBit::decode(
6258 value: raw->untag()->payload()->flags_and_size());
6259 }
6260
6261 bool IsGlobalTable() const { return IsGlobalTable(raw: ptr()); }
6262 static bool IsGlobalTable(const CompressedStackMapsPtr raw) {
6263 return UntaggedCompressedStackMaps::GlobalTableBit::decode(
6264 value: raw->untag()->payload()->flags_and_size());
6265 }
6266
6267 static CompressedStackMapsPtr NewInlined(const void* payload, intptr_t size) {
6268 return New(payload, size, /*is_global_table=*/is_global_table: false,
6269 /*uses_global_table=*/uses_global_table: false);
6270 }
6271 static CompressedStackMapsPtr NewUsingTable(const void* payload,
6272 intptr_t size) {
6273 return New(payload, size, /*is_global_table=*/is_global_table: false,
6274 /*uses_global_table=*/uses_global_table: true);
6275 }
6276
6277 static CompressedStackMapsPtr NewGlobalTable(const void* payload,
6278 intptr_t size) {
6279 return New(payload, size, /*is_global_table=*/is_global_table: true,
6280 /*uses_global_table=*/uses_global_table: false);
6281 }
6282
6283 class RawPayloadHandle {
6284 public:
6285 RawPayloadHandle() {}
6286 RawPayloadHandle(const RawPayloadHandle&) = default;
6287 RawPayloadHandle& operator=(const RawPayloadHandle&) = default;
6288
6289 const UntaggedCompressedStackMaps::Payload* payload() const {
6290 return payload_;
6291 }
6292 bool IsNull() const { return payload_ == nullptr; }
6293
6294 RawPayloadHandle& operator=(
6295 const UntaggedCompressedStackMaps::Payload* payload) {
6296 payload_ = payload;
6297 return *this;
6298 }
6299
6300 RawPayloadHandle& operator=(const CompressedStackMaps& maps) {
6301 ASSERT(!maps.IsNull());
6302 payload_ = maps.untag()->payload();
6303 return *this;
6304 }
6305
6306 RawPayloadHandle& operator=(CompressedStackMapsPtr maps) {
6307 ASSERT(maps != CompressedStackMaps::null());
6308 payload_ = maps.untag()->payload();
6309 return *this;
6310 }
6311
6312 uintptr_t payload_size() const {
6313 return UntaggedCompressedStackMaps::SizeField::decode(
6314 value: payload()->flags_and_size());
6315 }
6316 const uint8_t* data() const { return payload()->data(); }
6317
6318 bool UsesGlobalTable() const {
6319 return UntaggedCompressedStackMaps::UsesTableBit::decode(
6320 value: payload()->flags_and_size());
6321 }
6322
6323 bool IsGlobalTable() const {
6324 return UntaggedCompressedStackMaps::GlobalTableBit::decode(
6325 value: payload()->flags_and_size());
6326 }
6327
6328 private:
6329 const UntaggedCompressedStackMaps::Payload* payload_ = nullptr;
6330 };
6331
6332 template <typename PayloadHandle>
6333 class Iterator {
6334 public:
6335 Iterator(const PayloadHandle& maps, const PayloadHandle& global_table)
6336 : maps_(maps),
6337 bits_container_(maps.UsesGlobalTable() ? global_table : maps) {
6338 ASSERT(!maps_.IsNull());
6339 ASSERT(!bits_container_.IsNull());
6340 ASSERT(!maps_.IsGlobalTable());
6341 ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
6342 }
6343
6344 Iterator(const Iterator& it)
6345 : maps_(it.maps_),
6346 bits_container_(it.bits_container_),
6347 next_offset_(it.next_offset_),
6348 current_pc_offset_(it.current_pc_offset_),
6349 current_global_table_offset_(it.current_global_table_offset_),
6350 current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
6351 current_non_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
6352 current_bits_offset_(it.current_bits_offset_) {}
6353
6354 // Loads the next entry from [maps_], if any. If [maps_] is the null value,
6355 // this always returns false.
6356 bool MoveNext() {
6357 if (next_offset_ >= maps_.payload_size()) {
6358 return false;
6359 }
6360
6361 NoSafepointScope scope;
6362 ReadStream stream(maps_.data(), maps_.payload_size(), next_offset_);
6363
6364 auto const pc_delta = stream.ReadLEB128();
6365 ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
6366 current_pc_offset_ += pc_delta;
6367
6368 // Table-using CSMs have a table offset after the PC offset delta, whereas
6369 // the post-delta part of inlined entries has the same information as
6370 // global table entries.
6371 // See comments in UntaggedCompressedStackMaps for description of
6372 // encoding.
6373 if (maps_.UsesGlobalTable()) {
6374 current_global_table_offset_ = stream.ReadLEB128();
6375 ASSERT(current_global_table_offset_ < bits_container_.payload_size());
6376
6377 // Since generally we only use entries in the GC and the GC only needs
6378 // the rest of the entry information if the PC offset matches, we lazily
6379 // load and cache the information stored in the global object when it is
6380 // actually requested.
6381 current_spill_slot_bit_count_ = -1;
6382 current_non_spill_slot_bit_count_ = -1;
6383 current_bits_offset_ = -1;
6384
6385 next_offset_ = stream.Position();
6386 } else {
6387 current_spill_slot_bit_count_ = stream.ReadLEB128();
6388 ASSERT(current_spill_slot_bit_count_ >= 0);
6389
6390 current_non_spill_slot_bit_count_ = stream.ReadLEB128();
6391 ASSERT(current_non_spill_slot_bit_count_ >= 0);
6392
6393 const auto stackmap_bits =
6394 current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
6395 const uintptr_t stackmap_size =
6396 Utils::RoundUp(x: stackmap_bits, alignment: kBitsPerByte) >> kBitsPerByteLog2;
6397 ASSERT(stackmap_size <= (maps_.payload_size() - stream.Position()));
6398
6399 current_bits_offset_ = stream.Position();
6400 next_offset_ = current_bits_offset_ + stackmap_size;
6401 }
6402
6403 return true;
6404 }
6405
6406 // Finds the entry with the given PC offset starting at the current position
6407 // of the iterator. If [maps_] is the null value, this always returns false.
6408 bool Find(uint32_t pc_offset) {
6409 // We should never have an entry with a PC offset of 0 inside an
6410 // non-empty CSM, so fail.
6411 if (pc_offset == 0) return false;
6412 do {
6413 if (current_pc_offset_ >= pc_offset) break;
6414 } while (MoveNext());
6415 return current_pc_offset_ == pc_offset;
6416 }
6417
6418 // Methods for accessing parts of an entry should not be called until
6419 // a successful MoveNext() or Find() call has been made.
6420
6421 // Returns the PC offset of the loaded entry.
6422 uint32_t pc_offset() const {
6423 ASSERT(HasLoadedEntry());
6424 return current_pc_offset_;
6425 }
6426
6427 // Returns the bit length of the loaded entry.
6428 intptr_t Length() const {
6429 EnsureFullyLoadedEntry();
6430 return current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
6431 }
6432 // Returns the number of spill slot bits of the loaded entry.
6433 intptr_t SpillSlotBitCount() const {
6434 EnsureFullyLoadedEntry();
6435 return current_spill_slot_bit_count_;
6436 }
6437 // Returns whether the stack entry represented by the offset contains
6438 // a tagged object.
6439 bool IsObject(intptr_t bit_index) const {
6440 EnsureFullyLoadedEntry();
6441 ASSERT(bit_index >= 0 && bit_index < Length());
6442 const intptr_t byte_index = bit_index >> kBitsPerByteLog2;
6443 const intptr_t bit_remainder = bit_index & (kBitsPerByte - 1);
6444 uint8_t byte_mask = 1U << bit_remainder;
6445 const intptr_t byte_offset = current_bits_offset_ + byte_index;
6446 NoSafepointScope scope;
6447 return (bits_container_.data()[byte_offset] & byte_mask) != 0;
6448 }
6449
6450 private:
6451 bool HasLoadedEntry() const { return next_offset_ > 0; }
6452
6453 // Caches the corresponding values from the global table in the mutable
6454 // fields. We lazily load these as some clients only need the PC offset.
6455 void LazyLoadGlobalTableEntry() const {
6456 ASSERT(maps_.UsesGlobalTable());
6457 ASSERT(HasLoadedEntry());
6458 ASSERT(current_global_table_offset_ < bits_container_.payload_size());
6459
6460 NoSafepointScope scope;
6461 ReadStream stream(bits_container_.data(), bits_container_.payload_size(),
6462 current_global_table_offset_);
6463
6464 current_spill_slot_bit_count_ = stream.ReadLEB128();
6465 ASSERT(current_spill_slot_bit_count_ >= 0);
6466
6467 current_non_spill_slot_bit_count_ = stream.ReadLEB128();
6468 ASSERT(current_non_spill_slot_bit_count_ >= 0);
6469
6470 const auto stackmap_bits = Length();
6471 const uintptr_t stackmap_size =
6472 Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
6473 ASSERT(stackmap_size <=
6474 (bits_container_.payload_size() - stream.Position()));
6475
6476 current_bits_offset_ = stream.Position();
6477 }
6478
6479 void EnsureFullyLoadedEntry() const {
6480 ASSERT(HasLoadedEntry());
6481 if (current_spill_slot_bit_count_ < 0) {
6482 LazyLoadGlobalTableEntry();
6483 ASSERT(current_spill_slot_bit_count_ >= 0);
6484 }
6485 }
6486
6487 const PayloadHandle& maps_;
6488 const PayloadHandle& bits_container_;
6489
6490 uintptr_t next_offset_ = 0;
6491 uint32_t current_pc_offset_ = 0;
6492 // Only used when looking up non-PC information in the global table.
6493 uintptr_t current_global_table_offset_ = 0;
6494 // Marked as mutable as these fields may be updated with lazily loaded
6495 // values from the global table when their associated accessor is called,
6496 // but those values will never change for a given entry once loaded..
6497 mutable intptr_t current_spill_slot_bit_count_ = -1;
6498 mutable intptr_t current_non_spill_slot_bit_count_ = -1;
6499 mutable intptr_t current_bits_offset_ = -1;
6500
6501 friend class StackMapEntry;
6502 };
6503
6504 Iterator<CompressedStackMaps> iterator(Thread* thread) const;
6505
6506 void WriteToBuffer(BaseTextBuffer* buffer, const char* separator) const;
6507
6508 private:
6509 static CompressedStackMapsPtr New(const void* payload,
6510 intptr_t size,
6511 bool is_global_table,
6512 bool uses_global_table);
6513
6514 FINAL_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps, Object);
6515 friend class Class;
6516};
6517
6518class ExceptionHandlers : public Object {
6519 public:
6520 static constexpr intptr_t kInvalidPcOffset = 0;
6521
6522 intptr_t num_entries() const;
6523
6524 bool has_async_handler() const;
6525 void set_has_async_handler(bool value) const;
6526
6527 void GetHandlerInfo(intptr_t try_index, ExceptionHandlerInfo* info) const;
6528
6529 uword HandlerPCOffset(intptr_t try_index) const;
6530 intptr_t OuterTryIndex(intptr_t try_index) const;
6531 bool NeedsStackTrace(intptr_t try_index) const;
6532 bool IsGenerated(intptr_t try_index) const;
6533
6534 void SetHandlerInfo(intptr_t try_index,
6535 intptr_t outer_try_index,
6536 uword handler_pc_offset,
6537 bool needs_stacktrace,
6538 bool has_catch_all,
6539 bool is_generated) const;
6540
6541 ArrayPtr GetHandledTypes(intptr_t try_index) const;
6542 void SetHandledTypes(intptr_t try_index, const Array& handled_types) const;
6543 bool HasCatchAll(intptr_t try_index) const;
6544
6545 struct ArrayTraits {
6546 static intptr_t elements_start_offset() {
6547 return sizeof(UntaggedExceptionHandlers);
6548 }
6549 static constexpr intptr_t kElementSize = sizeof(ExceptionHandlerInfo);
6550 };
6551
6552 static intptr_t InstanceSize() {
6553 ASSERT(sizeof(UntaggedExceptionHandlers) ==
6554 OFFSET_OF_RETURNED_VALUE(UntaggedExceptionHandlers, data));
6555 return 0;
6556 }
6557 static intptr_t InstanceSize(intptr_t len) {
6558 return RoundedAllocationSize(size: sizeof(UntaggedExceptionHandlers) +
6559 (len * sizeof(ExceptionHandlerInfo)));
6560 }
6561
6562 static ExceptionHandlersPtr New(intptr_t num_handlers);
6563 static ExceptionHandlersPtr New(const Array& handled_types_data);
6564
6565 // We would have a VisitPointers function here to traverse the
6566 // exception handler table to visit objects if any in the table.
6567
6568 private:
6569 // Pick somewhat arbitrary maximum number of exception handlers
6570 // for a function. This value is used to catch potentially
6571 // malicious code.
6572 static constexpr intptr_t kMaxHandlers = 1024 * 1024;
6573
6574 void set_handled_types_data(const Array& value) const;
6575
6576 FINAL_HEAP_OBJECT_IMPLEMENTATION(ExceptionHandlers, Object);
6577 friend class Class;
6578 friend class Object;
6579};
6580
6581// A WeakSerializationReference (WSR) denotes a type of weak reference to a
6582// target object. In particular, objects that can only be reached from roots via
6583// WSR edges during serialization of AOT snapshots should not be serialized, but
6584// instead references to these objects should be replaced with a reference to
6585// the provided replacement object.
6586//
6587// Of course, the target object may still be serialized if there are paths to
6588// the object from the roots that do not go through one of these objects. In
6589// this case, references through WSRs are serialized as direct references to
6590// the target.
6591//
6592// Unfortunately a WSR is not a proxy for the original object, so WSRs may
6593// only currently be used with ObjectPtr fields. To ease this situation for
6594// fields that are normally a non-ObjectPtr type outside of the precompiler,
6595// use the following macros, which avoid the need to adjust other code to
6596// handle the WSR case:
6597//
6598// * WSR_*POINTER_FIELD() in raw_object.h (i.e., just append WSR_ to the
6599// original field declaration).
6600// * PRECOMPILER_WSR_FIELD_DECLARATION() in object.h
6601// * PRECOMPILER_WSR_FIELD_DEFINITION() in object.cc
6602class WeakSerializationReference : public Object {
6603 public:
6604 ObjectPtr target() const { return TargetOf(obj: ptr()); }
6605 static ObjectPtr TargetOf(const WeakSerializationReferencePtr obj) {
6606 return obj->untag()->target();
6607 }
6608
6609 static ObjectPtr Unwrap(ObjectPtr obj) {
6610#if defined(DART_PRECOMPILER)
6611 if (obj->IsHeapObject() && obj->IsWeakSerializationReference()) {
6612 return TargetOf(static_cast<WeakSerializationReferencePtr>(obj));
6613 }
6614#endif
6615 return obj;
6616 }
6617 static ObjectPtr Unwrap(const Object& obj) { return Unwrap(obj: obj.ptr()); }
6618 static ObjectPtr UnwrapIfTarget(ObjectPtr obj) { return Unwrap(obj); }
6619 static ObjectPtr UnwrapIfTarget(const Object& obj) { return Unwrap(obj); }
6620
6621 static intptr_t InstanceSize() {
6622 return RoundedAllocationSize(size: sizeof(UntaggedWeakSerializationReference));
6623 }
6624
6625 // Returns an ObjectPtr as the target may not need wrapping (e.g., it
6626 // is guaranteed to be serialized).
6627 static ObjectPtr New(const Object& target, const Object& replacement);
6628
6629 private:
6630 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakSerializationReference, Object);
6631
6632 ObjectPtr replacement() const { return untag()->replacement(); }
6633
6634 friend class Class;
6635};
6636
6637class WeakArray : public Object {
6638 public:
6639 intptr_t Length() const { return LengthOf(array: ptr()); }
6640 static inline intptr_t LengthOf(const WeakArrayPtr array);
6641
6642 static intptr_t length_offset() {
6643 return OFFSET_OF(UntaggedWeakArray, length_);
6644 }
6645 static intptr_t data_offset() {
6646 return OFFSET_OF_RETURNED_VALUE(UntaggedWeakArray, data);
6647 }
6648 static intptr_t element_offset(intptr_t index) {
6649 return OFFSET_OF_RETURNED_VALUE(UntaggedWeakArray, data) +
6650 kBytesPerElement * index;
6651 }
6652 static intptr_t index_at_offset(intptr_t offset_in_bytes) {
6653 intptr_t index = (offset_in_bytes - data_offset()) / kBytesPerElement;
6654 ASSERT(index >= 0);
6655 return index;
6656 }
6657
6658 struct ArrayTraits {
6659 static intptr_t elements_start_offset() { return WeakArray::data_offset(); }
6660
6661 static constexpr intptr_t kElementSize = kCompressedWordSize;
6662 };
6663
6664 ObjectPtr At(intptr_t index) const { return untag()->element(index); }
6665 void SetAt(intptr_t index, const Object& value) const {
6666 untag()->set_element(index, value: value.ptr());
6667 }
6668
6669 // Access to the array with acquire release semantics.
6670 ObjectPtr AtAcquire(intptr_t index) const {
6671 return untag()->element<std::memory_order_acquire>(index);
6672 }
6673 void SetAtRelease(intptr_t index, const Object& value) const {
6674 untag()->set_element<std::memory_order_release>(index, value: value.ptr());
6675 }
6676
6677 static constexpr intptr_t kBytesPerElement = kCompressedWordSize;
6678 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
6679
6680 static constexpr bool IsValidLength(intptr_t length) {
6681 return 0 <= length && length <= kMaxElements;
6682 }
6683
6684 static intptr_t InstanceSize() {
6685 ASSERT(sizeof(UntaggedWeakArray) ==
6686 OFFSET_OF_RETURNED_VALUE(UntaggedWeakArray, data));
6687 return 0;
6688 }
6689
6690 static constexpr intptr_t InstanceSize(intptr_t len) {
6691 return RoundedAllocationSize(size: sizeof(UntaggedWeakArray) +
6692 (len * kBytesPerElement));
6693 }
6694
6695 static WeakArrayPtr New(intptr_t length, Heap::Space space = Heap::kNew);
6696
6697 private:
6698 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakArray, Object);
6699 friend class Class;
6700 friend class Object;
6701};
6702
6703class Code : public Object {
6704 public:
6705 // When dual mapping, this returns the executable view.
6706 InstructionsPtr active_instructions() const {
6707#if defined(DART_PRECOMPILED_RUNTIME)
6708 UNREACHABLE();
6709 return nullptr;
6710#else
6711 return untag()->active_instructions();
6712#endif
6713 }
6714
6715 // When dual mapping, these return the executable view.
6716 InstructionsPtr instructions() const { return untag()->instructions(); }
6717 static InstructionsPtr InstructionsOf(const CodePtr code) {
6718 return code->untag()->instructions();
6719 }
6720
6721 static intptr_t instructions_offset() {
6722 return OFFSET_OF(UntaggedCode, instructions_);
6723 }
6724#if !defined(DART_PRECOMPILED_RUNTIME)
6725 static intptr_t active_instructions_offset() {
6726 return OFFSET_OF(UntaggedCode, active_instructions_);
6727 }
6728#endif
6729
6730 using EntryKind = CodeEntryKind;
6731
6732 static const char* EntryKindToCString(EntryKind kind);
6733 static bool ParseEntryKind(const char* str, EntryKind* out);
6734
6735 static intptr_t entry_point_offset(EntryKind kind = EntryKind::kNormal) {
6736 switch (kind) {
6737 case EntryKind::kNormal:
6738 return OFFSET_OF(UntaggedCode, entry_point_);
6739 case EntryKind::kUnchecked:
6740 return OFFSET_OF(UntaggedCode, unchecked_entry_point_);
6741 case EntryKind::kMonomorphic:
6742 return OFFSET_OF(UntaggedCode, monomorphic_entry_point_);
6743 case EntryKind::kMonomorphicUnchecked:
6744 return OFFSET_OF(UntaggedCode, monomorphic_unchecked_entry_point_);
6745 default:
6746 UNREACHABLE();
6747 }
6748 }
6749
6750 ObjectPoolPtr object_pool() const { return untag()->object_pool(); }
6751 static intptr_t object_pool_offset() {
6752 return OFFSET_OF(UntaggedCode, object_pool_);
6753 }
6754
6755 intptr_t pointer_offsets_length() const {
6756 return PtrOffBits::decode(value: untag()->state_bits_);
6757 }
6758
6759 bool is_optimized() const {
6760 return OptimizedBit::decode(value: untag()->state_bits_);
6761 }
6762 void set_is_optimized(bool value) const;
6763 static bool IsOptimized(CodePtr code) {
6764 return Code::OptimizedBit::decode(value: code->untag()->state_bits_);
6765 }
6766
6767 bool is_force_optimized() const {
6768 return ForceOptimizedBit::decode(value: untag()->state_bits_);
6769 }
6770 void set_is_force_optimized(bool value) const;
6771
6772 bool is_alive() const { return AliveBit::decode(value: untag()->state_bits_); }
6773 void set_is_alive(bool value) const;
6774
6775 bool is_discarded() const { return IsDiscarded(code: ptr()); }
6776 static bool IsDiscarded(const CodePtr code) {
6777 return DiscardedBit::decode(value: code->untag()->state_bits_);
6778 }
6779 void set_is_discarded(bool value) const;
6780
6781 bool HasMonomorphicEntry() const { return HasMonomorphicEntry(code: ptr()); }
6782 static bool HasMonomorphicEntry(const CodePtr code) {
6783#if defined(DART_PRECOMPILED_RUNTIME)
6784 return code->untag()->entry_point_ !=
6785 code->untag()->monomorphic_entry_point_;
6786#else
6787 return Instructions::HasMonomorphicEntry(instr: InstructionsOf(code));
6788#endif
6789 }
6790
6791 // Returns the payload start of [instructions()].
6792 uword PayloadStart() const { return PayloadStartOf(code: ptr()); }
6793 static uword PayloadStartOf(const CodePtr code) {
6794#if defined(DART_PRECOMPILED_RUNTIME)
6795 if (IsUnknownDartCode(code)) return 0;
6796 const uword entry_offset = HasMonomorphicEntry(code)
6797 ? Instructions::kPolymorphicEntryOffsetAOT
6798 : 0;
6799 return EntryPointOf(code) - entry_offset;
6800#else
6801 return Instructions::PayloadStart(instr: InstructionsOf(code));
6802#endif
6803 }
6804
6805 // Returns the entry point of [instructions()].
6806 uword EntryPoint() const { return EntryPointOf(code: ptr()); }
6807 static uword EntryPointOf(const CodePtr code) {
6808#if defined(DART_PRECOMPILED_RUNTIME)
6809 return code->untag()->entry_point_;
6810#else
6811 return Instructions::EntryPoint(instr: InstructionsOf(code));
6812#endif
6813 }
6814
6815 static uword UncheckedEntryPointOf(const CodePtr code) {
6816 return code->untag()->unchecked_entry_point_;
6817 }
6818
6819 // Returns the unchecked entry point of [instructions()].
6820 uword UncheckedEntryPoint() const {
6821#if defined(DART_PRECOMPILED_RUNTIME)
6822 return untag()->unchecked_entry_point_;
6823#else
6824 return EntryPoint() + untag()->unchecked_offset_;
6825#endif
6826 }
6827 // Returns the monomorphic entry point of [instructions()].
6828 uword MonomorphicEntryPoint() const {
6829#if defined(DART_PRECOMPILED_RUNTIME)
6830 return untag()->monomorphic_entry_point_;
6831#else
6832 return Instructions::MonomorphicEntryPoint(instr: instructions());
6833#endif
6834 }
6835 // Returns the unchecked monomorphic entry point of [instructions()].
6836 uword MonomorphicUncheckedEntryPoint() const {
6837#if defined(DART_PRECOMPILED_RUNTIME)
6838 return untag()->monomorphic_unchecked_entry_point_;
6839#else
6840 return MonomorphicEntryPoint() + untag()->unchecked_offset_;
6841#endif
6842 }
6843
6844 // Returns the size of [instructions()].
6845 uword Size() const { return PayloadSizeOf(code: ptr()); }
6846 static uword PayloadSizeOf(const CodePtr code) {
6847#if defined(DART_PRECOMPILED_RUNTIME)
6848 if (IsUnknownDartCode(code)) return kUwordMax;
6849 return code->untag()->instructions_length_;
6850#else
6851 return Instructions::Size(instr: InstructionsOf(code));
6852#endif
6853 }
6854
6855 ObjectPoolPtr GetObjectPool() const;
6856 // Returns whether the given PC address is in [instructions()].
6857 bool ContainsInstructionAt(uword addr) const {
6858 return ContainsInstructionAt(code: ptr(), pc: addr);
6859 }
6860
6861 // Returns whether the given PC address is in [InstructionsOf(code)].
6862 static bool ContainsInstructionAt(const CodePtr code, uword pc) {
6863 return UntaggedCode::ContainsPC(raw_obj: code, pc);
6864 }
6865
6866 // Returns true if there is a debugger breakpoint set in this code object.
6867 bool HasBreakpoint() const;
6868
6869 PcDescriptorsPtr pc_descriptors() const { return untag()->pc_descriptors(); }
6870 void set_pc_descriptors(const PcDescriptors& descriptors) const {
6871 ASSERT(descriptors.IsOld());
6872 untag()->set_pc_descriptors(descriptors.ptr());
6873 }
6874
6875 CodeSourceMapPtr code_source_map() const {
6876 return untag()->code_source_map();
6877 }
6878
6879 void set_code_source_map(const CodeSourceMap& code_source_map) const {
6880 ASSERT(code_source_map.IsOld());
6881 untag()->set_code_source_map(code_source_map.ptr());
6882 }
6883
6884 // Array of DeoptInfo objects.
6885 ArrayPtr deopt_info_array() const {
6886#if defined(DART_PRECOMPILED_RUNTIME)
6887 UNREACHABLE();
6888 return nullptr;
6889#else
6890 return untag()->deopt_info_array();
6891#endif
6892 }
6893 void set_deopt_info_array(const Array& array) const;
6894
6895#if !defined(DART_PRECOMPILED_RUNTIME)
6896 intptr_t num_variables() const;
6897 void set_num_variables(intptr_t num_variables) const;
6898#endif
6899
6900#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
6901 TypedDataPtr catch_entry_moves_maps() const;
6902 void set_catch_entry_moves_maps(const TypedData& maps) const;
6903#endif
6904
6905 CompressedStackMapsPtr compressed_stackmaps() const {
6906 return untag()->compressed_stackmaps();
6907 }
6908 void set_compressed_stackmaps(const CompressedStackMaps& maps) const;
6909
6910 enum CallKind {
6911 kPcRelativeCall = 1,
6912 kPcRelativeTTSCall = 2,
6913 kPcRelativeTailCall = 3,
6914 kCallViaCode = 4,
6915 };
6916
6917 enum CallEntryPoint {
6918 kDefaultEntry,
6919 kUncheckedEntry,
6920 };
6921
6922 enum SCallTableEntry {
6923 kSCallTableKindAndOffset = 0,
6924 kSCallTableCodeOrTypeTarget = 1,
6925 kSCallTableFunctionTarget = 2,
6926 kSCallTableEntryLength = 3,
6927 };
6928
6929 enum class PoolAttachment {
6930 kAttachPool,
6931 kNotAttachPool,
6932 };
6933
6934 class KindField : public BitField<intptr_t, CallKind, 0, 3> {};
6935 class EntryPointField
6936 : public BitField<intptr_t, CallEntryPoint, KindField::kNextBit, 1> {};
6937 class OffsetField
6938 : public BitField<intptr_t, intptr_t, EntryPointField::kNextBit, 26> {};
6939
6940 void set_static_calls_target_table(const Array& value) const;
6941 ArrayPtr static_calls_target_table() const {
6942#if defined(DART_PRECOMPILED_RUNTIME)
6943 UNREACHABLE();
6944 return nullptr;
6945#else
6946 return untag()->static_calls_target_table();
6947#endif
6948 }
6949
6950 TypedDataPtr GetDeoptInfoAtPc(uword pc,
6951 ICData::DeoptReasonId* deopt_reason,
6952 uint32_t* deopt_flags) const;
6953
6954 // Returns null if there is no static call at 'pc'.
6955 FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const;
6956 // Aborts if there is no static call at 'pc'.
6957 void SetStaticCallTargetCodeAt(uword pc, const Code& code) const;
6958 void SetStubCallTargetCodeAt(uword pc, const Code& code) const;
6959
6960 void Disassemble(DisassemblyFormatter* formatter = nullptr) const;
6961
6962#if defined(INCLUDE_IL_PRINTER)
6963 class Comments : public ZoneAllocated, public CodeComments {
6964 public:
6965 static Comments& New(intptr_t count);
6966
6967 intptr_t Length() const override;
6968
6969 void SetPCOffsetAt(intptr_t idx, intptr_t pc_offset);
6970 void SetCommentAt(intptr_t idx, const String& comment);
6971
6972 intptr_t PCOffsetAt(intptr_t idx) const override;
6973 const char* CommentAt(intptr_t idx) const override;
6974
6975 private:
6976 explicit Comments(const Array& comments);
6977
6978 // Layout of entries describing comments.
6979 enum {kPCOffsetEntry = 0, // PC offset to a comment as a Smi.
6980 kCommentEntry, // Comment text as a String.
6981 kNumberOfEntries};
6982
6983 const Array& comments_;
6984 String& string_;
6985
6986 friend class Code;
6987
6988 DISALLOW_COPY_AND_ASSIGN(Comments);
6989 };
6990
6991 const CodeComments& comments() const;
6992 void set_comments(const CodeComments& comments) const;
6993#endif // defined(INCLUDE_IL_PRINTER)
6994
6995 ObjectPtr return_address_metadata() const {
6996#if defined(PRODUCT)
6997 UNREACHABLE();
6998 return nullptr;
6999#else
7000 return untag()->return_address_metadata();
7001#endif
7002 }
7003 // Sets |return_address_metadata|.
7004 void SetPrologueOffset(intptr_t offset) const;
7005 // Returns -1 if no prologue offset is available.
7006 intptr_t GetPrologueOffset() const;
7007
7008 ArrayPtr inlined_id_to_function() const;
7009 void set_inlined_id_to_function(const Array& value) const;
7010
7011 // Provides the call stack at the given pc offset, with the top-of-stack in
7012 // the last element and the root function (this) as the first element, along
7013 // with the corresponding source positions. Note the token position for each
7014 // function except the top-of-stack is the position of the call to the next
7015 // function. The stack will be empty if we lack the metadata to produce it,
7016 // which happens for stub code.
7017 // The pc offset is interpreted as an instruction address (as needed by the
7018 // disassembler or the top frame of a profiler sample).
7019 void GetInlinedFunctionsAtInstruction(
7020 intptr_t pc_offset,
7021 GrowableArray<const Function*>* functions,
7022 GrowableArray<TokenPosition>* token_positions) const;
7023 // Same as above, except the pc is interpreted as a return address (as needed
7024 // for a stack trace or the bottom frames of a profiler sample).
7025 void GetInlinedFunctionsAtReturnAddress(
7026 intptr_t pc_offset,
7027 GrowableArray<const Function*>* functions,
7028 GrowableArray<TokenPosition>* token_positions) const {
7029 GetInlinedFunctionsAtInstruction(pc_offset: pc_offset - 1, functions, token_positions);
7030 }
7031
7032 NOT_IN_PRODUCT(void PrintJSONInlineIntervals(JSONObject* object) const);
7033 void DumpInlineIntervals() const;
7034 void DumpSourcePositions(bool relative_addresses = false) const;
7035
7036 LocalVarDescriptorsPtr var_descriptors() const {
7037#if defined(PRODUCT)
7038 UNREACHABLE();
7039 return nullptr;
7040#else
7041 return untag()->var_descriptors();
7042#endif
7043 }
7044 void set_var_descriptors(const LocalVarDescriptors& value) const {
7045#if defined(PRODUCT)
7046 UNREACHABLE();
7047#else
7048 ASSERT(value.IsOld());
7049 untag()->set_var_descriptors(value.ptr());
7050#endif
7051 }
7052
7053 // Will compute local var descriptors if necessary.
7054 LocalVarDescriptorsPtr GetLocalVarDescriptors() const;
7055
7056 ExceptionHandlersPtr exception_handlers() const {
7057 return untag()->exception_handlers();
7058 }
7059 void set_exception_handlers(const ExceptionHandlers& handlers) const {
7060 ASSERT(handlers.IsOld());
7061 untag()->set_exception_handlers(handlers.ptr());
7062 }
7063
7064 // WARNING: function() returns the owner which is not guaranteed to be
7065 // a Function. It is up to the caller to guarantee it isn't a stub, class,
7066 // or something else.
7067 // TODO(turnidge): Consider dropping this function and making
7068 // everybody use owner(). Currently this function is misused - even
7069 // while generating the snapshot.
7070 FunctionPtr function() const {
7071 ASSERT(IsFunctionCode());
7072 return Function::RawCast(raw: owner());
7073 }
7074
7075 ObjectPtr owner() const {
7076 return WeakSerializationReference::Unwrap(obj: untag()->owner());
7077 }
7078 void set_owner(const Object& owner) const;
7079
7080 classid_t OwnerClassId() const { return OwnerClassIdOf(raw: ptr()); }
7081 static classid_t OwnerClassIdOf(CodePtr raw) {
7082 ObjectPtr owner = WeakSerializationReference::Unwrap(obj: raw->untag()->owner());
7083 if (!owner->IsHeapObject()) {
7084 return RawSmiValue(raw_value: static_cast<SmiPtr>(owner));
7085 }
7086 return owner->GetClassId();
7087 }
7088
7089 static intptr_t owner_offset() { return OFFSET_OF(UntaggedCode, owner_); }
7090
7091 // We would have a VisitPointers function here to traverse all the
7092 // embedded objects in the instructions using pointer_offsets.
7093
7094 static constexpr intptr_t kBytesPerElement =
7095 sizeof(reinterpret_cast<UntaggedCode*>(kOffsetOfPtr)->data()[0]);
7096 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
7097
7098 struct ArrayTraits {
7099 static intptr_t elements_start_offset() { return sizeof(UntaggedCode); }
7100 static constexpr intptr_t kElementSize = kBytesPerElement;
7101 };
7102
7103 static intptr_t InstanceSize() {
7104 ASSERT(sizeof(UntaggedCode) ==
7105 OFFSET_OF_RETURNED_VALUE(UntaggedCode, data));
7106 return 0;
7107 }
7108 static intptr_t InstanceSize(intptr_t len) {
7109 ASSERT(0 <= len && len <= kMaxElements);
7110 return RoundedAllocationSize(size: sizeof(UntaggedCode) +
7111 (len * kBytesPerElement));
7112 }
7113#if !defined(DART_PRECOMPILED_RUNTIME)
7114 // Finalizes the generated code, by generating various kinds of metadata (e.g.
7115 // stack maps, pc descriptors, ...) and attach them to a newly generated
7116 // [Code] object.
7117 //
7118 // If Code::PoolAttachment::kAttachPool is specified for [pool_attachment]
7119 // then a new [ObjectPool] will be attached to the code object as well.
7120 // Otherwise the caller is responsible for doing this via
7121 // `Object::set_object_pool()`.
7122 static CodePtr FinalizeCode(FlowGraphCompiler* compiler,
7123 compiler::Assembler* assembler,
7124 PoolAttachment pool_attachment,
7125 bool optimized,
7126 CodeStatistics* stats);
7127
7128 // Notifies all active [CodeObserver]s.
7129 static void NotifyCodeObservers(const Code& code, bool optimized);
7130 static void NotifyCodeObservers(const Function& function,
7131 const Code& code,
7132 bool optimized);
7133 static void NotifyCodeObservers(const char* name,
7134 const Code& code,
7135 bool optimized);
7136
7137 // Calls [FinalizeCode] and also notifies [CodeObserver]s.
7138 static CodePtr FinalizeCodeAndNotify(const Function& function,
7139 FlowGraphCompiler* compiler,
7140 compiler::Assembler* assembler,
7141 PoolAttachment pool_attachment,
7142 bool optimized = false,
7143 CodeStatistics* stats = nullptr);
7144 static CodePtr FinalizeCodeAndNotify(const char* name,
7145 FlowGraphCompiler* compiler,
7146 compiler::Assembler* assembler,
7147 PoolAttachment pool_attachment,
7148 bool optimized = false,
7149 CodeStatistics* stats = nullptr);
7150
7151#endif
7152 static CodePtr FindCode(uword pc, int64_t timestamp);
7153
7154 int32_t GetPointerOffsetAt(int index) const {
7155 NoSafepointScope no_safepoint;
7156 return *PointerOffsetAddrAt(index);
7157 }
7158 TokenPosition GetTokenIndexOfPC(uword pc) const;
7159
7160 // Find pc, return 0 if not found.
7161 uword GetPcForDeoptId(intptr_t deopt_id,
7162 UntaggedPcDescriptors::Kind kind) const;
7163 intptr_t GetDeoptIdForOsr(uword pc) const;
7164
7165 uint32_t Hash() const;
7166 const char* Name() const;
7167 const char* QualifiedName(const NameFormattingParams& params) const;
7168
7169 int64_t compile_timestamp() const {
7170#if defined(PRODUCT)
7171 return 0;
7172#else
7173 return untag()->compile_timestamp_;
7174#endif
7175 }
7176
7177 bool IsStubCode() const;
7178 bool IsAllocationStubCode() const;
7179 bool IsTypeTestStubCode() const;
7180 bool IsFunctionCode() const;
7181
7182 // Returns true if this Code object represents
7183 // Dart function code without any additional information.
7184 bool IsUnknownDartCode() const { return IsUnknownDartCode(code: ptr()); }
7185 static bool IsUnknownDartCode(CodePtr code);
7186
7187 void DisableDartCode() const;
7188
7189 void DisableStubCode(bool is_cls_parameterized) const;
7190
7191 void Enable() const {
7192 if (!IsDisabled()) return;
7193 ResetActiveInstructions();
7194 }
7195
7196 bool IsDisabled() const { return IsDisabled(code: ptr()); }
7197 static bool IsDisabled(CodePtr code) {
7198#if defined(DART_PRECOMPILED_RUNTIME)
7199 UNREACHABLE();
7200 return false;
7201#else
7202 return code->untag()->instructions() !=
7203 code->untag()->active_instructions();
7204#endif
7205 }
7206
7207 void set_object_pool(ObjectPoolPtr object_pool) const {
7208 untag()->set_object_pool(object_pool);
7209 }
7210
7211 private:
7212 void set_state_bits(intptr_t bits) const;
7213
7214 friend class UntaggedObject; // For UntaggedObject::SizeFromClass().
7215 friend class UntaggedCode;
7216 friend struct RelocatorTestHelper;
7217
7218 enum {
7219 kOptimizedBit = 0,
7220 kForceOptimizedBit = 1,
7221 kAliveBit = 2,
7222 kDiscardedBit = 3,
7223 kPtrOffBit = 4,
7224 kPtrOffSize = kBitsPerInt32 - kPtrOffBit,
7225 };
7226
7227 class OptimizedBit : public BitField<int32_t, bool, kOptimizedBit, 1> {};
7228
7229 // Force-optimized is true if the Code was generated for a function with
7230 // Function::ForceOptimize().
7231 class ForceOptimizedBit
7232 : public BitField<int32_t, bool, kForceOptimizedBit, 1> {};
7233
7234 class AliveBit : public BitField<int32_t, bool, kAliveBit, 1> {};
7235
7236 // Set by precompiler if this Code object doesn't contain
7237 // useful information besides instructions and compressed stack map.
7238 // Such objects are serialized in a shorter form and replaced with
7239 // StubCode::UnknownDartCode() during snapshot deserialization.
7240 class DiscardedBit : public BitField<int32_t, bool, kDiscardedBit, 1> {};
7241
7242 class PtrOffBits
7243 : public BitField<int32_t, intptr_t, kPtrOffBit, kPtrOffSize> {};
7244
7245 static constexpr intptr_t kEntrySize = sizeof(int32_t); // NOLINT
7246
7247 void set_compile_timestamp(int64_t timestamp) const {
7248#if defined(PRODUCT)
7249 UNREACHABLE();
7250#else
7251 StoreNonPointer(addr: &untag()->compile_timestamp_, value: timestamp);
7252#endif
7253 }
7254
7255 // Initializes the cached entrypoint addresses in [code] as calculated
7256 // from [instructions] and [unchecked_offset].
7257 static void InitializeCachedEntryPointsFrom(CodePtr code,
7258 InstructionsPtr instructions,
7259 uint32_t unchecked_offset);
7260
7261 // Sets [active_instructions_] to [instructions] and updates the cached
7262 // entry point addresses.
7263 void SetActiveInstructions(const Instructions& instructions,
7264 uint32_t unchecked_offset) const;
7265 void SetActiveInstructionsSafe(const Instructions& instructions,
7266 uint32_t unchecked_offset) const;
7267
7268 // Resets [active_instructions_] to its original value of [instructions_] and
7269 // updates the cached entry point addresses to match.
7270 void ResetActiveInstructions() const;
7271
7272 void set_instructions(const Instructions& instructions) const {
7273 ASSERT(Thread::Current()->IsDartMutatorThread() || !is_alive());
7274 untag()->set_instructions(instructions.ptr());
7275 }
7276#if !defined(DART_PRECOMPILED_RUNTIME)
7277 void set_unchecked_offset(uword offset) const {
7278 StoreNonPointer(addr: &untag()->unchecked_offset_, value: offset);
7279 }
7280#endif
7281
7282 // Returns the unchecked entry point offset for [instructions_].
7283 uint32_t UncheckedEntryPointOffset() const {
7284 return UncheckedEntryPointOffsetOf(code: ptr());
7285 }
7286 static uint32_t UncheckedEntryPointOffsetOf(CodePtr code) {
7287#if defined(DART_PRECOMPILED_RUNTIME)
7288 UNREACHABLE();
7289#else
7290 return code->untag()->unchecked_offset_;
7291#endif
7292 }
7293
7294 void set_pointer_offsets_length(intptr_t value) {
7295 // The number of fixups is limited to 1-billion.
7296 ASSERT(Utils::IsUint(30, value));
7297 set_state_bits(PtrOffBits::update(value, original: untag()->state_bits_));
7298 }
7299 int32_t* PointerOffsetAddrAt(int index) const {
7300 ASSERT(index >= 0);
7301 ASSERT(index < pointer_offsets_length());
7302 // TODO(iposva): Unit test is missing for this functionality.
7303 return &UnsafeMutableNonPointer(addr: untag()->data())[index];
7304 }
7305 void SetPointerOffsetAt(int index, int32_t offset_in_instructions) {
7306 NoSafepointScope no_safepoint;
7307 *PointerOffsetAddrAt(index) = offset_in_instructions;
7308 }
7309
7310 intptr_t BinarySearchInSCallTable(uword pc) const;
7311 static CodePtr LookupCodeInIsolateGroup(IsolateGroup* isolate_group,
7312 uword pc);
7313
7314 // New is a private method as RawInstruction and RawCode objects should
7315 // only be created using the Code::FinalizeCode method. This method creates
7316 // the RawInstruction and RawCode objects, sets up the pointer offsets
7317 // and links the two in a GC safe manner.
7318 static CodePtr New(intptr_t pointer_offsets_length);
7319
7320 FINAL_HEAP_OBJECT_IMPLEMENTATION(Code, Object);
7321 friend class Class;
7322 friend class CodeTestHelper;
7323 friend class StubCode; // for set_object_pool
7324 friend class Precompiler; // for set_object_pool
7325 friend class FunctionSerializationCluster;
7326 friend class CodeSerializationCluster;
7327 friend class CodeDeserializationCluster;
7328 friend class Deserializer; // for InitializeCachedEntryPointsFrom
7329 friend class StubCode; // for set_object_pool
7330 friend class MegamorphicCacheTable; // for set_object_pool
7331 friend class CodePatcher; // for set_instructions
7332 friend class ProgramVisitor; // for set_instructions
7333 // So that the UntaggedFunction pointer visitor can determine whether code the
7334 // function points to is optimized.
7335 friend class UntaggedFunction;
7336 friend class CallSiteResetter;
7337 friend class CodeKeyValueTrait; // for UncheckedEntryPointOffset
7338 friend class InstanceCall; // for StorePointerUnaligned
7339 friend class StaticCall; // for StorePointerUnaligned
7340};
7341
7342class Context : public Object {
7343 public:
7344 ContextPtr parent() const { return untag()->parent(); }
7345 void set_parent(const Context& parent) const {
7346 untag()->set_parent(parent.ptr());
7347 }
7348 static intptr_t parent_offset() {
7349 return OFFSET_OF(UntaggedContext, parent_);
7350 }
7351
7352 intptr_t num_variables() const { return untag()->num_variables_; }
7353 static intptr_t num_variables_offset() {
7354 return OFFSET_OF(UntaggedContext, num_variables_);
7355 }
7356 static intptr_t NumVariables(const ContextPtr context) {
7357 return context->untag()->num_variables_;
7358 }
7359
7360 ObjectPtr At(intptr_t context_index) const {
7361 return untag()->element(index: context_index);
7362 }
7363 inline void SetAt(intptr_t context_index, const Object& value) const;
7364
7365 intptr_t GetLevel() const;
7366
7367 void Dump(int indent = 0) const;
7368
7369 static constexpr intptr_t kBytesPerElement = kCompressedWordSize;
7370 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
7371
7372 struct ArrayTraits {
7373 static intptr_t elements_start_offset() { return sizeof(UntaggedContext); }
7374 static constexpr intptr_t kElementSize = kBytesPerElement;
7375 };
7376
7377 static intptr_t variable_offset(intptr_t context_index) {
7378 return OFFSET_OF_RETURNED_VALUE(UntaggedContext, data) +
7379 (kBytesPerElement * context_index);
7380 }
7381
7382 static bool IsValidLength(intptr_t len) {
7383 return 0 <= len && len <= compiler::target::Context::kMaxElements;
7384 }
7385
7386 static intptr_t InstanceSize() {
7387 ASSERT(sizeof(UntaggedContext) ==
7388 OFFSET_OF_RETURNED_VALUE(UntaggedContext, data));
7389 return 0;
7390 }
7391
7392 static intptr_t InstanceSize(intptr_t len) {
7393 ASSERT(IsValidLength(len));
7394 return RoundedAllocationSize(size: sizeof(UntaggedContext) +
7395 (len * kBytesPerElement));
7396 }
7397
7398 static ContextPtr New(intptr_t num_variables, Heap::Space space = Heap::kNew);
7399
7400 private:
7401 void set_num_variables(intptr_t num_variables) const {
7402 StoreNonPointer(addr: &untag()->num_variables_, value: num_variables);
7403 }
7404
7405 FINAL_HEAP_OBJECT_IMPLEMENTATION(Context, Object);
7406 friend class Class;
7407 friend class Object;
7408};
7409
7410// The ContextScope class makes it possible to delay the compilation of a local
7411// function until it is invoked. A ContextScope instance collects the local
7412// variables that are referenced by the local function to be compiled and that
7413// belong to the outer scopes, that is, to the local scopes of (possibly nested)
7414// functions enclosing the local function. Each captured variable is represented
7415// by its token position in the source, its name, its type, its allocation index
7416// in the context, and its context level. The function nesting level and loop
7417// nesting level are not preserved, since they are only used until the context
7418// level is assigned. In addition the ContextScope has a field 'is_implicit'
7419// which is true if the ContextScope was created for an implicit closure.
7420class ContextScope : public Object {
7421 public:
7422 intptr_t num_variables() const { return untag()->num_variables_; }
7423
7424 TokenPosition TokenIndexAt(intptr_t scope_index) const;
7425 void SetTokenIndexAt(intptr_t scope_index, TokenPosition token_pos) const;
7426
7427 TokenPosition DeclarationTokenIndexAt(intptr_t scope_index) const;
7428 void SetDeclarationTokenIndexAt(intptr_t scope_index,
7429 TokenPosition declaration_token_pos) const;
7430
7431 StringPtr NameAt(intptr_t scope_index) const;
7432 void SetNameAt(intptr_t scope_index, const String& name) const;
7433
7434 void ClearFlagsAt(intptr_t scope_index) const;
7435
7436 intptr_t LateInitOffsetAt(intptr_t scope_index) const;
7437 void SetLateInitOffsetAt(intptr_t scope_index,
7438 intptr_t late_init_offset) const;
7439
7440#define DECLARE_FLAG_ACCESSORS(Name) \
7441 bool Is##Name##At(intptr_t scope_index) const; \
7442 void SetIs##Name##At(intptr_t scope_index, bool value) const;
7443
7444 CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(DECLARE_FLAG_ACCESSORS)
7445#undef DECLARE_FLAG_ACCESSORS
7446
7447 AbstractTypePtr TypeAt(intptr_t scope_index) const;
7448 void SetTypeAt(intptr_t scope_index, const AbstractType& type) const;
7449
7450 InstancePtr ConstValueAt(intptr_t scope_index) const;
7451 void SetConstValueAt(intptr_t scope_index, const Instance& value) const;
7452
7453 intptr_t ContextIndexAt(intptr_t scope_index) const;
7454 void SetContextIndexAt(intptr_t scope_index, intptr_t context_index) const;
7455
7456 intptr_t ContextLevelAt(intptr_t scope_index) const;
7457 void SetContextLevelAt(intptr_t scope_index, intptr_t context_level) const;
7458
7459 intptr_t KernelOffsetAt(intptr_t scope_index) const;
7460 void SetKernelOffsetAt(intptr_t scope_index, intptr_t kernel_offset) const;
7461
7462 static constexpr intptr_t kBytesPerElement =
7463 sizeof(UntaggedContextScope::VariableDesc);
7464 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
7465
7466 struct ArrayTraits {
7467 static intptr_t elements_start_offset() {
7468 return sizeof(UntaggedContextScope);
7469 }
7470 static constexpr intptr_t kElementSize = kBytesPerElement;
7471 };
7472
7473 static intptr_t InstanceSize() {
7474 ASSERT(sizeof(UntaggedContextScope) ==
7475 OFFSET_OF_RETURNED_VALUE(UntaggedContextScope, data));
7476 return 0;
7477 }
7478
7479 static intptr_t InstanceSize(intptr_t len) {
7480 ASSERT(0 <= len && len <= kMaxElements);
7481 return RoundedAllocationSize(size: sizeof(UntaggedContextScope) +
7482 (len * kBytesPerElement));
7483 }
7484
7485 static ContextScopePtr New(intptr_t num_variables, bool is_implicit);
7486
7487 private:
7488 void set_num_variables(intptr_t num_variables) const {
7489 StoreNonPointer(addr: &untag()->num_variables_, value: num_variables);
7490 }
7491
7492 void set_is_implicit(bool is_implicit) const {
7493 StoreNonPointer(addr: &untag()->is_implicit_, value: is_implicit);
7494 }
7495
7496 const UntaggedContextScope::VariableDesc* VariableDescAddr(
7497 intptr_t index) const {
7498 ASSERT((index >= 0) && (index < num_variables()));
7499 return untag()->VariableDescAddr(index);
7500 }
7501
7502 bool GetFlagAt(intptr_t scope_index, intptr_t bit_index) const;
7503 void SetFlagAt(intptr_t scope_index, intptr_t bit_index, bool value) const;
7504
7505 FINAL_HEAP_OBJECT_IMPLEMENTATION(ContextScope, Object);
7506 friend class Class;
7507 friend class Object;
7508};
7509
7510// Class of special sentinel values:
7511// - Object::sentinel() is a value that cannot be produced by Dart code.
7512// It can be used to mark special values, for example to distinguish
7513// "uninitialized" fields.
7514// - Object::transition_sentinel() is a value marking that we are transitioning
7515// from sentinel, e.g., computing a field value. Used to detect circular
7516// initialization of static fields.
7517// - Object::unknown_constant() and Object::non_constant() are optimizing
7518// compiler's constant propagation constants.
7519// - Object::optimized_out() result from deopt environment pruning or failure
7520// to capture variables in a closure's context
7521class Sentinel : public Object {
7522 public:
7523 static intptr_t InstanceSize() {
7524 return RoundedAllocationSize(size: sizeof(UntaggedSentinel));
7525 }
7526
7527 static SentinelPtr New();
7528
7529 private:
7530 FINAL_HEAP_OBJECT_IMPLEMENTATION(Sentinel, Object);
7531 friend class Class;
7532 friend class Object;
7533};
7534
7535class MegamorphicCache : public CallSiteData {
7536 public:
7537 static constexpr intptr_t kInitialCapacity = 16;
7538 static constexpr intptr_t kSpreadFactor = 7;
7539 static constexpr double kLoadFactor = 0.50;
7540
7541 enum EntryType {
7542 kClassIdIndex,
7543 kTargetFunctionIndex,
7544 kEntryLength,
7545 };
7546
7547 ArrayPtr buckets() const;
7548 void set_buckets(const Array& buckets) const;
7549
7550 intptr_t mask() const;
7551 void set_mask(intptr_t mask) const;
7552
7553 intptr_t filled_entry_count() const;
7554 void set_filled_entry_count(intptr_t num) const;
7555
7556 static intptr_t buckets_offset() {
7557 return OFFSET_OF(UntaggedMegamorphicCache, buckets_);
7558 }
7559 static intptr_t mask_offset() {
7560 return OFFSET_OF(UntaggedMegamorphicCache, mask_);
7561 }
7562 static intptr_t arguments_descriptor_offset() {
7563 return OFFSET_OF(UntaggedMegamorphicCache, args_descriptor_);
7564 }
7565
7566 static MegamorphicCachePtr New(const String& target_name,
7567 const Array& arguments_descriptor);
7568
7569 void EnsureContains(const Smi& class_id, const Object& target) const;
7570 ObjectPtr Lookup(const Smi& class_id) const;
7571
7572 static intptr_t InstanceSize() {
7573 return RoundedAllocationSize(size: sizeof(UntaggedMegamorphicCache));
7574 }
7575
7576 private:
7577 friend class Class;
7578 friend class MegamorphicCacheTable;
7579 friend class ProgramVisitor;
7580
7581 static MegamorphicCachePtr New();
7582
7583 // The caller must hold IsolateGroup::type_feedback_mutex().
7584 void InsertLocked(const Smi& class_id, const Object& target) const;
7585 void EnsureCapacityLocked() const;
7586 ObjectPtr LookupLocked(const Smi& class_id) const;
7587
7588 void InsertEntryLocked(const Smi& class_id, const Object& target) const;
7589
7590 static inline void SetEntry(const Array& array,
7591 intptr_t index,
7592 const Smi& class_id,
7593 const Object& target);
7594
7595 static inline ObjectPtr GetClassId(const Array& array, intptr_t index);
7596 static inline ObjectPtr GetTargetFunction(const Array& array, intptr_t index);
7597
7598 FINAL_HEAP_OBJECT_IMPLEMENTATION(MegamorphicCache, CallSiteData);
7599};
7600
7601class SubtypeTestCache : public Object {
7602 public:
7603 // The contents of the backing array storage is a number of entry tuples.
7604 // Any entry that is unoccupied has the null value as its first component.
7605 //
7606 // If the cache is linear, the entries can be accessed in a linear fashion:
7607 // all occupied entries come first, followed by at least one unoccupied
7608 // entry to mark the end of the cache. Guaranteeing at least one unoccupied
7609 // entry avoids the need for a length check when iterating over the contents
7610 // of the linear cache in stubs.
7611 //
7612 // If the cache is hash-based, the array is instead treated as a hash table
7613 // probed by using a hash value derived from the inputs.
7614
7615 // The tuple of values stored in a given entry.
7616 //
7617 // Note that occupied entry contents are never modified. That means reading a
7618 // non-null instance cid or signature means the rest of the entry can be
7619 // loaded without worrying about concurrent modification. Thus, we always set
7620 // the instance cid or signature last when making an occupied entry.
7621 //
7622 // Also note that each STC, when created, has a set number of used inputs.
7623 // The value of any unused input is unspecified, so for example, if the
7624 // STC only uses 3 inputs, then no assumptions can be made about the value
7625 // stored in the instantiator type arguments slot.
7626 enum Entries {
7627 kInstanceCidOrSignature = 0,
7628 kInstanceTypeArguments = 1,
7629 kInstantiatorTypeArguments = 2,
7630 kFunctionTypeArguments = 3,
7631 kInstanceParentFunctionTypeArguments = 4,
7632 kInstanceDelayedFunctionTypeArguments = 5,
7633 kDestinationType = 6,
7634 kTestResult = 7,
7635 kTestEntryLength = 8,
7636 };
7637
7638 // Assumes only one non-input entry in the array, kTestResult.
7639 static_assert(kInstanceCidOrSignature == 0 &&
7640 kDestinationType + 1 == kTestResult &&
7641 kTestResult + 1 == kTestEntryLength,
7642 "Need to adjust number of max inputs");
7643 static constexpr intptr_t kMaxInputs = kTestResult;
7644
7645 // Returns the number of occupied entries stored in the cache.
7646 intptr_t NumberOfChecks() const;
7647
7648 // Retrieves the number of entries (occupied or unoccupied) in the cache.
7649 intptr_t NumEntries() const;
7650
7651 // Adds a check, returning the index of the new entry in the cache.
7652 intptr_t AddCheck(
7653 const Object& instance_class_id_or_signature,
7654 const AbstractType& destination_type,
7655 const TypeArguments& instance_type_arguments,
7656 const TypeArguments& instantiator_type_arguments,
7657 const TypeArguments& function_type_arguments,
7658 const TypeArguments& instance_parent_function_type_arguments,
7659 const TypeArguments& instance_delayed_type_arguments,
7660 const Bool& test_result) const;
7661 void GetCheck(intptr_t ix,
7662 Object* instance_class_id_or_signature,
7663 AbstractType* destination_type,
7664 TypeArguments* instance_type_arguments,
7665 TypeArguments* instantiator_type_arguments,
7666 TypeArguments* function_type_arguments,
7667 TypeArguments* instance_parent_function_type_arguments,
7668 TypeArguments* instance_delayed_type_arguments,
7669 Bool* test_result) const;
7670
7671 // Like GetCheck(), but does not require the subtype test cache mutex and so
7672 // may see an outdated view of the cache.
7673 void GetCurrentCheck(intptr_t ix,
7674 Object* instance_class_id_or_signature,
7675 AbstractType* destination_type,
7676 TypeArguments* instance_type_arguments,
7677 TypeArguments* instantiator_type_arguments,
7678 TypeArguments* function_type_arguments,
7679 TypeArguments* instance_parent_function_type_arguments,
7680 TypeArguments* instance_delayed_type_arguments,
7681 Bool* test_result) const;
7682
7683 // Like GetCheck(), but returns the contents of the first occupied entry
7684 // at or after the initial contents of [ix]. Returns whether an occupied entry
7685 // was found, and if an occupied entry was found, [ix] is updated to the entry
7686 // index following the occupied entry.
7687 bool GetNextCheck(intptr_t* ix,
7688 Object* instance_class_id_or_signature,
7689 AbstractType* destination_type,
7690 TypeArguments* instance_type_arguments,
7691 TypeArguments* instantiator_type_arguments,
7692 TypeArguments* function_type_arguments,
7693 TypeArguments* instance_parent_function_type_arguments,
7694 TypeArguments* instance_delayed_type_arguments,
7695 Bool* test_result) const;
7696
7697 // Returns whether all the elements of an existing cache entry, excluding
7698 // the result, match the non-pointer arguments. The pointer arguments are
7699 // out parameters as follows:
7700 //
7701 // If [index] is not nullptr, then it is set to the matching entry's index.
7702 // If [result] is not nullptr, then it is set to the matching entry's result.
7703 //
7704 // If called without the STC mutex lock, may return outdated information:
7705 // * May return a false negative if the entry was added concurrently.
7706 // * The [index] field may be invalid for the STC if the backing array is
7707 // grown concurrently and the new backing array is hash-based.
7708 bool HasCheck(const Object& instance_class_id_or_signature,
7709 const AbstractType& destination_type,
7710 const TypeArguments& instance_type_arguments,
7711 const TypeArguments& instantiator_type_arguments,
7712 const TypeArguments& function_type_arguments,
7713 const TypeArguments& instance_parent_function_type_arguments,
7714 const TypeArguments& instance_delayed_type_arguments,
7715 intptr_t* index,
7716 Bool* result) const;
7717
7718 // Writes the cache entry at index [index] to the given text buffer.
7719 //
7720 // The output is comma separated on a single line if [line_prefix] is nullptr,
7721 // otherwise line breaks followed by [line_prefix] is used as a separator.
7722 void WriteEntryToBuffer(Zone* zone,
7723 BaseTextBuffer* buffer,
7724 intptr_t index,
7725 const char* line_prefix = nullptr) const;
7726
7727 // Writes the contents of this SubtypeTestCache to the given text buffer.
7728 void WriteToBuffer(Zone* zone,
7729 BaseTextBuffer* buffer,
7730 const char* line_prefix = nullptr) const;
7731
7732 void Reset() const;
7733
7734 // Tests that [other] contains the same entries in the same order.
7735 bool Equals(const SubtypeTestCache& other) const;
7736
7737 // Returns whether the cache backed by the given storage is hash-based.
7738 bool IsHash() const;
7739
7740 // Creates a separate copy of the current STC contents.
7741 SubtypeTestCachePtr Copy(Thread* thread) const;
7742
7743 static SubtypeTestCachePtr New(intptr_t num_inputs);
7744
7745 static intptr_t InstanceSize() {
7746 return RoundedAllocationSize(size: sizeof(UntaggedSubtypeTestCache));
7747 }
7748
7749 static intptr_t cache_offset() {
7750 return OFFSET_OF(UntaggedSubtypeTestCache, cache_);
7751 }
7752 ArrayPtr cache() const;
7753
7754 static intptr_t num_inputs_offset() {
7755 return OFFSET_OF(UntaggedSubtypeTestCache, num_inputs_);
7756 }
7757 intptr_t num_inputs() const { return untag()->num_inputs_; }
7758
7759 intptr_t num_occupied() const { return untag()->num_occupied_; }
7760
7761 // The maximum number of occupied entries for a linear subtype test cache
7762 // before swapping to a hash table-based cache. Exposed publicly for tests.
7763#if defined(TARGET_ARCH_IA32)
7764 // We don't generate hash cache probing in the stub on IA32, so larger caches
7765 // force runtime checks.
7766 static constexpr intptr_t kMaxLinearCacheEntries = 100;
7767#else
7768 static constexpr intptr_t kMaxLinearCacheEntries = 30;
7769#endif
7770
7771 // Whether the entry at the given index in the cache is occupied. Exposed
7772 // publicly for tests.
7773 bool IsOccupied(intptr_t index) const;
7774
7775 // Returns the number of inputs needed to cache entries for the given type.
7776 static intptr_t UsedInputsForType(const AbstractType& type);
7777
7778 // Given a minimum entry count, calculates an entry count that won't force
7779 // additional allocation but minimizes the number of unoccupied entries.
7780 // Used to calculate an appropriate value for FLAG_max_subtype_cache_entries.
7781 static constexpr intptr_t MaxEntriesForCacheAllocatedFor(intptr_t count) {
7782 // If the cache would be linear, just return the count unchanged.
7783 if (count <= kMaxLinearCacheEntries) return count;
7784 intptr_t allocated_entries = Utils::RoundUpToPowerOfTwo(x: count);
7785 if (LoadFactor(occupied: count, capacity: allocated_entries) >= kMaxLoadFactor) {
7786 allocated_entries *= 2;
7787 }
7788 const intptr_t max_entries =
7789 static_cast<intptr_t>(kMaxLoadFactor * allocated_entries);
7790 assert(LoadFactor(max_entries, allocated_entries) < kMaxLoadFactor);
7791 assert(max_entries >= count);
7792 return max_entries;
7793 }
7794
7795 private:
7796 static constexpr double LoadFactor(intptr_t occupied, intptr_t capacity) {
7797 return occupied / static_cast<double>(capacity);
7798 }
7799
7800 // Retrieves the number of entries (occupied or unoccupied) in a cache
7801 // backed by the given array.
7802 static intptr_t NumEntries(const Array& array);
7803
7804 // Returns whether the cache backed by the given storage is linear.
7805 static bool IsLinear(const Array& array) { return !IsHash(array); }
7806
7807 // Returns whether the cache backed by the given storage is hash-based.
7808 static bool IsHash(const Array& array);
7809
7810 struct KeyLocation {
7811 // The entry index if [present] is true, otherwise where the entry would
7812 // be located if added afterwards without any intermediate additions.
7813 intptr_t entry;
7814 bool present; // Whether an entry already exists in the cache.
7815 };
7816
7817 // If a cache entry in the given array contains the given inputs, returns a
7818 // KeyLocation with the index of the entry and true. Otherwise, returns a
7819 // KeyLocation with the index that would be used if the instantiation for the
7820 // given type arguments is added and false.
7821 //
7822 // If called without the STC mutex lock, may return outdated information:
7823 // * The [present] field may be a false negative if the entry was added
7824 // concurrently.
7825 static KeyLocation FindKeyOrUnused(
7826 const Array& array,
7827 intptr_t num_inputs,
7828 const Object& instance_class_id_or_signature,
7829 const AbstractType& destination_type,
7830 const TypeArguments& instance_type_arguments,
7831 const TypeArguments& instantiator_type_arguments,
7832 const TypeArguments& function_type_arguments,
7833 const TypeArguments& instance_parent_function_type_arguments,
7834 const TypeArguments& instance_delayed_type_arguments);
7835
7836 // If the given array can contain the requested number of entries, returns
7837 // the same array and sets [was_grown] to false.
7838 //
7839 // If the given array cannot contain the requested number of entries,
7840 // returns a new array that can and which contains all the entries of the
7841 // given array and sets [was_grown] to true.
7842 ArrayPtr EnsureCapacity(Zone* zone,
7843 const Array& array,
7844 intptr_t new_capacity,
7845 bool* was_grown) const;
7846
7847 public: // Used in the StubCodeCompiler.
7848 // The maximum size of the array backing a linear cache. All hash based
7849 // caches are guaranteed to have sizes larger than this.
7850 static constexpr intptr_t kMaxLinearCacheSize =
7851 (kMaxLinearCacheEntries + 1) * kTestEntryLength;
7852
7853 private:
7854 // The initial number of entries used when converting from a linear to
7855 // a hash-based cache.
7856 static constexpr intptr_t kNumInitialHashCacheEntries =
7857 Utils::RoundUpToPowerOfTwo(x: 2 * kMaxLinearCacheEntries);
7858 static_assert(Utils::IsPowerOfTwo(x: kNumInitialHashCacheEntries),
7859 "number of hash-based cache entries must be a power of two");
7860
7861 // The max load factor allowed in hash-based caches.
7862 static constexpr double kMaxLoadFactor = 0.71;
7863
7864 void set_cache(const Array& value) const;
7865 void set_num_occupied(intptr_t value) const;
7866
7867 // Like GetCurrentCheck, but takes the backing storage array.
7868 static void GetCheckFromArray(
7869 const Array& array,
7870 intptr_t num_inputs,
7871 intptr_t ix,
7872 Object* instance_class_id_or_signature,
7873 AbstractType* destination_type,
7874 TypeArguments* instance_type_arguments,
7875 TypeArguments* instantiator_type_arguments,
7876 TypeArguments* function_type_arguments,
7877 TypeArguments* instance_parent_function_type_arguments,
7878 TypeArguments* instance_delayed_type_arguments,
7879 Bool* test_result);
7880
7881 // Like WriteEntryToBuffer(), but does not require the subtype test cache
7882 // mutex and so may see an incorrect view of the cache if there are concurrent
7883 // modifications.
7884 void WriteCurrentEntryToBuffer(Zone* zone,
7885 BaseTextBuffer* buffer,
7886 intptr_t index,
7887 const char* line_prefix = nullptr) const;
7888
7889 // Like WriteToBuffer(), but does not require the subtype test cache mutex and
7890 // so may see an incorrect view of the cache if there are concurrent
7891 // modifications.
7892 void WriteToBufferUnlocked(Zone* zone,
7893 BaseTextBuffer* buffer,
7894 const char* line_prefix = nullptr) const;
7895
7896 FINAL_HEAP_OBJECT_IMPLEMENTATION(SubtypeTestCache, Object);
7897 friend class Class;
7898 friend class FieldInvalidator;
7899 friend class VMSerializationRoots;
7900 friend class VMDeserializationRoots;
7901};
7902
7903class LoadingUnit : public Object {
7904 public:
7905 static constexpr intptr_t kIllegalId = 0;
7906 COMPILE_ASSERT(kIllegalId == WeakTable::kNoValue);
7907 static constexpr intptr_t kRootId = 1;
7908
7909 static LoadingUnitPtr New();
7910
7911 static intptr_t InstanceSize() {
7912 return RoundedAllocationSize(size: sizeof(UntaggedLoadingUnit));
7913 }
7914
7915 static intptr_t LoadingUnitOf(const Function& function);
7916 static intptr_t LoadingUnitOf(const Code& code);
7917
7918 LoadingUnitPtr parent() const;
7919 void set_parent(const LoadingUnit& value) const;
7920
7921 ArrayPtr base_objects() const;
7922 void set_base_objects(const Array& value) const;
7923
7924 intptr_t id() const { return untag()->id_; }
7925 void set_id(intptr_t id) const { StoreNonPointer(addr: &untag()->id_, value: id); }
7926
7927 // True once the VM deserializes this unit's snapshot.
7928 bool loaded() const { return untag()->loaded_; }
7929 void set_loaded(bool value) const {
7930 StoreNonPointer(addr: &untag()->loaded_, value);
7931 }
7932
7933 // True once the VM invokes the embedder's deferred load callback until the
7934 // embedder calls Dart_DeferredLoadComplete[Error].
7935 bool load_outstanding() const { return untag()->load_outstanding_; }
7936 void set_load_outstanding(bool value) const {
7937 StoreNonPointer(addr: &untag()->load_outstanding_, value);
7938 }
7939
7940 ObjectPtr IssueLoad() const;
7941 ObjectPtr CompleteLoad(const String& error_message,
7942 bool transient_error) const;
7943
7944 private:
7945 FINAL_HEAP_OBJECT_IMPLEMENTATION(LoadingUnit, Object);
7946 friend class Class;
7947};
7948
// Abstract base class of all VM-level error objects (ApiError, LanguageError,
// UnhandledException, UnwindError).
class Error : public Object {
 public:
  // Returns a C string describing this error; overridden by subclasses.
  virtual const char* ToErrorCString() const;

 private:
  HEAP_OBJECT_IMPLEMENTATION(Error, Object);
};
7956
7957class ApiError : public Error {
7958 public:
7959 StringPtr message() const { return untag()->message(); }
7960
7961 static intptr_t InstanceSize() {
7962 return RoundedAllocationSize(size: sizeof(UntaggedApiError));
7963 }
7964
7965 static ApiErrorPtr New(const String& message, Heap::Space space = Heap::kNew);
7966
7967 virtual const char* ToErrorCString() const;
7968
7969 private:
7970 void set_message(const String& message) const;
7971
7972 static ApiErrorPtr New();
7973
7974 FINAL_HEAP_OBJECT_IMPLEMENTATION(ApiError, Error);
7975 friend class Class;
7976};
7977
7978class LanguageError : public Error {
7979 public:
7980 Report::Kind kind() const {
7981 return static_cast<Report::Kind>(untag()->kind_);
7982 }
7983
7984 // Build, cache, and return formatted message.
7985 StringPtr FormatMessage() const;
7986
7987 static intptr_t InstanceSize() {
7988 return RoundedAllocationSize(size: sizeof(UntaggedLanguageError));
7989 }
7990
7991 // A null script means no source and a negative token_pos means no position.
7992 static LanguageErrorPtr NewFormatted(const Error& prev_error,
7993 const Script& script,
7994 TokenPosition token_pos,
7995 bool report_after_token,
7996 Report::Kind kind,
7997 Heap::Space space,
7998 const char* format,
7999 ...) PRINTF_ATTRIBUTE(7, 8);
8000
8001 static LanguageErrorPtr NewFormattedV(const Error& prev_error,
8002 const Script& script,
8003 TokenPosition token_pos,
8004 bool report_after_token,
8005 Report::Kind kind,
8006 Heap::Space space,
8007 const char* format,
8008 va_list args);
8009
8010 static LanguageErrorPtr New(const String& formatted_message,
8011 Report::Kind kind = Report::kError,
8012 Heap::Space space = Heap::kNew);
8013
8014 virtual const char* ToErrorCString() const;
8015
8016 TokenPosition token_pos() const { return untag()->token_pos_; }
8017
8018 private:
8019 ErrorPtr previous_error() const { return untag()->previous_error(); }
8020 void set_previous_error(const Error& value) const;
8021
8022 ScriptPtr script() const { return untag()->script(); }
8023 void set_script(const Script& value) const;
8024
8025 void set_token_pos(TokenPosition value) const;
8026
8027 bool report_after_token() const { return untag()->report_after_token_; }
8028 void set_report_after_token(bool value) const;
8029
8030 void set_kind(uint8_t value) const;
8031
8032 StringPtr message() const { return untag()->message(); }
8033 void set_message(const String& value) const;
8034
8035 StringPtr formatted_message() const { return untag()->formatted_message(); }
8036 void set_formatted_message(const String& value) const;
8037
8038 static LanguageErrorPtr New();
8039
8040 FINAL_HEAP_OBJECT_IMPLEMENTATION(LanguageError, Error);
8041 friend class Class;
8042};
8043
8044class UnhandledException : public Error {
8045 public:
8046 InstancePtr exception() const { return untag()->exception(); }
8047 static intptr_t exception_offset() {
8048 return OFFSET_OF(UntaggedUnhandledException, exception_);
8049 }
8050
8051 InstancePtr stacktrace() const { return untag()->stacktrace(); }
8052 static intptr_t stacktrace_offset() {
8053 return OFFSET_OF(UntaggedUnhandledException, stacktrace_);
8054 }
8055
8056 static intptr_t InstanceSize() {
8057 return RoundedAllocationSize(size: sizeof(UntaggedUnhandledException));
8058 }
8059
8060 static UnhandledExceptionPtr New(const Instance& exception,
8061 const Instance& stacktrace,
8062 Heap::Space space = Heap::kNew);
8063
8064 virtual const char* ToErrorCString() const;
8065
8066 private:
8067 static UnhandledExceptionPtr New(Heap::Space space = Heap::kNew);
8068
8069 void set_exception(const Instance& exception) const;
8070 void set_stacktrace(const Instance& stacktrace) const;
8071
8072 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnhandledException, Error);
8073 friend class Class;
8074 friend class ObjectStore;
8075};
8076
8077class UnwindError : public Error {
8078 public:
8079 bool is_user_initiated() const { return untag()->is_user_initiated_; }
8080 void set_is_user_initiated(bool value) const;
8081
8082 StringPtr message() const { return untag()->message(); }
8083
8084 static intptr_t InstanceSize() {
8085 return RoundedAllocationSize(size: sizeof(UntaggedUnwindError));
8086 }
8087
8088 static UnwindErrorPtr New(const String& message,
8089 Heap::Space space = Heap::kNew);
8090
8091 virtual const char* ToErrorCString() const;
8092
8093 private:
8094 void set_message(const String& message) const;
8095
8096 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnwindError, Error);
8097 friend class Class;
8098};
8099
8100// Instance is the base class for all instance objects (aka the Object class
8101// in Dart source code.
8102class Instance : public Object {
8103 public:
8104 // Equality and identity testing.
8105 // 1. OperatorEquals: true iff 'this == other' is true in Dart code.
8106 // 2. IsIdenticalTo: true iff 'identical(this, other)' is true in Dart code.
8107 // 3. CanonicalizeEquals: used to canonicalize compile-time constants, e.g.,
8108 // using bitwise equality of fields and list elements.
8109 // Subclasses where 1 and 3 coincide may also define a plain Equals, e.g.,
8110 // String and Integer.
8111 virtual bool OperatorEquals(const Instance& other) const;
8112 bool IsIdenticalTo(const Instance& other) const;
8113 virtual bool CanonicalizeEquals(const Instance& other) const;
8114 virtual uint32_t CanonicalizeHash() const;
8115
8116 intptr_t SizeFromClass() const {
8117#if defined(DEBUG)
8118 const Class& cls = Class::Handle(clazz());
8119 ASSERT(cls.is_finalized() || cls.is_prefinalized());
8120#endif
8121 return (clazz()->untag()->host_instance_size_in_words_ *
8122 kCompressedWordSize);
8123 }
8124
8125 InstancePtr Canonicalize(Thread* thread) const;
8126 // Caller must hold IsolateGroup::constant_canonicalization_mutex_.
8127 virtual InstancePtr CanonicalizeLocked(Thread* thread) const;
8128 virtual void CanonicalizeFieldsLocked(Thread* thread) const;
8129
8130 InstancePtr CopyShallowToOldSpace(Thread* thread) const;
8131
8132 ObjectPtr GetField(const Field& field) const;
8133
8134 void SetField(const Field& field, const Object& value) const;
8135
8136 AbstractTypePtr GetType(Heap::Space space) const;
8137
8138 // Access the type arguments vector of this [Instance].
8139 // This vector includes type arguments corresponding to type parameters of
8140 // instance's class and all its superclasses.
8141 virtual TypeArgumentsPtr GetTypeArguments() const;
8142 virtual void SetTypeArguments(const TypeArguments& value) const;
8143
8144 // Check if the type of this instance is a subtype of the given other type.
8145 // The type argument vectors are used to instantiate the other type if needed.
8146 bool IsInstanceOf(const AbstractType& other,
8147 const TypeArguments& other_instantiator_type_arguments,
8148 const TypeArguments& other_function_type_arguments) const;
8149
8150 // Check if this instance is assignable to the given other type.
8151 // The type argument vectors are used to instantiate the other type if needed.
8152 bool IsAssignableTo(const AbstractType& other,
8153 const TypeArguments& other_instantiator_type_arguments,
8154 const TypeArguments& other_function_type_arguments) const;
8155
8156 // Return true if the null instance can be assigned to a variable of [other]
8157 // type. Return false if null cannot be assigned or we cannot tell (if
8158 // [other] is a type parameter in NNBD strong mode). Only used for checks at
8159 // compile time.
8160 static bool NullIsAssignableTo(const AbstractType& other);
8161
8162 // Return true if the null instance can be assigned to a variable of [other]
8163 // type. Return false if null cannot be assigned. Used for checks at runtime,
8164 // when the instantiator and function type argument vectors are available.
8165 static bool NullIsAssignableTo(
8166 const AbstractType& other,
8167 const TypeArguments& other_instantiator_type_arguments,
8168 const TypeArguments& other_function_type_arguments);
8169
8170 bool IsValidNativeIndex(int index) const {
8171 return ((index >= 0) && (index < clazz()->untag()->num_native_fields_));
8172 }
8173
8174 intptr_t* NativeFieldsDataAddr() const;
8175 inline intptr_t GetNativeField(int index) const;
8176 inline void GetNativeFields(uint16_t num_fields,
8177 intptr_t* field_values) const;
8178 void SetNativeFields(uint16_t num_fields, const intptr_t* field_values) const;
8179
8180 uint16_t NumNativeFields() const {
8181 return clazz()->untag()->num_native_fields_;
8182 }
8183
8184 void SetNativeField(int index, intptr_t value) const;
8185
8186 // If the instance is a callable object, i.e. a closure or the instance of a
8187 // class implementing a 'call' method, return true and set the function
8188 // (if not nullptr) to call.
8189 bool IsCallable(Function* function) const;
8190
8191 ObjectPtr Invoke(const String& selector,
8192 const Array& arguments,
8193 const Array& argument_names,
8194 bool respect_reflectable = true,
8195 bool check_is_entrypoint = false) const;
8196 ObjectPtr InvokeGetter(const String& selector,
8197 bool respect_reflectable = true,
8198 bool check_is_entrypoint = false) const;
8199 ObjectPtr InvokeSetter(const String& selector,
8200 const Instance& argument,
8201 bool respect_reflectable = true,
8202 bool check_is_entrypoint = false) const;
8203
8204 ObjectPtr EvaluateCompiledExpression(
8205 const Class& klass,
8206 const ExternalTypedData& kernel_buffer,
8207 const Array& type_definitions,
8208 const Array& arguments,
8209 const TypeArguments& type_arguments) const;
8210
8211 // Evaluate the given expression as if it appeared in an instance method of
8212 // [receiver] and return the resulting value, or an error object if
8213 // evaluating the expression fails. The method has the formal (type)
8214 // parameters given in (type_)param_names, and is invoked with the (type)
8215 // argument values given in (type_)param_values.
8216 //
8217 // We allow [receiver] to be null/<optimized out> if
8218 // * the evaluation function doesn't access `this`
8219 // * the evaluation function is static
8220 static ObjectPtr EvaluateCompiledExpression(
8221 Thread* thread,
8222 const Object& receiver,
8223 const Library& library,
8224 const Class& klass,
8225 const ExternalTypedData& kernel_buffer,
8226 const Array& type_definitions,
8227 const Array& param_values,
8228 const TypeArguments& type_param_values);
8229
8230 // Equivalent to invoking hashCode on this instance.
8231 virtual ObjectPtr HashCode() const;
8232
8233 // Equivalent to invoking identityHashCode with this instance.
8234 IntegerPtr IdentityHashCode(Thread* thread) const;
8235
8236 static intptr_t InstanceSize() {
8237 return RoundedAllocationSize(size: sizeof(UntaggedInstance));
8238 }
8239
8240 static InstancePtr New(const Class& cls, Heap::Space space = Heap::kNew);
8241 static InstancePtr NewAlreadyFinalized(const Class& cls,
8242 Heap::Space space = Heap::kNew);
8243
8244 // Array/list element address computations.
8245 static intptr_t DataOffsetFor(intptr_t cid);
8246 static intptr_t ElementSizeFor(intptr_t cid);
8247
8248 // Pointers may be subtyped, but their subtypes may not get extra fields.
8249 // The subtype runtime representation has exactly the same object layout,
8250 // only the class_id is different. So, it is safe to use subtype instances in
8251 // Pointer handles.
8252 virtual bool IsPointer() const;
8253
8254 static intptr_t NextFieldOffset() { return sizeof(UntaggedInstance); }
8255
8256 static intptr_t NativeFieldsOffset() { return sizeof(UntaggedObject); }
8257
8258 protected:
8259#ifndef PRODUCT
8260 virtual void PrintSharedInstanceJSON(JSONObject* jsobj,
8261 bool ref,
8262 bool include_id = true) const;
8263#endif
8264
8265 private:
8266 // Return true if the runtimeType of this instance is a subtype of other type.
8267 bool RuntimeTypeIsSubtypeOf(
8268 const AbstractType& other,
8269 const TypeArguments& other_instantiator_type_arguments,
8270 const TypeArguments& other_function_type_arguments) const;
8271
8272 // Returns true if the type of this instance is a subtype of FutureOr<T>
8273 // specified by instantiated type 'other'.
8274 // Returns false if other type is not a FutureOr.
8275 bool RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
8276 const AbstractType& other) const;
8277
8278 // Return true if the null instance is an instance of other type.
8279 static bool NullIsInstanceOf(
8280 const AbstractType& other,
8281 const TypeArguments& other_instantiator_type_arguments,
8282 const TypeArguments& other_function_type_arguments);
8283
8284 CompressedObjectPtr* FieldAddrAtOffset(intptr_t offset) const {
8285 ASSERT(IsValidFieldOffset(offset));
8286 return reinterpret_cast<CompressedObjectPtr*>(raw_value() - kHeapObjectTag +
8287 offset);
8288 }
8289 CompressedObjectPtr* FieldAddr(const Field& field) const {
8290 return FieldAddrAtOffset(offset: field.HostOffset());
8291 }
8292 CompressedObjectPtr* NativeFieldsAddr() const {
8293 return FieldAddrAtOffset(offset: sizeof(UntaggedObject));
8294 }
8295 void SetFieldAtOffset(intptr_t offset, const Object& value) const {
8296 StoreCompressedPointer(addr: FieldAddrAtOffset(offset), value: value.ptr());
8297 }
8298 bool IsValidFieldOffset(intptr_t offset) const;
8299
8300 // The following raw methods are used for morphing.
8301 // They are needed due to the extraction of the class in IsValidFieldOffset.
8302 CompressedObjectPtr* RawFieldAddrAtOffset(intptr_t offset) const {
8303 return reinterpret_cast<CompressedObjectPtr*>(raw_value() - kHeapObjectTag +
8304 offset);
8305 }
8306 ObjectPtr RawGetFieldAtOffset(intptr_t offset) const {
8307 return RawFieldAddrAtOffset(offset)->Decompress(heap_base: untag()->heap_base());
8308 }
8309 void RawSetFieldAtOffset(intptr_t offset, const Object& value) const {
8310 StoreCompressedPointer(addr: RawFieldAddrAtOffset(offset), value: value.ptr());
8311 }
8312 void RawSetFieldAtOffset(intptr_t offset, ObjectPtr value) const {
8313 StoreCompressedPointer(addr: RawFieldAddrAtOffset(offset), value);
8314 }
8315
8316 template <typename T>
8317 T* RawUnboxedFieldAddrAtOffset(intptr_t offset) const {
8318 return reinterpret_cast<T*>(raw_value() - kHeapObjectTag + offset);
8319 }
8320 template <typename T>
8321 T RawGetUnboxedFieldAtOffset(intptr_t offset) const {
8322 return *RawUnboxedFieldAddrAtOffset<T>(offset);
8323 }
8324 template <typename T>
8325 void RawSetUnboxedFieldAtOffset(intptr_t offset, const T& value) const {
8326 *RawUnboxedFieldAddrAtOffset<T>(offset) = value;
8327 }
8328
8329 // TODO(iposva): Determine if this gets in the way of Smi.
8330 HEAP_OBJECT_IMPLEMENTATION(Instance, Object);
8331 friend class ByteBuffer;
8332 friend class Class;
8333 friend class Closure;
8334 friend class Pointer;
8335 friend class DeferredObject;
8336 friend class FlowGraphSerializer;
8337 friend class FlowGraphDeserializer;
8338 friend class RegExp;
8339 friend class StubCode;
8340 friend class TypedDataView;
8341 friend class InstanceSerializationCluster;
8342 friend class InstanceDeserializationCluster;
8343 friend class ClassDeserializationCluster; // vtable
8344 friend class InstanceMorpher;
8345 friend class Obfuscator; // RawGetFieldAtOffset, RawSetFieldAtOffset
8346};
8347
8348class LibraryPrefix : public Instance {
8349 public:
8350 StringPtr name() const { return untag()->name(); }
8351 virtual StringPtr DictionaryName() const { return name(); }
8352
8353 ArrayPtr imports() const { return untag()->imports(); }
8354 intptr_t num_imports() const { return untag()->num_imports_; }
8355 LibraryPtr importer() const { return untag()->importer(); }
8356
8357 LibraryPtr GetLibrary(int index) const;
8358 void AddImport(const Namespace& import) const;
8359
8360 bool is_deferred_load() const { return untag()->is_deferred_load_; }
8361
8362 static intptr_t InstanceSize() {
8363 return RoundedAllocationSize(size: sizeof(UntaggedLibraryPrefix));
8364 }
8365
8366 static LibraryPrefixPtr New(const String& name,
8367 const Namespace& import,
8368 bool deferred_load,
8369 const Library& importer);
8370
8371 private:
8372 static constexpr int kInitialSize = 2;
8373 static constexpr int kIncrementSize = 2;
8374
8375 void set_name(const String& value) const;
8376 void set_imports(const Array& value) const;
8377 void set_num_imports(intptr_t value) const;
8378 void set_importer(const Library& value) const;
8379
8380 static LibraryPrefixPtr New();
8381
8382 FINAL_HEAP_OBJECT_IMPLEMENTATION(LibraryPrefix, Instance);
8383 friend class Class;
8384};
8385
8386// TypeParameters represents a list of formal type parameters with their bounds
8387// and their default values as calculated by CFE.
8388class TypeParameters : public Object {
8389 public:
8390 intptr_t Length() const;
8391
8392 static intptr_t names_offset() {
8393 return OFFSET_OF(UntaggedTypeParameters, names_);
8394 }
8395 StringPtr NameAt(intptr_t index) const;
8396 void SetNameAt(intptr_t index, const String& value) const;
8397
8398 static intptr_t flags_offset() {
8399 return OFFSET_OF(UntaggedTypeParameters, flags_);
8400 }
8401
8402 static intptr_t bounds_offset() {
8403 return OFFSET_OF(UntaggedTypeParameters, bounds_);
8404 }
8405 AbstractTypePtr BoundAt(intptr_t index) const;
8406 void SetBoundAt(intptr_t index, const AbstractType& value) const;
8407 bool AllDynamicBounds() const;
8408
8409 static intptr_t defaults_offset() {
8410 return OFFSET_OF(UntaggedTypeParameters, defaults_);
8411 }
8412 AbstractTypePtr DefaultAt(intptr_t index) const;
8413 void SetDefaultAt(intptr_t index, const AbstractType& value) const;
8414 bool AllDynamicDefaults() const;
8415
8416 // The isGenericCovariantImpl bits are packed into SMIs in the flags array,
8417 // but omitted if they're 0.
8418 bool IsGenericCovariantImplAt(intptr_t index) const;
8419 void SetIsGenericCovariantImplAt(intptr_t index, bool value) const;
8420
8421 // The number of flags per Smi should be a power of 2 in order to simplify the
8422 // generated code accessing the flags array.
8423#if !defined(DART_COMPRESSED_POINTERS)
8424 static constexpr intptr_t kFlagsPerSmiShift = kBitsPerWordLog2 - 1;
8425#else
8426 static constexpr intptr_t kFlagsPerSmiShift = kBitsPerWordLog2 - 2;
8427#endif
8428 static constexpr intptr_t kFlagsPerSmi = 1LL << kFlagsPerSmiShift;
8429 COMPILE_ASSERT(kFlagsPerSmi < kSmiBits);
8430 static constexpr intptr_t kFlagsPerSmiMask = kFlagsPerSmi - 1;
8431
8432 void Print(Thread* thread,
8433 Zone* zone,
8434 bool are_class_type_parameters,
8435 intptr_t base,
8436 NameVisibility name_visibility,
8437 BaseTextBuffer* printer) const;
8438
8439 static intptr_t InstanceSize() {
8440 return RoundedAllocationSize(size: sizeof(UntaggedTypeParameters));
8441 }
8442
8443 static TypeParametersPtr New(Heap::Space space = Heap::kOld);
8444 static TypeParametersPtr New(intptr_t count, Heap::Space space = Heap::kOld);
8445
8446 private:
8447 ArrayPtr names() const { return untag()->names(); }
8448 void set_names(const Array& value) const;
8449 ArrayPtr flags() const { return untag()->flags(); }
8450 void set_flags(const Array& value) const;
8451 TypeArgumentsPtr bounds() const { return untag()->bounds(); }
8452 void set_bounds(const TypeArguments& value) const;
8453 TypeArgumentsPtr defaults() const { return untag()->defaults(); }
8454 void set_defaults(const TypeArguments& value) const;
8455
8456 // Allocate and initialize the flags array to zero.
8457 void AllocateFlags(Heap::Space space) const;
8458 // Reset the flags array to null if all flags are zero.
8459 void OptimizeFlags() const;
8460
8461 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeParameters, Object);
8462 friend class Class;
8463 friend class ClassFinalizer;
8464 friend class FlowGraphSerializer;
8465 friend class FlowGraphDeserializer;
8466 friend class Function;
8467 friend class FunctionType;
8468 friend class Object;
8469 friend class Precompiler;
8470 friend class Type; // To determine whether to print type arguments.
8471};
8472
8473// A TypeArguments is an array of AbstractType.
8474class TypeArguments : public Instance {
8475 public:
8476 // Hash value for a type argument vector consisting solely of dynamic types.
8477 static constexpr intptr_t kAllDynamicHash = 1;
8478
8479 // Returns whether this TypeArguments vector can be used in a context that
8480 // expects a vector of length [count]. Always true for the null vector.
8481 bool HasCount(intptr_t count) const;
8482 static intptr_t length_offset() {
8483 return OFFSET_OF(UntaggedTypeArguments, length_);
8484 }
8485 intptr_t Length() const;
8486 AbstractTypePtr TypeAt(intptr_t index) const;
8487 AbstractTypePtr TypeAtNullSafe(intptr_t index) const;
8488 static intptr_t types_offset() {
8489 return OFFSET_OF_RETURNED_VALUE(UntaggedTypeArguments, types);
8490 }
8491 static intptr_t type_at_offset(intptr_t index) {
8492 return types_offset() + index * kCompressedWordSize;
8493 }
8494 void SetTypeAt(intptr_t index, const AbstractType& value) const;
8495
8496 struct ArrayTraits {
8497 static intptr_t elements_start_offset() {
8498 return TypeArguments::types_offset();
8499 }
8500
8501 static constexpr intptr_t kElementSize = kCompressedWordSize;
8502 };
8503
8504 // The nullability of a type argument vector represents the nullability of its
8505 // type elements (up to a maximum number of them, i.e. kNullabilityMaxTypes).
8506 // It is used at runtime in some cases (predetermined by the compiler) to
8507 // decide whether the instantiator type arguments (ITA) can be shared instead
8508 // of performing a more costly instantiation of the uninstantiated type
8509 // arguments (UTA).
8510 // The vector nullability is stored as a bit vector (in a Smi field), using
8511 // 2 bits per type:
8512 // - the high bit is set if the type is nullable or legacy.
8513 // - the low bit is set if the type is nullable.
8514 // The nullability is 0 if the vector is longer than kNullabilityMaxTypes.
8515 // The condition evaluated at runtime to decide whether UTA can share ITA is
8516 // (UTA.nullability & ITA.nullability) == UTA.nullability
8517 // Note that this allows for ITA to be longer than UTA (the bit vector must be
8518 // stored in the same order as the corresponding type vector, i.e. with the
8519 // least significant 2 bits representing the nullability of the first type).
8520 static constexpr intptr_t kNullabilityBitsPerType = 2;
8521 static constexpr intptr_t kNullabilityMaxTypes =
8522 kSmiBits / kNullabilityBitsPerType;
8523 static constexpr intptr_t kNonNullableBits = 0;
8524 static constexpr intptr_t kNullableBits = 3;
8525 static constexpr intptr_t kLegacyBits = 2;
8526 intptr_t nullability() const;
8527 static intptr_t nullability_offset() {
8528 return OFFSET_OF(UntaggedTypeArguments, nullability_);
8529 }
8530
8531 // The name of this type argument vector, e.g. "<T, dynamic, List<T>, Smi>".
8532 StringPtr Name() const;
8533
8534 // The name of this type argument vector, e.g. "<T, dynamic, List<T>, int>".
8535 // Names of internal classes are mapped to their public interfaces.
8536 StringPtr UserVisibleName() const;
8537
8538 // Print the internal or public name of a subvector of this type argument
8539 // vector, e.g. "<T, dynamic, List<T>, int>".
8540 void PrintSubvectorName(intptr_t from_index,
8541 intptr_t len,
8542 NameVisibility name_visibility,
8543 BaseTextBuffer* printer) const;
8544 void PrintTo(BaseTextBuffer* printer) const;
8545
8546 // Check if the subvector of length 'len' starting at 'from_index' of this
8547 // type argument vector consists solely of DynamicType.
8548 bool IsRaw(intptr_t from_index, intptr_t len) const {
8549 return IsDynamicTypes(raw_instantiated: false, from_index, len);
8550 }
8551
8552 // Check if this type argument vector would consist solely of DynamicType if
8553 // it was instantiated from both a raw (null) instantiator type arguments and
8554 // a raw (null) function type arguments, i.e. consider each class type
8555 // parameter and function type parameters as it would be first instantiated
8556 // from a vector of dynamic types.
8557 // Consider only a prefix of length 'len'.
8558 bool IsRawWhenInstantiatedFromRaw(intptr_t len) const {
8559 return IsDynamicTypes(raw_instantiated: true, from_index: 0, len);
8560 }
8561
8562 // Return true if this vector contains a non-nullable type.
8563 bool RequireConstCanonicalTypeErasure(Zone* zone,
8564 intptr_t from_index,
8565 intptr_t len) const;
8566
8567 TypeArgumentsPtr Prepend(Zone* zone,
8568 const TypeArguments& other,
8569 intptr_t other_length,
8570 intptr_t total_length) const;
8571
8572 // Concatenate [this] and [other] vectors of type parameters.
8573 TypeArgumentsPtr ConcatenateTypeParameters(Zone* zone,
8574 const TypeArguments& other) const;
8575
8576 // Check if the vectors are equal (they may be null).
8577 bool Equals(const TypeArguments& other) const {
8578 return IsSubvectorEquivalent(other, from_index: 0, len: IsNull() ? 0 : Length(),
8579 kind: TypeEquality::kCanonical);
8580 }
8581
8582 bool IsEquivalent(
8583 const TypeArguments& other,
8584 TypeEquality kind,
8585 FunctionTypeMapping* function_type_equivalence = nullptr) const {
8586 // Make a null vector a vector of dynamic as long as the other vector.
8587 return IsSubvectorEquivalent(other, from_index: 0, len: IsNull() ? other.Length() : Length(),
8588 kind, function_type_equivalence);
8589 }
8590 bool IsSubvectorEquivalent(
8591 const TypeArguments& other,
8592 intptr_t from_index,
8593 intptr_t len,
8594 TypeEquality kind,
8595 FunctionTypeMapping* function_type_equivalence = nullptr) const;
8596
8597 // Check if the vector is instantiated (it must not be null).
8598 bool IsInstantiated(Genericity genericity = kAny,
8599 intptr_t num_free_fun_type_params = kAllFree) const {
8600 return IsSubvectorInstantiated(from_index: 0, len: Length(), genericity,
8601 num_free_fun_type_params);
8602 }
8603 bool IsSubvectorInstantiated(
8604 intptr_t from_index,
8605 intptr_t len,
8606 Genericity genericity = kAny,
8607 intptr_t num_free_fun_type_params = kAllFree) const;
8608 bool IsUninstantiatedIdentity() const;
8609
8610 // Determine whether this uninstantiated type argument vector can share its
8611 // instantiator (resp. function) type argument vector instead of being
8612 // instantiated at runtime.
8613 // If null is passed in for 'with_runtime_check', the answer is unconditional
8614 // (i.e. the answer will be false even if a runtime check may allow sharing),
8615 // otherwise, in case the function returns true, 'with_runtime_check'
8616 // indicates if a check is still required at runtime before allowing sharing.
8617 bool CanShareInstantiatorTypeArguments(
8618 const Class& instantiator_class,
8619 bool* with_runtime_check = nullptr) const;
8620 bool CanShareFunctionTypeArguments(const Function& function,
8621 bool* with_runtime_check = nullptr) const;
8622 TypeArgumentsPtr TruncatedTo(intptr_t length) const;
8623
8624 // Return true if all types of this vector are finalized.
8625 bool IsFinalized() const;
8626
8627 // Caller must hold IsolateGroup::constant_canonicalization_mutex_.
8628 virtual InstancePtr CanonicalizeLocked(Thread* thread) const {
8629 return Canonicalize(thread);
8630 }
8631
8632 // Canonicalize only if instantiated, otherwise returns 'this'.
8633 TypeArgumentsPtr Canonicalize(Thread* thread) const;
8634
8635 // Shrinks flattened instance type arguments to ordinary type arguments.
8636 TypeArgumentsPtr FromInstanceTypeArguments(Thread* thread,
8637 const Class& cls) const;
8638
8639 // Expands type arguments to a vector suitable as instantiator type
8640 // arguments.
8641 //
8642 // Only fills positions corresponding to type parameters of [cls], leave
8643 // all positions of superclass type parameters blank.
8644 // Use [GetInstanceTypeArguments] on a class or a type if full vector is
8645 // needed.
8646 TypeArgumentsPtr ToInstantiatorTypeArguments(Thread* thread,
8647 const Class& cls) const;
8648
8649 // Add the class name and URI of each type argument of this vector to the uris
8650 // list and mark ambiguous triplets to be printed.
8651 void EnumerateURIs(URIs* uris) const;
8652
8653 // Return 'this' if this type argument vector is instantiated, i.e. if it does
8654 // not refer to type parameters. Otherwise, return a new type argument vector
8655 // where each reference to a type parameter is replaced with the corresponding
8656 // type from the various type argument vectors (class instantiator, function,
8657 // or parent functions via the current context).
8658 TypeArgumentsPtr InstantiateFrom(
8659 const TypeArguments& instantiator_type_arguments,
8660 const TypeArguments& function_type_arguments,
8661 intptr_t num_free_fun_type_params,
8662 Heap::Space space,
8663 FunctionTypeMapping* function_type_mapping = nullptr,
8664 intptr_t num_parent_type_args_adjustment = 0) const;
8665
8666 // Update number of parent function type arguments for
8667 // all elements of this vector.
8668 TypeArgumentsPtr UpdateFunctionTypes(
8669 intptr_t num_parent_type_args_adjustment,
8670 intptr_t num_free_fun_type_params,
8671 Heap::Space space,
8672 FunctionTypeMapping* function_type_mapping) const;
8673
8674 // Runtime instantiation with canonicalization. Not to be used during type
8675 // finalization at compile time.
8676 TypeArgumentsPtr InstantiateAndCanonicalizeFrom(
8677 const TypeArguments& instantiator_type_arguments,
8678 const TypeArguments& function_type_arguments) const;
8679
8680 class Cache : public ValueObject {
8681 public:
8682 // The contents of the backing array storage is a header followed by
8683 // a number of entry tuples. Any entry that is unoccupied has
8684 // Sentinel() as its first component.
8685 //
8686 // If the cache is linear, the entries can be accessed in a linear fashion:
8687 // all occupied entries come first, followed by at least one unoccupied
8688 // entry to mark the end of the cache. Guaranteeing at least one unoccupied
8689 // entry avoids the need for a length check when iterating over the contents
8690 // of the linear cache in stubs.
8691 //
8692 // If the cache is hash-based, the array is instead treated as a hash table
8693 // probed by using a hash value derived from the instantiator and function
8694 // type arguments.
8695
8696 enum Header {
8697 // A single Smi that is a bitfield containing two values:
8698 // - The number of occupied entries in the cache for all caches.
8699 // - For hash-based caches, the upper bits contain log2(N) where N
8700 // is the number of total entries in the cache, so this information can
8701 // be quickly retrieved by stubs.
8702 //
8703 // Note: accesses outside of the type arguments canonicalization mutex
8704 // must have acquire semantics. In C++ code, use NumOccupied to retrieve
8705 // the number of occupied entries.
8706 kMetadataIndex = 0,
8707 kHeaderSize,
8708 };
8709
8710 using NumOccupiedBits = BitField<intptr_t,
8711 intptr_t,
8712 0,
8713 compiler::target::kSmiBits -
8714 compiler::target::kBitsPerWordLog2>;
8715 using EntryCountLog2Bits = BitField<intptr_t,
8716 intptr_t,
8717 NumOccupiedBits::kNextBit,
8718 compiler::target::kBitsPerWordLog2>;
8719
8720 // The tuple of values stored in a given entry.
8721 //
8722 // Note: accesses of the first component outside of the type arguments
8723 // canonicalization mutex must have acquire semantics.
8724 enum Entry {
8725 kSentinelIndex = 0, // Used when only checking for sentinel values.
8726 kInstantiatorTypeArgsIndex = kSentinelIndex,
8727 kFunctionTypeArgsIndex,
8728 kInstantiatedTypeArgsIndex,
8729 kEntrySize,
8730 };
8731
8732 // Requires that the type arguments canonicalization mutex is held.
8733 Cache(Zone* zone, const TypeArguments& source);
8734
8735 // Requires that the type arguments canonicalization mutex is held.
8736 Cache(Zone* zone, const Array& array);
8737
8738 // Used to check that the state of the backing array is valid.
8739 //
8740 // Requires that the type arguments canonicalization mutex is held.
8741 DEBUG_ONLY(static bool IsValidStorageLocked(const Array& array);)
8742
8743 // Returns the number of entries stored in the cache.
8744 intptr_t NumOccupied() const { return NumOccupied(array: data_); }
8745
8746 struct KeyLocation {
8747 // The entry index if [present] is true, otherwise where the entry would
8748 // be located if added afterwards without any intermediate additions.
8749 intptr_t entry;
8750 bool present; // Whether an entry already exists in the cache.
8751 };
8752
8753 // If an entry contains the given instantiator and function type arguments,
8754 // returns a KeyLocation with the index of the entry and true. Otherwise,
8755 // returns the index an entry with those keys would have if added and false.
8756 KeyLocation FindKeyOrUnused(const TypeArguments& instantiator_tav,
8757 const TypeArguments& function_tav) const {
8758 return FindKeyOrUnused(array: data_, instantiator_tav, function_tav);
8759 }
8760
8761 // Returns whether the entry at the given index in the cache is occupied.
8762 bool IsOccupied(intptr_t entry) const;
8763
8764 // Given an occupied entry index, returns the instantiated TypeArguments.
8765 TypeArgumentsPtr Retrieve(intptr_t entry) const;
8766
8767 // Adds a new instantiation mapping to the cache at index [entry]. Assumes
8768 // that the entry at index [entry] is unoccupied.
8769 //
8770 // May replace the underlying storage array, in which case the returned
8771 // index of the entry may differ from the requested one. If this Cache was
8772 // constructed using a TypeArguments object, its instantiations field is
8773 // also updated to point to the new storage.
8774 KeyLocation AddEntry(intptr_t entry,
8775 const TypeArguments& instantiator_tav,
8776 const TypeArguments& function_tav,
8777 const TypeArguments& instantiated_tav) const;
8778
8779 // The sentinel value used to mark unoccupied entries.
8780 static SmiPtr Sentinel();
8781
8782 static const Array& EmptyStorage() {
8783 return Object::empty_instantiations_cache_array();
8784 }
8785
8786 // Returns whether the cache is linear.
8787 bool IsLinear() const { return IsLinear(array: data_); }
8788
8789 // Returns whether the cache is hash-based.
8790 bool IsHash() const { return IsHash(array: data_); }
8791
8792 private:
8793 static constexpr double LoadFactor(intptr_t occupied, intptr_t capacity) {
8794 return occupied / static_cast<double>(capacity);
8795 }
8796
8797 // Returns the number of entries stored in the cache backed by the given
8798 // array.
8799 static intptr_t NumOccupied(const Array& array);
8800
8801 // Returns whether the cache backed by the given storage is linear.
8802 static bool IsLinear(const Array& array) { return !IsHash(array); }
8803
8804 // Returns whether the cache backed by the given storage is hash-based.
8805 static bool IsHash(const Array& array);
8806
8807 // Ensures that the backing store for the cache can hold at least [occupied]
8808 // occupied entries. If it cannot, replaces the backing store with one that
8809 // can, copying over entries from the old backing store.
8810 //
8811 // Returns whether the backing store changed.
8812 bool EnsureCapacity(intptr_t occupied) const;
8813
8814 public: // For testing purposes only.
8815 // Retrieves the number of entries (occupied or unoccupied) in the cache.
8816 intptr_t NumEntries() const { return NumEntries(array: data_); }
8817
8818 // The maximum number of occupied entries for a linear cache of
8819 // instantiations before swapping to a hash table-based cache.
8820#if defined(TARGET_ARCH_IA32)
8821 // We don't generate hash cache probing in the stub on IA32.
8822 static constexpr intptr_t kMaxLinearCacheEntries = 500;
8823#else
8824 static constexpr intptr_t kMaxLinearCacheEntries = 10;
8825#endif
8826
8827 private:
8828 // Retrieves the number of entries (occupied or unoccupied) in a cache
8829 // backed by the given array.
8830 static intptr_t NumEntries(const Array& array);
8831
8832 // If an entry in the given array contains the given instantiator and
8833 // function type arguments, returns a KeyLocation with the index of the
8834 // entry and true. Otherwise, returns a KeyLocation with the index that
8835 // would be used if the instantiation for the given type arguments is
8836 // added and false.
8837 static KeyLocation FindKeyOrUnused(const Array& array,
8838 const TypeArguments& instantiator_tav,
8839 const TypeArguments& function_tav);
8840
8841 // The sentinel value in the Smi returned from Sentinel().
8842 static constexpr intptr_t kSentinelValue = 0;
8843
8844 public: // Used in the StubCodeCompiler.
8845 // The maximum size of the array backing a linear cache. All hash based
8846 // caches are guaranteed to have sizes larger than this.
8847 static constexpr intptr_t kMaxLinearCacheSize =
8848 kHeaderSize + (kMaxLinearCacheEntries + 1) * kEntrySize;
8849
8850 private:
8851 // The initial number of entries used when converting from a linear to
8852 // a hash-based cache.
8853 static constexpr intptr_t kNumInitialHashCacheEntries =
8854 Utils::RoundUpToPowerOfTwo(x: 2 * kMaxLinearCacheEntries);
8855 static_assert(Utils::IsPowerOfTwo(x: kNumInitialHashCacheEntries),
8856 "number of hash-based cache entries must be a power of two");
8857
8858 // The max load factor allowed in hash-based caches.
8859 static constexpr double kMaxLoadFactor = 0.71;
8860
8861 Zone* const zone_;
8862 const TypeArguments* const cache_container_;
8863 Array& data_;
8864 Smi& smi_handle_;
8865
8866 friend class TypeArguments; // For asserts against data_.
8867 };
8868
8869 // Return true if this type argument vector has cached instantiations.
8870 bool HasInstantiations() const;
8871
8872 static intptr_t instantiations_offset() {
8873 return OFFSET_OF(UntaggedTypeArguments, instantiations_);
8874 }
8875
8876 static constexpr intptr_t kBytesPerElement = kCompressedWordSize;
8877 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
8878
8879 static intptr_t InstanceSize() {
8880 ASSERT(sizeof(UntaggedTypeArguments) ==
8881 OFFSET_OF_RETURNED_VALUE(UntaggedTypeArguments, types));
8882 return 0;
8883 }
8884
8885 static intptr_t InstanceSize(intptr_t len) {
8886 // Ensure that the types() is not adding to the object size, which includes
8887 // 4 fields: instantiations_, length_, hash_, and nullability_.
8888 ASSERT(sizeof(UntaggedTypeArguments) ==
8889 (sizeof(UntaggedObject) + (kNumFields * kCompressedWordSize)));
8890 ASSERT(0 <= len && len <= kMaxElements);
8891 return RoundedAllocationSize(size: sizeof(UntaggedTypeArguments) +
8892 (len * kBytesPerElement));
8893 }
8894
8895 virtual uint32_t CanonicalizeHash() const {
8896 // Hash() is not stable until finalization is done.
8897 return 0;
8898 }
8899 uword Hash() const;
8900 uword HashForRange(intptr_t from_index, intptr_t len) const;
8901 static intptr_t hash_offset() {
8902 return OFFSET_OF(UntaggedTypeArguments, hash_);
8903 }
8904
8905 static TypeArgumentsPtr New(intptr_t len, Heap::Space space = Heap::kOld);
8906
8907 private:
8908 intptr_t ComputeNullability() const;
8909 void set_nullability(intptr_t value) const;
8910
8911 uword ComputeHash() const;
8912 void SetHash(intptr_t value) const;
8913
8914 // Check if the subvector of length 'len' starting at 'from_index' of this
8915 // type argument vector consists solely of DynamicType.
8916 // If raw_instantiated is true, consider each class type parameter to be first
8917 // instantiated from a vector of dynamic types.
8918 bool IsDynamicTypes(bool raw_instantiated,
8919 intptr_t from_index,
8920 intptr_t len) const;
8921
8922 ArrayPtr instantiations() const;
8923 void set_instantiations(const Array& value) const;
8924 void SetLength(intptr_t value) const;
8925 // Number of fields in the raw object is 4:
8926 // instantiations_, length_, hash_ and nullability_.
8927 static constexpr int kNumFields = 4;
8928
8929 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeArguments, Instance);
8930 friend class AbstractType;
8931 friend class Class;
8932 friend class ClearTypeHashVisitor;
8933 friend class Object;
8934};
8935
8936// AbstractType is an abstract superclass.
8937// Subclasses of AbstractType are Type and TypeParameter.
8938class AbstractType : public Instance {
8939 public:
8940 static intptr_t flags_offset() {
8941 return OFFSET_OF(UntaggedAbstractType, flags_);
8942 }
8943 static intptr_t hash_offset() {
8944 return OFFSET_OF(UntaggedAbstractType, hash_);
8945 }
8946
  // A type is finalized once the class finalizer has moved it into one of the
  // two terminal states: finalized-instantiated or finalized-uninstantiated.
  bool IsFinalized() const {
    const auto state = type_state();
    return (state == UntaggedAbstractType::kFinalizedInstantiated) ||
           (state == UntaggedAbstractType::kFinalizedUninstantiated);
  }
8952 void SetIsFinalized() const;
8953
8954 Nullability nullability() const {
8955 return static_cast<Nullability>(
8956 UntaggedAbstractType::NullabilityBits::decode(value: untag()->flags()));
8957 }
8958 // Returns true if type has '?' nullability suffix, or it is a
8959 // built-in type which is always nullable (Null, dynamic or void).
8960 bool IsNullable() const { return nullability() == Nullability::kNullable; }
8961 // Returns true if type does not have any nullability suffix.
8962 // This function also returns true for type parameters without
8963 // nullability suffix ("T") which can be instantiated with
8964 // nullable or legacy types.
8965 bool IsNonNullable() const {
8966 return nullability() == Nullability::kNonNullable;
8967 }
8968 // Returns true if type has '*' nullability suffix, i.e.
8969 // it is from a legacy (opted-out) library.
8970 bool IsLegacy() const { return nullability() == Nullability::kLegacy; }
8971 // Returns true if it is guaranteed that null cannot be
8972 // assigned to this type.
8973 bool IsStrictlyNonNullable() const;
8974
8975 virtual AbstractTypePtr SetInstantiatedNullability(
8976 const TypeParameter& type_param,
8977 Heap::Space space) const;
8978 virtual AbstractTypePtr NormalizeFutureOrType(Heap::Space space) const;
8979
8980 virtual bool HasTypeClass() const { return type_class_id() != kIllegalCid; }
8981 virtual classid_t type_class_id() const;
8982 virtual ClassPtr type_class() const;
8983 virtual TypeArgumentsPtr arguments() const;
8984 virtual bool IsInstantiated(
8985 Genericity genericity = kAny,
8986 intptr_t num_free_fun_type_params = kAllFree) const;
  // For types, canonical equality coincides with type equality.
  virtual bool CanonicalizeEquals(const Instance& other) const {
    return Equals(other);
  }
  // Finalized types have a stable hash, so Hash() doubles as canonical hash.
  virtual uint32_t CanonicalizeHash() const { return Hash(); }
8991 virtual bool Equals(const Instance& other) const {
8992 return IsEquivalent(other, kind: TypeEquality::kCanonical);
8993 }
8994 virtual bool IsEquivalent(
8995 const Instance& other,
8996 TypeEquality kind,
8997 FunctionTypeMapping* function_type_equivalence = nullptr) const;
8998 virtual bool RequireConstCanonicalTypeErasure(Zone* zone) const;
8999
9000 // Instantiate this type using the given type argument vectors.
9001 //
9002 // Note that some type parameters appearing in this type may not require
9003 // instantiation. Consider a class C<T> declaring a non-generic method
9004 // foo(bar<B>(T t, B b)). Although foo is not a generic method, it takes a
9005 // generic function bar<B> as argument and its function type refers to class
9006 // type parameter T and function type parameter B. When instantiating the
9007 // function type of foo for a particular value of T, function type parameter B
9008 // must remain uninstantiated, because only T is a free variable in this type.
9009 //
9010 // Return a new type, or return 'this' if it is already instantiated.
9011 virtual AbstractTypePtr InstantiateFrom(
9012 const TypeArguments& instantiator_type_arguments,
9013 const TypeArguments& function_type_arguments,
9014 intptr_t num_free_fun_type_params,
9015 Heap::Space space,
9016 FunctionTypeMapping* function_type_mapping = nullptr,
9017 intptr_t num_parent_type_args_adjustment = 0) const;
9018
9019 // Update number of parent function type arguments for the
9020 // nested function types and their type parameters.
9021 //
9022 // This adjustment is needed when nesting one generic function type
9023 // inside another. It is also needed when function type is copied
9024 // and owners of type parameters need to be adjusted.
9025 //
9026 // Number of parent function type arguments is adjusted by
9027 // [num_parent_type_args_adjustment].
9028 // Type parameters up to [num_free_fun_type_params] are not adjusted.
9029 virtual AbstractTypePtr UpdateFunctionTypes(
9030 intptr_t num_parent_type_args_adjustment,
9031 intptr_t num_free_fun_type_params,
9032 Heap::Space space,
9033 FunctionTypeMapping* function_type_mapping) const;
9034
  // Caller must hold IsolateGroup::constant_canonicalization_mutex_.
  // Canonicalization of types does not itself require the mutex, so this
  // simply forwards to Canonicalize.
  virtual InstancePtr CanonicalizeLocked(Thread* thread) const {
    return Canonicalize(thread);
  }
9039
9040 // Return the canonical version of this type.
9041 virtual AbstractTypePtr Canonicalize(Thread* thread) const;
9042
9043 // Add the pair <name, uri> to the list, if not already present.
9044 static void AddURI(URIs* uris, const String& name, const String& uri);
9045
9046 // Return a formatted string of the uris.
9047 static StringPtr PrintURIs(URIs* uris);
9048
9049 // Returns a C-String (possibly "") representing the nullability of this type.
9050 // Legacy and undetermined suffixes are only displayed with kInternalName.
9051 virtual const char* NullabilitySuffix(NameVisibility name_visibility) const;
9052
9053 // The name of this type, including the names of its type arguments, if any.
9054 virtual StringPtr Name() const;
9055
9056 // The name of this type, including the names of its type arguments, if any.
9057 // Names of internal classes are mapped to their public interfaces.
9058 virtual StringPtr UserVisibleName() const;
9059
9060 // The name of this type, including the names of its type arguments, if any.
9061 // Privacy suffixes are dropped.
9062 virtual StringPtr ScrubbedName() const;
9063
9064 // Return the internal or public name of this type, including the names of its
9065 // type arguments, if any.
9066 virtual void PrintName(NameVisibility visibility,
9067 BaseTextBuffer* printer) const;
9068
9069 // Add the class name and URI of each occurring type to the uris
9070 // list and mark ambiguous triplets to be printed.
9071 virtual void EnumerateURIs(URIs* uris) const;
9072
9073 uword Hash() const;
9074 virtual uword ComputeHash() const;
9075
9076 // The name of this type's class, i.e. without the type argument names of this
9077 // type.
9078 StringPtr ClassName() const;
9079
9080 // Check if this type represents the 'dynamic' type.
9081 bool IsDynamicType() const { return type_class_id() == kDynamicCid; }
9082
9083 // Check if this type represents the 'void' type.
9084 bool IsVoidType() const { return type_class_id() == kVoidCid; }
9085
9086 // Check if this type represents the 'Null' type.
9087 bool IsNullType() const;
9088
9089 // Check if this type represents the 'Never' type.
9090 bool IsNeverType() const;
9091
9092 // Check if this type represents the 'Sentinel' type.
9093 bool IsSentinelType() const;
9094
9095 // Check if this type represents the 'Object' type.
9096 bool IsObjectType() const { return type_class_id() == kInstanceCid; }
9097
9098 // Check if this type represents the 'Object?' type.
9099 bool IsNullableObjectType() const {
9100 return IsObjectType() && (nullability() == Nullability::kNullable);
9101 }
9102
9103 // Check if this type represents a top type for subtyping,
9104 // assignability and 'as' type tests.
9105 //
9106 // Returns true if
9107 // - any type is a subtype of this type;
9108 // - any value can be assigned to a variable of this type;
9109 // - 'as' type test always succeeds for this type.
9110 bool IsTopTypeForSubtyping() const;
9111
9112 // Check if this type represents a top type for 'is' type tests.
9113 // Returns true if 'is' type test always returns true for this type.
9114 bool IsTopTypeForInstanceOf() const;
9115
9116 // Check if this type represents the 'bool' type.
9117 bool IsBoolType() const { return type_class_id() == kBoolCid; }
9118
9119 // Check if this type represents the 'int' type.
9120 bool IsIntType() const;
9121
9122 // Check if this type represents the '_IntegerImplementation' type.
9123 bool IsIntegerImplementationType() const;
9124
9125 // Check if this type represents the 'double' type.
9126 bool IsDoubleType() const;
9127
9128 // Check if this type represents the 'Float32x4' type.
9129 bool IsFloat32x4Type() const;
9130
9131 // Check if this type represents the 'Float64x2' type.
9132 bool IsFloat64x2Type() const;
9133
9134 // Check if this type represents the 'Int32x4' type.
9135 bool IsInt32x4Type() const;
9136
9137 // Check if this type represents the 'num' type.
9138 bool IsNumberType() const { return type_class_id() == kNumberCid; }
9139
9140 // Check if this type represents the '_Smi' type.
9141 bool IsSmiType() const { return type_class_id() == kSmiCid; }
9142
9143 // Check if this type represents the '_Mint' type.
9144 bool IsMintType() const { return type_class_id() == kMintCid; }
9145
9146 // Check if this type represents the 'String' type.
9147 bool IsStringType() const;
9148
9149 // Check if this type represents the Dart 'Function' type.
9150 bool IsDartFunctionType() const;
9151
9152 // Check if this type represents the Dart '_Closure' type.
9153 bool IsDartClosureType() const;
9154
9155 // Check if this type represents the Dart 'Record' type.
9156 bool IsDartRecordType() const;
9157
9158 // Check if this type represents the 'Pointer' type from "dart:ffi".
9159 bool IsFfiPointerType() const;
9160
9161 // Check if this type represents the 'FutureOr' type.
9162 bool IsFutureOrType() const { return type_class_id() == kFutureOrCid; }
9163
9164 // Returns the type argument of this (possibly nested) 'FutureOr' type.
9165 // Returns unmodified type if this type is not a 'FutureOr' type.
9166 AbstractTypePtr UnwrapFutureOr() const;
9167
9168 // Returns true if parameter of this type might need a
9169 // null assertion (if null assertions are enabled).
9170 bool NeedsNullAssertion() const;
9171
9172 // Returns true if catching this type will catch all exceptions.
9173 // Exception objects are guaranteed to be non-nullable, so
9174 // non-nullable Object is also a catch-all type.
9175 bool IsCatchAllType() const { return IsDynamicType() || IsObjectType(); }
9176
9177 // Returns true if this type has a type class permitted by SendPort.send for
9178 // messages between isolates in different groups. Does not recursively visit
9179 // type arguments.
9180 bool IsTypeClassAllowedBySpawnUri() const;
9181
9182 // Check the subtype relationship.
9183 bool IsSubtypeOf(
9184 const AbstractType& other,
9185 Heap::Space space,
9186 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9187
9188 // Returns true iff subtype is a subtype of supertype, false otherwise or if
9189 // an error occurred.
9190 static bool InstantiateAndTestSubtype(
9191 AbstractType* subtype,
9192 AbstractType* supertype,
9193 const TypeArguments& instantiator_type_args,
9194 const TypeArguments& function_type_args);
9195
9196 static intptr_t type_test_stub_entry_point_offset() {
9197 return OFFSET_OF(UntaggedAbstractType, type_test_stub_entry_point_);
9198 }
9199
9200 uword type_test_stub_entry_point() const {
9201 return untag()->type_test_stub_entry_point_;
9202 }
9203 CodePtr type_test_stub() const { return untag()->type_test_stub(); }
9204
9205 // Sets the TTS to [stub].
9206 //
9207 // The update will ensure both fields (code as well as the cached entrypoint)
9208 // are updated together.
9209 //
9210 // Can be used concurrently by multiple threads - the updates will be applied
9211 // in undetermined order - but always consistently.
9212 void SetTypeTestingStub(const Code& stub) const;
9213
9214 // Sets the TTS to the [stub].
9215 //
9216 // The caller has to ensure no other thread can concurrently try to update the
9217 // TTS. This should mainly be used when initializing newly allocated Type
9218 // objects.
9219 void InitializeTypeTestingStubNonAtomic(const Code& stub) const;
9220
9221 void UpdateTypeTestingStubEntryPoint() const {
9222 StoreNonPointer(addr: &untag()->type_test_stub_entry_point_,
9223 value: Code::EntryPointOf(code: untag()->type_test_stub()));
9224 }
9225
9226 // No instances of type AbstractType are allocated, but InstanceSize() and
9227 // NextFieldOffset() are required to register class _AbstractType.
9228 static intptr_t InstanceSize() {
9229 return RoundedAllocationSize(size: sizeof(UntaggedAbstractType));
9230 }
9231
9232 static intptr_t NextFieldOffset() { return -kWordSize; }
9233
9234 private:
9235 // Returns true if this type is a subtype of FutureOr<T> specified by 'other'.
9236 // Returns false if other type is not a FutureOr.
9237 bool IsSubtypeOfFutureOr(
9238 Zone* zone,
9239 const AbstractType& other,
9240 Heap::Space space,
9241 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9242
9243 protected:
9244 bool IsNullabilityEquivalent(Thread* thread,
9245 const AbstractType& other_type,
9246 TypeEquality kind) const;
9247
9248 void SetHash(intptr_t value) const;
9249
9250 UntaggedAbstractType::TypeState type_state() const {
9251 return static_cast<UntaggedAbstractType::TypeState>(
9252 UntaggedAbstractType::TypeStateBits::decode(value: untag()->flags()));
9253 }
9254 void set_flags(uint32_t value) const;
9255 void set_type_state(UntaggedAbstractType::TypeState value) const;
9256 void set_nullability(Nullability value) const;
9257
9258 HEAP_OBJECT_IMPLEMENTATION(AbstractType, Instance);
9259 friend class Class;
9260 friend class ClearTypeHashVisitor;
9261 friend class Function;
9262 friend class TypeArguments;
9263};
9264
9265// A Type consists of a class, possibly parameterized with type
9266// arguments. Example: C<T1, T2>.
9267class Type : public AbstractType {
9268 public:
9269 static intptr_t arguments_offset() {
9270 return OFFSET_OF(UntaggedType, arguments_);
9271 }
9272 virtual bool HasTypeClass() const {
9273 ASSERT(type_class_id() != kIllegalCid);
9274 return true;
9275 }
9276 TypePtr ToNullability(Nullability value, Heap::Space space) const;
9277 virtual classid_t type_class_id() const;
9278 virtual ClassPtr type_class() const;
9279 void set_type_class(const Class& value) const;
9280 virtual TypeArgumentsPtr arguments() const { return untag()->arguments(); }
9281 void set_arguments(const TypeArguments& value) const;
9282
9283 // Returns flattened instance type arguments vector for
9284 // instance of this type.
9285 TypeArgumentsPtr GetInstanceTypeArguments(Thread* thread,
9286 bool canonicalize = true) const;
9287
9288 virtual bool IsInstantiated(
9289 Genericity genericity = kAny,
9290 intptr_t num_free_fun_type_params = kAllFree) const;
9291 virtual bool IsEquivalent(
9292 const Instance& other,
9293 TypeEquality kind,
9294 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9295 virtual bool RequireConstCanonicalTypeErasure(Zone* zone) const;
9296
9297 // Return true if this type can be used as the declaration type of cls after
9298 // canonicalization (passed-in cls must match type_class()).
9299 bool IsDeclarationTypeOf(const Class& cls) const;
9300
9301 virtual AbstractTypePtr InstantiateFrom(
9302 const TypeArguments& instantiator_type_arguments,
9303 const TypeArguments& function_type_arguments,
9304 intptr_t num_free_fun_type_params,
9305 Heap::Space space,
9306 FunctionTypeMapping* function_type_mapping = nullptr,
9307 intptr_t num_parent_type_args_adjustment = 0) const;
9308
9309 virtual AbstractTypePtr UpdateFunctionTypes(
9310 intptr_t num_parent_type_args_adjustment,
9311 intptr_t num_free_fun_type_params,
9312 Heap::Space space,
9313 FunctionTypeMapping* function_type_mapping) const;
9314
9315 virtual AbstractTypePtr Canonicalize(Thread* thread) const;
9316 virtual void EnumerateURIs(URIs* uris) const;
9317 virtual void PrintName(NameVisibility visibility,
9318 BaseTextBuffer* printer) const;
9319
9320 virtual uword ComputeHash() const;
9321
9322 static intptr_t InstanceSize() {
9323 return RoundedAllocationSize(size: sizeof(UntaggedType));
9324 }
9325
9326 // The type of the literal 'null'.
9327 static TypePtr NullType();
9328
9329 // The 'dynamic' type.
9330 static TypePtr DynamicType();
9331
9332 // The 'void' type.
9333 static TypePtr VoidType();
9334
9335 // The 'Never' type.
9336 static TypePtr NeverType();
9337
9338 // The 'Object' type.
9339 static TypePtr ObjectType();
9340
9341 // The 'bool' type.
9342 static TypePtr BoolType();
9343
9344 // The 'int' type.
9345 static TypePtr IntType();
9346
9347 // The 'int?' type.
9348 static TypePtr NullableIntType();
9349
9350 // The 'Smi' type.
9351 static TypePtr SmiType();
9352
9353 // The 'Mint' type.
9354 static TypePtr MintType();
9355
9356 // The 'double' type.
9357 static TypePtr Double();
9358
9359 // The 'double?' type.
9360 static TypePtr NullableDouble();
9361
9362 // The 'Float32x4' type.
9363 static TypePtr Float32x4();
9364
9365 // The 'Float64x2' type.
9366 static TypePtr Float64x2();
9367
9368 // The 'Int32x4' type.
9369 static TypePtr Int32x4();
9370
9371 // The 'num' type.
9372 static TypePtr Number();
9373
9374 // The 'String' type.
9375 static TypePtr StringType();
9376
9377 // The 'Array' type.
9378 static TypePtr ArrayType();
9379
9380 // The 'Function' type.
9381 static TypePtr DartFunctionType();
9382
9383 // The 'Type' type.
9384 static TypePtr DartTypeType();
9385
9386 // The finalized type of the given non-parameterized class.
9387 static TypePtr NewNonParameterizedType(const Class& type_class);
9388
9389 static TypePtr New(const Class& clazz,
9390 const TypeArguments& arguments,
9391 Nullability nullability = Nullability::kLegacy,
9392 Heap::Space space = Heap::kOld);
9393
9394 private:
9395 // Takes an intptr_t since the cids of some classes are larger than will fit
9396 // in ClassIdTagType. This allows us to guard against that case, instead of
9397 // silently truncating the cid.
9398 void set_type_class_id(intptr_t id) const;
9399
9400 static TypePtr New(Heap::Space space = Heap::kOld);
9401
9402 FINAL_HEAP_OBJECT_IMPLEMENTATION(Type, AbstractType);
9403 friend class Class;
9404 friend class TypeArguments;
9405};
9406
9407// A FunctionType represents the type of a function. It describes most of the
9408// signature of a function, excluding the names of type parameters and names
9409// of parameters, but includes the names of optional named parameters.
9410class FunctionType : public AbstractType {
9411 public:
9412 // Reexported so they can be used by the flow graph builders.
9413 using PackedNumParentTypeArguments =
9414 UntaggedFunctionType::PackedNumParentTypeArguments;
9415 using PackedNumTypeParameters = UntaggedFunctionType::PackedNumTypeParameters;
9416 using PackedHasNamedOptionalParameters =
9417 UntaggedFunctionType::PackedHasNamedOptionalParameters;
9418 using PackedNumImplicitParameters =
9419 UntaggedFunctionType::PackedNumImplicitParameters;
9420 using PackedNumFixedParameters =
9421 UntaggedFunctionType::PackedNumFixedParameters;
9422 using PackedNumOptionalParameters =
9423 UntaggedFunctionType::PackedNumOptionalParameters;
9424
9425 virtual bool HasTypeClass() const { return false; }
9426 FunctionTypePtr ToNullability(Nullability value, Heap::Space space) const;
9427 virtual classid_t type_class_id() const { return kIllegalCid; }
9428 virtual bool IsInstantiated(
9429 Genericity genericity = kAny,
9430 intptr_t num_free_fun_type_params = kAllFree) const;
9431 virtual bool IsEquivalent(
9432 const Instance& other,
9433 TypeEquality kind,
9434 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9435 virtual bool RequireConstCanonicalTypeErasure(Zone* zone) const;
9436
9437 virtual AbstractTypePtr InstantiateFrom(
9438 const TypeArguments& instantiator_type_arguments,
9439 const TypeArguments& function_type_arguments,
9440 intptr_t num_free_fun_type_params,
9441 Heap::Space space,
9442 FunctionTypeMapping* function_type_mapping = nullptr,
9443 intptr_t num_parent_type_args_adjustment = 0) const;
9444
9445 virtual AbstractTypePtr UpdateFunctionTypes(
9446 intptr_t num_parent_type_args_adjustment,
9447 intptr_t num_free_fun_type_params,
9448 Heap::Space space,
9449 FunctionTypeMapping* function_type_mapping) const;
9450
9451 virtual AbstractTypePtr Canonicalize(Thread* thread) const;
9452 virtual void EnumerateURIs(URIs* uris) const;
9453 virtual void PrintName(NameVisibility visibility,
9454 BaseTextBuffer* printer) const;
9455
9456 virtual uword ComputeHash() const;
9457
9458 bool IsSubtypeOf(
9459 const FunctionType& other,
9460 Heap::Space space,
9461 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9462
9463 static intptr_t NumParentTypeArgumentsOf(FunctionTypePtr ptr) {
9464 return ptr->untag()
9465 ->packed_type_parameter_counts_.Read<PackedNumParentTypeArguments>();
9466 }
9467 // Return the number of type arguments in the enclosing signature.
9468 intptr_t NumParentTypeArguments() const {
9469 return NumParentTypeArgumentsOf(ptr: ptr());
9470 }
9471 void SetNumParentTypeArguments(intptr_t value) const;
9472 static intptr_t NumTypeParametersOf(FunctionTypePtr ptr) {
9473 return ptr->untag()
9474 ->packed_type_parameter_counts_.Read<PackedNumTypeParameters>();
9475 }
9476 intptr_t NumTypeParameters() const { return NumTypeParametersOf(ptr: ptr()); }
9477
9478 static intptr_t NumTypeArgumentsOf(FunctionTypePtr ptr) {
9479 return NumTypeParametersOf(ptr) + NumParentTypeArgumentsOf(ptr);
9480 }
9481 intptr_t NumTypeArguments() const { return NumTypeArgumentsOf(ptr: ptr()); }
9482
9483 intptr_t num_implicit_parameters() const {
9484 return untag()
9485 ->packed_parameter_counts_.Read<PackedNumImplicitParameters>();
9486 }
9487 void set_num_implicit_parameters(intptr_t value) const;
9488
9489 static intptr_t NumFixedParametersOf(FunctionTypePtr ptr) {
9490 return ptr->untag()
9491 ->packed_parameter_counts_.Read<PackedNumFixedParameters>();
9492 }
9493 intptr_t num_fixed_parameters() const { return NumFixedParametersOf(ptr: ptr()); }
9494 void set_num_fixed_parameters(intptr_t value) const;
9495
9496 static bool HasOptionalParameters(FunctionTypePtr ptr) {
9497 return ptr->untag()
9498 ->packed_parameter_counts_.Read<PackedNumOptionalParameters>() >
9499 0;
9500 }
9501 bool HasOptionalParameters() const { return HasOptionalParameters(ptr: ptr()); }
9502
9503 static bool HasOptionalNamedParameters(FunctionTypePtr ptr) {
9504 return ptr->untag()
9505 ->packed_parameter_counts_.Read<PackedHasNamedOptionalParameters>();
9506 }
9507 bool HasOptionalNamedParameters() const {
9508 return HasOptionalNamedParameters(ptr: ptr());
9509 }
9510 bool HasRequiredNamedParameters() const;
9511
9512 static bool HasOptionalPositionalParameters(FunctionTypePtr ptr) {
9513 return !HasOptionalNamedParameters(ptr) && HasOptionalParameters(ptr);
9514 }
9515 bool HasOptionalPositionalParameters() const {
9516 return HasOptionalPositionalParameters(ptr: ptr());
9517 }
9518
9519 static intptr_t NumOptionalParametersOf(FunctionTypePtr ptr) {
9520 return ptr->untag()
9521 ->packed_parameter_counts_.Read<PackedNumOptionalParameters>();
9522 }
9523 intptr_t NumOptionalParameters() const {
9524 return NumOptionalParametersOf(ptr: ptr());
9525 }
9526 void SetNumOptionalParameters(intptr_t num_optional_parameters,
9527 bool are_optional_positional) const;
9528
9529 static intptr_t NumOptionalPositionalParametersOf(FunctionTypePtr ptr) {
9530 return HasOptionalNamedParameters(ptr) ? 0 : NumOptionalParametersOf(ptr);
9531 }
9532 intptr_t NumOptionalPositionalParameters() const {
9533 return NumOptionalPositionalParametersOf(ptr: ptr());
9534 }
9535
9536 static intptr_t NumOptionalNamedParametersOf(FunctionTypePtr ptr) {
9537 return HasOptionalNamedParameters(ptr) ? NumOptionalParametersOf(ptr) : 0;
9538 }
9539 intptr_t NumOptionalNamedParameters() const {
9540 return NumOptionalNamedParametersOf(ptr: ptr());
9541 }
9542
9543 static intptr_t NumParametersOf(FunctionTypePtr ptr) {
9544 return NumFixedParametersOf(ptr) + NumOptionalParametersOf(ptr);
9545 }
9546 intptr_t NumParameters() const { return NumParametersOf(ptr: ptr()); }
9547
9548 uint32_t packed_parameter_counts() const {
9549 return untag()->packed_parameter_counts_;
9550 }
9551 void set_packed_parameter_counts(uint32_t packed_parameter_counts) const;
9552 static intptr_t packed_parameter_counts_offset() {
9553 return OFFSET_OF(UntaggedFunctionType, packed_parameter_counts_);
9554 }
9555 uint16_t packed_type_parameter_counts() const {
9556 return untag()->packed_type_parameter_counts_;
9557 }
9558 void set_packed_type_parameter_counts(uint16_t packed_parameter_counts) const;
9559 static intptr_t packed_type_parameter_counts_offset() {
9560 return OFFSET_OF(UntaggedFunctionType, packed_type_parameter_counts_);
9561 }
9562
9563 // Return the type parameter declared at index.
9564 TypeParameterPtr TypeParameterAt(
9565 intptr_t index,
9566 Nullability nullability = Nullability::kNonNullable) const;
9567
9568 AbstractTypePtr result_type() const { return untag()->result_type(); }
9569 void set_result_type(const AbstractType& value) const;
9570
9571 // The parameters, starting with NumImplicitParameters() parameters which are
9572 // only visible to the VM, but not to Dart users.
9573 // Note that type checks exclude implicit parameters.
9574 AbstractTypePtr ParameterTypeAt(intptr_t index) const;
9575 void SetParameterTypeAt(intptr_t index, const AbstractType& value) const;
9576 ArrayPtr parameter_types() const { return untag()->parameter_types(); }
9577 void set_parameter_types(const Array& value) const;
9578 static intptr_t parameter_types_offset() {
9579 return OFFSET_OF(UntaggedFunctionType, parameter_types_);
9580 }
9581 // Parameter names are only stored for named parameters. If there are no named
9582 // parameters, named_parameter_names() is null.
9583 // If there are parameter flags (eg required) they're stored at the end of
9584 // this array, so the size of this array isn't necessarily
9585 // NumOptionalNamedParameters(), but the first NumOptionalNamedParameters()
9586 // elements are the names.
9587 ArrayPtr named_parameter_names() const {
9588 return untag()->named_parameter_names();
9589 }
9590 void set_named_parameter_names(const Array& value) const;
9591 static intptr_t named_parameter_names_offset() {
9592 return OFFSET_OF(UntaggedFunctionType, named_parameter_names_);
9593 }
9594 // The index for these operations is the absolute index of the parameter, not
9595 // the index relative to the start of the named parameters (if any).
9596 StringPtr ParameterNameAt(intptr_t index) const;
9597 // Only valid for absolute indexes of named parameters.
9598 void SetParameterNameAt(intptr_t index, const String& value) const;
9599
9600 // The required flags are stored at the end of the parameter_names. The flags
9601 // are packed into SMIs, but omitted if they're 0.
9602 bool IsRequiredAt(intptr_t index) const;
9603 void SetIsRequiredAt(intptr_t index) const;
9604
9605 // Sets up the signature's parameter name array, including appropriate space
9606 // for any possible parameter flags. This may be an overestimate if some
9607 // parameters don't have flags, and so FinalizeNameArray() should
9608 // be called after all parameter flags have been appropriately set.
9609 //
9610 // Assumes that the number of fixed and optional parameters for the signature
9611 // has already been set. Uses same default space as FunctionType::New.
9612 void CreateNameArrayIncludingFlags(Heap::Space space = Heap::kOld) const;
9613
9614 // Truncate the parameter names array to remove any unused flag slots. Make
9615 // sure to only do this after calling SetIsRequiredAt as necessary.
9616 void FinalizeNameArray() const;
9617
9618 // Returns the length of the parameter names array that is required to store
9619 // all the names plus all their flags. This may be an overestimate if some
9620 // parameters don't have flags.
9621 static intptr_t NameArrayLengthIncludingFlags(intptr_t num_parameters);
9622
9623 // The formal type parameters, their bounds, and defaults, are specified as an
9624 // object of type TypeParameters.
9625 TypeParametersPtr type_parameters() const {
9626 return untag()->type_parameters();
9627 }
9628 void SetTypeParameters(const TypeParameters& value) const;
9629 static intptr_t type_parameters_offset() {
9630 return OFFSET_OF(UntaggedFunctionType, type_parameters_);
9631 }
9632
9633 // Returns true if this function type has the same number of type parameters
9634 // with equal bounds as the other function type. Type parameter names and
9635 // parameter names (unless optional named) are ignored.
9636 bool HasSameTypeParametersAndBounds(
9637 const FunctionType& other,
9638 TypeEquality kind,
9639 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9640
9641 // Return true if this function type declares type parameters.
9642 static bool IsGeneric(FunctionTypePtr ptr) {
9643 return ptr->untag()->type_parameters() != TypeParameters::null();
9644 }
9645 bool IsGeneric() const { return IsGeneric(ptr: ptr()); }
9646
9647 // Return true if any enclosing signature of this signature is generic.
9648 bool HasGenericParent() const { return NumParentTypeArguments() > 0; }
9649
9650 // Returns true if the type of the formal parameter at the given position in
9651 // this function type is contravariant with the type of the other formal
9652 // parameter at the given position in the other function type.
9653 bool IsContravariantParameter(
9654 intptr_t parameter_position,
9655 const FunctionType& other,
9656 intptr_t other_parameter_position,
9657 Heap::Space space,
9658 FunctionTypeMapping* function_type_equivalence) const;
9659
9660 // Returns the index in the parameter names array of the corresponding flag
9661 // for the given parameter index. Also returns (via flag_mask) the
9662 // corresponding mask within the flag.
9663 intptr_t GetRequiredFlagIndex(intptr_t index, intptr_t* flag_mask) const;
9664
9665 void Print(NameVisibility name_visibility, BaseTextBuffer* printer) const;
9666 void PrintParameters(Thread* thread,
9667 Zone* zone,
9668 NameVisibility name_visibility,
9669 BaseTextBuffer* printer) const;
9670
9671 StringPtr ToUserVisibleString() const;
9672 const char* ToUserVisibleCString() const;
9673
9674 static intptr_t InstanceSize() {
9675 return RoundedAllocationSize(size: sizeof(UntaggedFunctionType));
9676 }
9677
9678 static FunctionTypePtr New(intptr_t num_parent_type_arguments = 0,
9679 Nullability nullability = Nullability::kLegacy,
9680 Heap::Space space = Heap::kOld);
9681
9682 static FunctionTypePtr Clone(const FunctionType& orig, Heap::Space space);
9683
9684 private:
9685 static FunctionTypePtr New(Heap::Space space);
9686
9687 FINAL_HEAP_OBJECT_IMPLEMENTATION(FunctionType, AbstractType);
9688 friend class Class;
9689 friend class Function;
9690};
9691
9692// A TypeParameter represents a type parameter of a parameterized class.
9693// It specifies its index (and its name for debugging purposes), as well as its
9694// upper bound.
9695// For example, the type parameter 'V' is specified as index 1 in the context of
9696// the class HashMap<K, V>. At compile time, the TypeParameter is not
9697// instantiated yet, i.e. it is only a place holder.
9698// Upon finalization, the TypeParameter index is changed to reflect its position
9699// as type argument (rather than type parameter) of the parameterized class.
9700// If the type parameter is declared without an extends clause, its bound is set
9701// to the ObjectType.
9702class TypeParameter : public AbstractType {
9703 public:
9704 TypeParameterPtr ToNullability(Nullability value, Heap::Space space) const;
9705 virtual bool HasTypeClass() const { return false; }
9706 virtual classid_t type_class_id() const { return kIllegalCid; }
9707
9708 bool IsFunctionTypeParameter() const {
9709 return UntaggedTypeParameter::IsFunctionTypeParameter::decode(
9710 value: untag()->flags());
9711 }
9712 bool IsClassTypeParameter() const { return !IsFunctionTypeParameter(); }
9713
9714 intptr_t base() const { return untag()->base_; }
9715 void set_base(intptr_t value) const;
9716 intptr_t index() const { return untag()->index_; }
9717 void set_index(intptr_t value) const;
9718 static intptr_t index_offset() {
9719 return OFFSET_OF(UntaggedTypeParameter, index_);
9720 }
9721
9722 classid_t parameterized_class_id() const;
9723 void set_parameterized_class_id(classid_t value) const;
9724 ClassPtr parameterized_class() const;
9725 FunctionTypePtr parameterized_function_type() const;
9726
9727 AbstractTypePtr bound() const;
9728
9729 virtual bool IsInstantiated(
9730 Genericity genericity = kAny,
9731 intptr_t num_free_fun_type_params = kAllFree) const;
9732 virtual bool IsEquivalent(
9733 const Instance& other,
9734 TypeEquality kind,
9735 FunctionTypeMapping* function_type_equivalence = nullptr) const;
9736 virtual bool RequireConstCanonicalTypeErasure(Zone* zone) const {
9737 return IsNonNullable();
9738 }
9739 virtual AbstractTypePtr InstantiateFrom(
9740 const TypeArguments& instantiator_type_arguments,
9741 const TypeArguments& function_type_arguments,
9742 intptr_t num_free_fun_type_params,
9743 Heap::Space space,
9744 FunctionTypeMapping* function_type_mapping = nullptr,
9745 intptr_t num_parent_type_args_adjustment = 0) const;
9746
9747 virtual AbstractTypePtr UpdateFunctionTypes(
9748 intptr_t num_parent_type_args_adjustment,
9749 intptr_t num_free_fun_type_params,
9750 Heap::Space space,
9751 FunctionTypeMapping* function_type_mapping) const;
9752
9753 virtual AbstractTypePtr Canonicalize(Thread* thread) const;
9754 virtual void EnumerateURIs(URIs* uris) const { return; }
9755 virtual void PrintName(NameVisibility visibility,
9756 BaseTextBuffer* printer) const;
9757
9758 // Returns type corresponding to [this] type parameter from the
9759 // given [instantiator_type_arguments] and [function_type_arguments].
9760 // Unlike InstantiateFrom, nullability of type parameter is not applied to
9761 // the result.
9762 AbstractTypePtr GetFromTypeArguments(
9763 const TypeArguments& instantiator_type_arguments,
9764 const TypeArguments& function_type_arguments) const;
9765
9766 // Return a constructed name for this nameless type parameter.
9767 const char* CanonicalNameCString() const {
9768 return CanonicalNameCString(is_class_type_parameter: IsClassTypeParameter(), base: base(), index: index());
9769 }
9770
9771 static const char* CanonicalNameCString(bool is_class_type_parameter,
9772 intptr_t base,
9773 intptr_t index);
9774
9775 static intptr_t InstanceSize() {
9776 return RoundedAllocationSize(size: sizeof(UntaggedTypeParameter));
9777 }
9778
9779 // 'owner' is a Class or FunctionType.
9780 static TypeParameterPtr New(const Object& owner,
9781 intptr_t base,
9782 intptr_t index,
9783 Nullability nullability);
9784
9785 private:
9786 virtual uword ComputeHash() const;
9787
9788 void set_owner(const Object& value) const;
9789
9790 static TypeParameterPtr New();
9791
9792 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeParameter, AbstractType);
9793 friend class Class;
9794};
9795
// Common base class for Dart number objects (Integer derives from it below).
class Number : public Instance {
 public:
  // TODO(iposva): Add more useful Number methods.
  // Returns the string representation of this number, allocated in 'space'.
  StringPtr ToString(Heap::Space space) const;

 private:
  OBJECT_IMPLEMENTATION(Number, Instance);

  friend class Class;
};
9806
9807class Integer : public Number {
9808 public:
9809 static IntegerPtr New(const String& str, Heap::Space space = Heap::kNew);
9810
9811 // Creates a new Integer by given uint64_t value.
9812 // Silently casts value to int64_t with wrap-around if it is greater
9813 // than kMaxInt64.
9814 static IntegerPtr NewFromUint64(uint64_t value,
9815 Heap::Space space = Heap::kNew);
9816
9817 // Returns a canonical Integer object allocated in the old gen space.
9818 // Returns null if integer is out of range.
9819 static IntegerPtr NewCanonical(const String& str);
9820 static IntegerPtr NewCanonical(int64_t value);
9821
9822 static IntegerPtr New(int64_t value, Heap::Space space = Heap::kNew);
9823
9824 // Returns true iff the given uint64_t value is representable as Dart integer.
9825 static bool IsValueInRange(uint64_t value);
9826
9827 virtual bool OperatorEquals(const Instance& other) const {
9828 return Equals(other);
9829 }
9830 virtual bool CanonicalizeEquals(const Instance& other) const {
9831 return Equals(other);
9832 }
9833 virtual uint32_t CanonicalizeHash() const;
9834 virtual bool Equals(const Instance& other) const;
9835
9836 virtual ObjectPtr HashCode() const { return ptr(); }
9837
9838 virtual bool IsZero() const;
9839 virtual bool IsNegative() const;
9840
9841 virtual double AsDoubleValue() const;
9842 virtual int64_t AsInt64Value() const;
9843 virtual int64_t AsTruncatedInt64Value() const { return AsInt64Value(); }
9844 virtual uint32_t AsTruncatedUint32Value() const;
9845
9846 virtual bool FitsIntoSmi() const;
9847
9848 // Returns 0, -1 or 1.
9849 virtual int CompareWith(const Integer& other) const;
9850
9851 // Converts integer to hex string.
9852 const char* ToHexCString(Zone* zone) const;
9853
9854 // Return the most compact presentation of an integer.
9855 IntegerPtr AsValidInteger() const;
9856
9857 // Returns null to indicate that a bigint operation is required.
9858 IntegerPtr ArithmeticOp(Token::Kind operation,
9859 const Integer& other,
9860 Heap::Space space = Heap::kNew) const;
9861 IntegerPtr BitOp(Token::Kind operation,
9862 const Integer& other,
9863 Heap::Space space = Heap::kNew) const;
9864 IntegerPtr ShiftOp(Token::Kind operation,
9865 const Integer& other,
9866 Heap::Space space = Heap::kNew) const;
9867
9868 static int64_t GetInt64Value(const IntegerPtr obj) {
9869 if (obj->IsSmi()) {
9870 return RawSmiValue(raw_value: static_cast<const SmiPtr>(obj));
9871 } else {
9872 ASSERT(obj->IsMint());
9873 return static_cast<const MintPtr>(obj)->untag()->value_;
9874 }
9875 }
9876
9877 private:
9878 OBJECT_IMPLEMENTATION(Integer, Number);
9879 friend class Class;
9880};
9881
// Smi (small integer) is an integer whose value is encoded directly in the
// tagged pointer, so a Smi never requires heap allocation.
class Smi : public Integer {
 public:
  static constexpr intptr_t kBits = kSmiBits;
  static constexpr intptr_t kMaxValue = kSmiMax;
  static constexpr intptr_t kMinValue = kSmiMin;

  // Decodes the integer value from this handle's tagged pointer.
  intptr_t Value() const { return RawSmiValue(raw_value: ptr()); }

  virtual bool Equals(const Instance& other) const;
  virtual bool IsZero() const { return Value() == 0; }
  virtual bool IsNegative() const { return Value() < 0; }

  virtual double AsDoubleValue() const;
  virtual int64_t AsInt64Value() const;
  virtual uint32_t AsTruncatedUint32Value() const;

  // A Smi trivially fits into a Smi.
  virtual bool FitsIntoSmi() const { return true; }

  virtual int CompareWith(const Integer& other) const;

  // Smis are not heap-allocated, so they occupy no heap space.
  static intptr_t InstanceSize() { return 0; }

  // Encodes 'value' as a tagged Smi pointer. The ASSERT guards against
  // values that do not round-trip through the tag shift.
  static SmiPtr New(intptr_t value) {
    SmiPtr raw_smi = static_cast<SmiPtr>(
        (static_cast<uintptr_t>(value) << kSmiTagShift) | kSmiTag);
    ASSERT(RawSmiValue(raw_smi) == value);
    return raw_smi;
  }

  static ClassPtr Class();

  // Decodes the value of a raw Smi pointer without a handle.
  static intptr_t Value(const SmiPtr raw_smi) { return RawSmiValue(raw_value: raw_smi); }
#if defined(DART_COMPRESSED_POINTERS)
  static intptr_t Value(const CompressedSmiPtr raw_smi) {
    return Smi::Value(static_cast<SmiPtr>(raw_smi.DecompressSmi()));
  }
#endif

  // Returns the tagged bit pattern of New(value) as a plain integer.
  static intptr_t RawValue(intptr_t value) {
    return static_cast<intptr_t>(New(value));
  }

  // Whether 'value' is representable as a Smi on the target architecture.
  static bool IsValid(int64_t value) { return compiler::target::IsSmi(value); }

  // Typed handle assignment; operator^= is the variant taking an
  // arbitrary ObjectPtr (checked by CHECK_HANDLE).
  void operator=(SmiPtr value) {
    ptr_ = value;
    CHECK_HANDLE();
  }
  void operator^=(ObjectPtr value) {
    ptr_ = value;
    CHECK_HANDLE();
  }

 private:
  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  Smi() : Integer() {}
  BASE_OBJECT_IMPLEMENTATION(Smi, Integer);
  OBJECT_SERVICE_SUPPORT(Smi);
  friend class Api;  // For ValueFromRaw
  friend class Class;
  friend class Object;
  friend class ReusableSmiHandleScope;
  friend class Thread;
};
9950
9951class SmiTraits : AllStatic {
9952 public:
9953 static const char* Name() { return "SmiTraits"; }
9954 static bool ReportStats() { return false; }
9955
9956 static bool IsMatch(const Object& a, const Object& b) {
9957 return Smi::Cast(obj: a).Value() == Smi::Cast(obj: b).Value();
9958 }
9959
9960 static uword Hash(const Object& obj) { return Smi::Cast(obj).Value(); }
9961};
9962
// Mint (medium integer) is a heap-allocated box holding a 64-bit signed
// integer value, used for integers that do not fit into a Smi.
class Mint : public Integer {
 public:
  static constexpr intptr_t kBits = 63;  // 64-th bit is sign.
  static constexpr int64_t kMaxValue =
      static_cast<int64_t>(DART_2PART_UINT64_C(0x7FFFFFFF, FFFFFFFF));
  static constexpr int64_t kMinValue =
      static_cast<int64_t>(DART_2PART_UINT64_C(0x80000000, 00000000));

  int64_t value() const { return untag()->value_; }
  static intptr_t value_offset() { return OFFSET_OF(UntaggedMint, value_); }
  static int64_t Value(MintPtr mint) { return mint->untag()->value_; }

  virtual bool IsZero() const { return value() == 0; }
  virtual bool IsNegative() const { return value() < 0; }

  virtual bool Equals(const Instance& other) const;

  virtual double AsDoubleValue() const;
  virtual int64_t AsInt64Value() const;
  virtual uint32_t AsTruncatedUint32Value() const;

  // Whether the boxed value would also be representable as a Smi.
  virtual bool FitsIntoSmi() const;

  // Returns 0, -1 or 1 (see Integer::CompareWith).
  virtual int CompareWith(const Integer& other) const;

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(size: sizeof(UntaggedMint));
  }

 protected:
  // Only Integer::NewXXX is allowed to call Mint::NewXXX directly.
  friend class Integer;
  friend class MintMessageDeserializationCluster;

  static MintPtr New(int64_t value, Heap::Space space = Heap::kNew);

  // Returns a canonical Mint object for 'value'.
  static MintPtr NewCanonical(int64_t value);

 private:
  void set_value(int64_t value) const;

  MINT_OBJECT_IMPLEMENTATION(Mint, Integer, Integer);
  friend class Class;
  friend class Number;
};
10008
// Class Double represents class Double in corelib_impl, which implements
// abstract class double in corelib.
class Double : public Number {
 public:
  double value() const { return untag()->value_; }
  static double Value(DoublePtr dbl) { return dbl->untag()->value_; }

  // Compares the raw bit pattern of this double with 'value'.
  bool BitwiseEqualsToDouble(double value) const;
  virtual bool OperatorEquals(const Instance& other) const;
  virtual bool CanonicalizeEquals(const Instance& other) const;
  virtual uint32_t CanonicalizeHash() const;

  static DoublePtr New(double d, Heap::Space space = Heap::kNew);

  // Creates a double from the string 'str'; see also NewCanonical(String)
  // for the behavior when the string does not parse as a double.
  static DoublePtr New(const String& str, Heap::Space space = Heap::kNew);

  // Returns a canonical double object allocated in the old gen space.
  static DoublePtr NewCanonical(double d);

  // Returns a canonical double object (allocated in the old gen space) or
  // Double::null() if str points to a string that does not convert to a
  // double value.
  static DoublePtr NewCanonical(const String& str);

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(size: sizeof(UntaggedDouble));
  }

  static intptr_t value_offset() { return OFFSET_OF(UntaggedDouble, value_); }

 private:
  void set_value(double value) const;

  FINAL_HEAP_OBJECT_IMPLEMENTATION(Double, Number);
  friend class Class;
  friend class Number;
};
10046
// TODO(http://dartbug.com/46716): Recognize Symbol in the VM.
class Symbol : public AllStatic {
 public:
  // Whether 'class_id' identifies the Symbol class.
  static bool IsSymbolCid(Thread* thread, classid_t class_id);

  // Hash used when canonicalizing Symbol instances.
  static uint32_t CanonicalizeHash(Thread* thread, const Instance& instance);
};
10054
// String may not be '\0' terminated.
class String : public Instance {
 public:
  // Code unit widths in bytes for the two string representations.
  static constexpr intptr_t kOneByteChar = 1;
  static constexpr intptr_t kTwoByteChar = 2;

// All strings share the same maximum element count to keep things
// simple. We choose a value that will prevent integer overflow for
// 2 byte strings, since it is the worst case.
#if defined(HASH_IN_OBJECT_HEADER)
  static constexpr intptr_t kSizeofRawString =
      sizeof(UntaggedInstance) + kWordSize;
#else
  static constexpr intptr_t kSizeofRawString =
      sizeof(UntaggedInstance) + 2 * kWordSize;
#endif
  static constexpr intptr_t kMaxElements = kSmiMax / kTwoByteChar;

  static intptr_t HeaderSize() { return String::kSizeofRawString; }

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(size: sizeof(UntaggedString));
  }

  // Iterates over the Unicode code points of a string (or of a sub-range
  // of one).
  class CodePointIterator : public ValueObject {
   public:
    explicit CodePointIterator(const String& str)
        : str_(str), ch_(0), index_(-1), end_(str.Length()) {
      ASSERT(!str_.IsNull());
    }

    // Iterates over the range [start, start + length) of 'str'.
    CodePointIterator(const String& str, intptr_t start, intptr_t length)
        : str_(str), ch_(0), index_(start - 1), end_(start + length) {
      ASSERT(start >= 0);
      ASSERT(end_ <= str.Length());
    }

    // The code point at the current position; only valid after a
    // successful call to Next().
    int32_t Current() const {
      ASSERT(index_ >= 0);
      ASSERT(index_ < end_);
      return ch_;
    }

    // Advances to the next code point; returns false at the end of the
    // iteration range.
    bool Next();

   private:
    const String& str_;
    int32_t ch_;      // Current code point.
    intptr_t index_;  // Index of the current code unit.
    intptr_t end_;    // Exclusive end of the iteration range.
    DISALLOW_IMPLICIT_CONSTRUCTORS(CodePointIterator);
  };

  // Number of code units (not code points) in the string.
  intptr_t Length() const { return LengthOf(obj: ptr()); }
  static intptr_t LengthOf(StringPtr obj) {
    return Smi::Value(raw_smi: obj->untag()->length());
  }
  static intptr_t length_offset() { return OFFSET_OF(UntaggedString, length_); }

  // Returns the string's hash, computing and caching it on first request.
  // A cached value of 0 means "not yet computed".
  uword Hash() const {
    uword result = GetCachedHash(obj: ptr());
    if (result != 0) {
      return result;
    }
    result = String::Hash(str: *this, begin_index: 0, len: this->Length());
    uword set_hash = SetCachedHashIfNotSet(obj: ptr(), hash: result);
    ASSERT(set_hash == result);
    return result;
  }

  static uword Hash(StringPtr raw);

  // Whether the hash has already been computed and cached (a cached hash
  // of 0 is the "not yet set" sentinel).
  bool HasHash() const {
    ASSERT(Smi::New(0) == nullptr);
    return GetCachedHash(obj: ptr()) != 0;
  }

  static intptr_t hash_offset() {
#if defined(HASH_IN_OBJECT_HEADER)
    COMPILE_ASSERT(UntaggedObject::kHashTagPos % kBitsPerByte == 0);
    return OFFSET_OF(UntaggedObject, tags_) +
           UntaggedObject::kHashTagPos / kBitsPerByte;
#else
    return OFFSET_OF(UntaggedString, hash_);
#endif
  }
  static uword Hash(const String& str, intptr_t begin_index, intptr_t len);
  static uword Hash(const char* characters, intptr_t len);
  static uword Hash(const uint16_t* characters, intptr_t len);
  static uword Hash(const int32_t* characters, intptr_t len);
  // Symbols (canonical strings) always have their hash set, so it can be
  // read directly without the compute-on-demand fallback.
  static uword HashRawSymbol(const StringPtr symbol) {
    ASSERT(symbol->untag()->IsCanonical());
    const uword result = GetCachedHash(obj: symbol);
    ASSERT(result != 0);
    return result;
  }

  // Returns the hash of str1 + str2.
  static uword HashConcat(const String& str1, const String& str2);

  virtual ObjectPtr HashCode() const { return Integer::New(value: Hash()); }

  // Returns the code unit (not code point) at 'index'.
  uint16_t CharAt(intptr_t index) const { return CharAt(str: ptr(), index); }
  static uint16_t CharAt(StringPtr str, intptr_t index);

  // Width in bytes of this string's code units.
  intptr_t CharSize() const;

  inline bool Equals(const String& str) const;

  bool Equals(const String& str,
              intptr_t begin_index,  // begin index on 'str'.
              intptr_t len) const;   // len on 'str'.

  // Compares to a '\0' terminated array of UTF-8 encoded characters.
  bool Equals(const char* cstr) const;

  // Compares to an array of Latin-1 encoded characters.
  bool EqualsLatin1(const uint8_t* characters, intptr_t len) const {
    return Equals(characters, len);
  }

  // Compares to an array of UTF-16 encoded characters.
  bool Equals(const uint16_t* characters, intptr_t len) const;

  // Compares to an array of UTF-32 encoded characters.
  bool Equals(const int32_t* characters, intptr_t len) const;

  // True iff this string equals str1 + str2.
  bool EqualsConcat(const String& str1, const String& str2) const;

  virtual bool OperatorEquals(const Instance& other) const {
    return Equals(other);
  }
  virtual bool CanonicalizeEquals(const Instance& other) const {
    return Equals(other);
  }
  virtual uint32_t CanonicalizeHash() const { return Hash(); }
  virtual bool Equals(const Instance& other) const;

  // Three-way comparison with 'other'.
  intptr_t CompareTo(const String& other) const;

  bool StartsWith(const String& other) const {
    NoSafepointScope no_safepoint;
    return StartsWith(str: ptr(), prefix: other.ptr());
  }
  static bool StartsWith(StringPtr str, StringPtr prefix);
  bool EndsWith(const String& other) const;

  // Strings are canonicalized using the symbol table.
  // Caller must hold IsolateGroup::constant_canonicalization_mutex_.
  virtual InstancePtr CanonicalizeLocked(Thread* thread) const;

  // A canonical string is an entry in the symbol table, i.e. a symbol.
  bool IsSymbol() const { return ptr()->untag()->IsCanonical(); }

  bool IsOneByteString() const {
    return ptr()->GetClassId() == kOneByteStringCid;
  }

  bool IsTwoByteString() const {
    return ptr()->GetClassId() == kTwoByteStringCid;
  }

  bool IsExternalOneByteString() const {
    return ptr()->GetClassId() == kExternalOneByteStringCid;
  }

  bool IsExternalTwoByteString() const {
    return ptr()->GetClassId() == kExternalTwoByteStringCid;
  }

  bool IsExternal() const {
    return IsExternalStringClassId(index: ptr()->GetClassId());
  }

  // The peer of an external string; see NewExternal().
  void* GetPeer() const;

  // Returns a malloc-allocated, '\0'-terminated C-string copy.
  char* ToMallocCString() const;
  // Writes the UTF-8 encoding into the caller-provided buffer.
  void ToUTF8(uint8_t* utf8_array, intptr_t array_len) const;
  static const char* ToCString(Thread* thread, StringPtr ptr);

  // Creates a new String object from a C string that is assumed to contain
  // UTF-8 encoded characters and '\0' is considered a termination character.
  // TODO(7123) - Rename this to FromCString(....).
  static StringPtr New(const char* cstr, Heap::Space space = Heap::kNew);

  // Creates a new String object from an array of UTF-8 encoded characters.
  static StringPtr FromUTF8(const uint8_t* utf8_array,
                            intptr_t array_len,
                            Heap::Space space = Heap::kNew);

  // Creates a new String object from an array of Latin-1 encoded characters.
  static StringPtr FromLatin1(const uint8_t* latin1_array,
                              intptr_t array_len,
                              Heap::Space space = Heap::kNew);

  // Creates a new String object from an array of UTF-16 encoded characters.
  static StringPtr FromUTF16(const uint16_t* utf16_array,
                             intptr_t array_len,
                             Heap::Space space = Heap::kNew);

  // Creates a new String object from an array of UTF-32 encoded characters.
  static StringPtr FromUTF32(const int32_t* utf32_array,
                             intptr_t array_len,
                             Heap::Space space = Heap::kNew);

  // Create a new String object from another Dart String instance.
  static StringPtr New(const String& str, Heap::Space space = Heap::kNew);

  // Creates a new External String object using the specified array of
  // UTF-8 encoded characters as the external reference.
  static StringPtr NewExternal(const uint8_t* utf8_array,
                               intptr_t array_len,
                               void* peer,
                               intptr_t external_allocation_size,
                               Dart_HandleFinalizer callback,
                               Heap::Space = Heap::kNew);

  // Creates a new External String object using the specified array of
  // UTF-16 encoded characters as the external reference.
  static StringPtr NewExternal(const uint16_t* utf16_array,
                               intptr_t array_len,
                               void* peer,
                               intptr_t external_allocation_size,
                               Dart_HandleFinalizer callback,
                               Heap::Space = Heap::kNew);

  // Copies 'len' characters into 'dst' starting at 'dst_offset'.
  static void Copy(const String& dst,
                   intptr_t dst_offset,
                   const uint8_t* characters,
                   intptr_t len);
  static void Copy(const String& dst,
                   intptr_t dst_offset,
                   const uint16_t* characters,
                   intptr_t len);
  static void Copy(const String& dst,
                   intptr_t dst_offset,
                   const String& src,
                   intptr_t src_offset,
                   intptr_t len);

  static StringPtr EscapeSpecialCharacters(const String& str);
  // Encodes 'str' for use in an Internationalized Resource Identifier (IRI),
  // a generalization of URI (percent-encoding). See RFC 3987.
  static const char* EncodeIRI(const String& str);
  // Returns null if 'str' is not a valid encoding.
  static StringPtr DecodeIRI(const String& str);
  static StringPtr Concat(const String& str1,
                          const String& str2,
                          Heap::Space space = Heap::kNew);
  static StringPtr ConcatAll(const Array& strings,
                             Heap::Space space = Heap::kNew);
  // Concat all strings in 'strings' from 'start' to 'end' (excluding).
  static StringPtr ConcatAllRange(const Array& strings,
                                  intptr_t start,
                                  intptr_t end,
                                  Heap::Space space = Heap::kNew);

  // Returns the suffix of 'str' starting at 'begin_index'.
  static StringPtr SubString(const String& str,
                             intptr_t begin_index,
                             Heap::Space space = Heap::kNew);
  static StringPtr SubString(const String& str,
                             intptr_t begin_index,
                             intptr_t length,
                             Heap::Space space = Heap::kNew) {
    return SubString(thread: Thread::Current(), str, begin_index, length, space);
  }
  static StringPtr SubString(Thread* thread,
                             const String& str,
                             intptr_t begin_index,
                             intptr_t length,
                             Heap::Space space = Heap::kNew);

  // Returns a copy of 'str' with 'mapping' applied to each code point.
  static StringPtr Transform(int32_t (*mapping)(int32_t ch),
                             const String& str,
                             Heap::Space space = Heap::kNew);

  static StringPtr ToUpperCase(const String& str,
                               Heap::Space space = Heap::kNew);
  static StringPtr ToLowerCase(const String& str,
                               Heap::Space space = Heap::kNew);

  static StringPtr RemovePrivateKey(const String& name);

  static const char* ScrubName(const String& name, bool is_extension = false);
  static StringPtr ScrubNameRetainPrivate(const String& name,
                                          bool is_extension = false);

  static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2);

  // printf-style factories.
  static StringPtr NewFormatted(const char* format, ...) PRINTF_ATTRIBUTE(1, 2);
  static StringPtr NewFormatted(Heap::Space space, const char* format, ...)
      PRINTF_ATTRIBUTE(2, 3);
  static StringPtr NewFormattedV(const char* format,
                                 va_list args,
                                 Heap::Space space = Heap::kNew);

  static bool ParseDouble(const String& str,
                          intptr_t start,
                          intptr_t end,
                          double* result);

// When the hash is not stored in the object header, it lives in a Smi
// field of the string itself.
#if !defined(HASH_IN_OBJECT_HEADER)
  static uint32_t GetCachedHash(const StringPtr obj) {
    return Smi::Value(obj->untag()->hash_);
  }

  static uint32_t SetCachedHashIfNotSet(StringPtr obj, uint32_t hash) {
    ASSERT(Smi::Value(obj->untag()->hash_) == 0 ||
           Smi::Value(obj->untag()->hash_) == static_cast<intptr_t>(hash));
    return SetCachedHash(obj, hash);
  }
  static uint32_t SetCachedHash(StringPtr obj, uint32_t hash) {
    obj->untag()->hash_ = Smi::New(hash);
    return hash;
  }
#else
  static uint32_t SetCachedHash(StringPtr obj, uint32_t hash) {
    return Object::SetCachedHashIfNotSet(obj, hash);
  }
#endif

 protected:
  // These two operate on an array of Latin-1 encoded characters.
  // They are protected to avoid mistaking Latin-1 for UTF-8, but used
  // by friendly templated code (e.g., Symbols).
  bool Equals(const uint8_t* characters, intptr_t len) const;
  static uword Hash(const uint8_t* characters, intptr_t len);

  void SetLength(intptr_t value) const {
    // This is only safe because we create a new Smi, which does not cause
    // heap allocation.
    untag()->set_length(Smi::New(value));
  }

  void SetHash(intptr_t value) const {
    const intptr_t hash_set = SetCachedHashIfNotSet(obj: ptr(), hash: value);
    ASSERT(hash_set == value);
  }

  FINAL_HEAP_OBJECT_IMPLEMENTATION(String, Instance);

  friend class Class;
  friend class Symbols;
  friend class StringSlice;  // SetHash
  template <typename CharType>
  friend class CharArray;     // SetHash
  friend class ConcatString;  // SetHash
  friend class OneByteString;
  friend class TwoByteString;
  friend class ExternalOneByteString;
  friend class ExternalTwoByteString;
  friend class UntaggedOneByteString;
  friend class RODataSerializationCluster;  // SetHash
  friend class Pass2Visitor;                // Stack "handle"
};
10410
10411// Synchronize with implementation in compiler (intrinsifier).
10412class StringHasher : public ValueObject {
10413 public:
10414 StringHasher() : hash_(0) {}
10415 void Add(uint16_t code_unit) { hash_ = CombineHashes(hash: hash_, other_hash: code_unit); }
10416 void Add(const uint8_t* code_units, intptr_t len) {
10417 while (len > 0) {
10418 Add(code_unit: *code_units);
10419 code_units++;
10420 len--;
10421 }
10422 }
10423 void Add(const uint16_t* code_units, intptr_t len) {
10424 while (len > 0) {
10425 Add(code_unit: LoadUnaligned(ptr: code_units));
10426 code_units++;
10427 len--;
10428 }
10429 }
10430 void Add(const String& str, intptr_t begin_index, intptr_t len);
10431 intptr_t Finalize() { return FinalizeHash(hash: hash_, hashbits: String::kHashBits); }
10432
10433 private:
10434 uint32_t hash_;
10435};
10436
// Non-instantiable (AllStatic) helper for String instances whose class id
// is kOneByteStringCid: one byte per code unit, stored inline.
class OneByteString : public AllStatic {
 public:
  static uint16_t CharAt(const String& str, intptr_t index) {
    ASSERT(str.IsOneByteString());
    return OneByteString::CharAt(str: static_cast<OneByteStringPtr>(str.ptr()),
                                 index);
  }

  static uint16_t CharAt(OneByteStringPtr str, intptr_t index) {
    ASSERT(index >= 0 && index < String::LengthOf(str));
    return str->untag()->data()[index];
  }

  static void SetCharAt(const String& str, intptr_t index, uint8_t code_unit) {
    NoSafepointScope no_safepoint;
    *CharAddr(str, index) = code_unit;
  }
  static OneByteStringPtr EscapeSpecialCharacters(const String& str);
  // We use the same maximum elements for all strings.
  static constexpr intptr_t kBytesPerElement = 1;
  static constexpr intptr_t kMaxElements = String::kMaxElements;
  // Largest element count that still fits a new-space allocation.
  static constexpr intptr_t kMaxNewSpaceElements =
      (kNewAllocatableSize - sizeof(UntaggedOneByteString)) / kBytesPerElement;

  struct ArrayTraits {
    static intptr_t elements_start_offset() {
      return sizeof(UntaggedOneByteString);
    }
    static constexpr intptr_t kElementSize = kBytesPerElement;
  };

  static intptr_t data_offset() {
    return OFFSET_OF_RETURNED_VALUE(UntaggedOneByteString, data);
  }

  // Object size before rounding up to allocation granularity.
  static intptr_t UnroundedSize(OneByteStringPtr str) {
    return UnroundedSize(len: Smi::Value(raw_smi: str->untag()->length()));
  }
  static intptr_t UnroundedSize(intptr_t len) {
    return sizeof(UntaggedOneByteString) + (len * kBytesPerElement);
  }
  // Instances are variable-length; use InstanceSize(len) instead.
  static intptr_t InstanceSize() {
    ASSERT(sizeof(UntaggedOneByteString) ==
           OFFSET_OF_RETURNED_VALUE(UntaggedOneByteString, data));
    return 0;
  }
  static intptr_t InstanceSize(intptr_t len) {
    ASSERT(sizeof(UntaggedOneByteString) == String::kSizeofRawString);
    ASSERT(0 <= len && len <= kMaxElements);
    return String::RoundedAllocationSize(size: UnroundedSize(len));
  }

  static OneByteStringPtr New(intptr_t len, Heap::Space space);
  static OneByteStringPtr New(const char* c_string,
                              Heap::Space space = Heap::kNew) {
    return New(characters: reinterpret_cast<const uint8_t*>(c_string), len: strlen(s: c_string),
               space);
  }
  static OneByteStringPtr New(const uint8_t* characters,
                              intptr_t len,
                              Heap::Space space);
  static OneByteStringPtr New(const uint16_t* characters,
                              intptr_t len,
                              Heap::Space space);
  static OneByteStringPtr New(const int32_t* characters,
                              intptr_t len,
                              Heap::Space space);
  static OneByteStringPtr New(const String& str, Heap::Space space);
  // 'other' must be OneByteString.
  static OneByteStringPtr New(const String& other_one_byte_string,
                              intptr_t other_start_index,
                              intptr_t other_len,
                              Heap::Space space);

  static OneByteStringPtr New(const TypedDataBase& other_typed_data,
                              intptr_t other_start_index,
                              intptr_t other_len,
                              Heap::Space space = Heap::kNew);

  static OneByteStringPtr Concat(const String& str1,
                                 const String& str2,
                                 Heap::Space space);
  static OneByteStringPtr ConcatAll(const Array& strings,
                                    intptr_t start,
                                    intptr_t end,
                                    intptr_t len,
                                    Heap::Space space);

  static OneByteStringPtr Transform(int32_t (*mapping)(int32_t ch),
                                    const String& str,
                                    Heap::Space space);

  // High performance version of substring for one-byte strings.
  // "str" must be OneByteString.
  static OneByteStringPtr SubStringUnchecked(const String& str,
                                             intptr_t begin_index,
                                             intptr_t length,
                                             Heap::Space space);

  static const ClassId kClassId = kOneByteStringCid;

  static OneByteStringPtr null() {
    return static_cast<OneByteStringPtr>(Object::null());
  }

 private:
  static OneByteStringPtr raw(const String& str) {
    return static_cast<OneByteStringPtr>(str.ptr());
  }

  static const UntaggedOneByteString* untag(const String& str) {
    return reinterpret_cast<const UntaggedOneByteString*>(str.untag());
  }

  // Writable address of the code unit at 'index'.
  static uint8_t* CharAddr(const String& str, intptr_t index) {
    ASSERT((index >= 0) && (index < str.Length()));
    ASSERT(str.IsOneByteString());
    return &str.UnsafeMutableNonPointer(addr: untag(str)->data())[index];
  }

  // Writable address of the first code unit (valid even for index 0 of an
  // empty range, unlike CharAddr).
  static uint8_t* DataStart(const String& str) {
    ASSERT(str.IsOneByteString());
    return &str.UnsafeMutableNonPointer(addr: untag(str)->data())[0];
  }

  ALLSTATIC_CONTAINS_COMPRESSED_IMPLEMENTATION(OneByteString, String);

  friend class Class;
  friend class ExternalOneByteString;
  friend class FlowGraphSerializer;
  friend class ImageWriter;
  friend class String;
  friend class StringHasher;
  friend class Symbols;
  friend class Utf8;
  friend class OneByteStringMessageSerializationCluster;
  friend class Deserializer;
  friend class JSONWriter;
};
10576
// Non-instantiable (AllStatic) helper for String instances whose class id
// is kTwoByteStringCid: two bytes (UTF-16 code unit) per element.
class TwoByteString : public AllStatic {
 public:
  static uint16_t CharAt(const String& str, intptr_t index) {
    ASSERT(str.IsTwoByteString());
    return TwoByteString::CharAt(str: static_cast<TwoByteStringPtr>(str.ptr()),
                                 index);
  }

  static uint16_t CharAt(TwoByteStringPtr str, intptr_t index) {
    ASSERT(index >= 0 && index < String::LengthOf(str));
    return str->untag()->data()[index];
  }

  static void SetCharAt(const String& str, intptr_t index, uint16_t ch) {
    NoSafepointScope no_safepoint;
    *CharAddr(str, index) = ch;
  }

  static TwoByteStringPtr EscapeSpecialCharacters(const String& str);

  // We use the same maximum elements for all strings.
  static constexpr intptr_t kBytesPerElement = 2;
  static constexpr intptr_t kMaxElements = String::kMaxElements;
  // Largest element count that still fits a new-space allocation.
  static constexpr intptr_t kMaxNewSpaceElements =
      (kNewAllocatableSize - sizeof(UntaggedTwoByteString)) / kBytesPerElement;

  struct ArrayTraits {
    static intptr_t elements_start_offset() {
      return sizeof(UntaggedTwoByteString);
    }
    static constexpr intptr_t kElementSize = kBytesPerElement;
  };

  static intptr_t data_offset() {
    return OFFSET_OF_RETURNED_VALUE(UntaggedTwoByteString, data);
  }
  // Object size before rounding up to allocation granularity.
  static intptr_t UnroundedSize(TwoByteStringPtr str) {
    return UnroundedSize(len: Smi::Value(raw_smi: str->untag()->length()));
  }
  static intptr_t UnroundedSize(intptr_t len) {
    return sizeof(UntaggedTwoByteString) + (len * kBytesPerElement);
  }
  // Instances are variable-length; use InstanceSize(len) instead.
  static intptr_t InstanceSize() {
    ASSERT(sizeof(UntaggedTwoByteString) ==
           OFFSET_OF_RETURNED_VALUE(UntaggedTwoByteString, data));
    return 0;
  }
  static intptr_t InstanceSize(intptr_t len) {
    ASSERT(sizeof(UntaggedTwoByteString) == String::kSizeofRawString);
    ASSERT(0 <= len && len <= kMaxElements);
    return String::RoundedAllocationSize(size: UnroundedSize(len));
  }

  static TwoByteStringPtr New(intptr_t len, Heap::Space space);
  static TwoByteStringPtr New(const uint16_t* characters,
                              intptr_t len,
                              Heap::Space space);
  static TwoByteStringPtr New(intptr_t utf16_len,
                              const int32_t* characters,
                              intptr_t len,
                              Heap::Space space);
  static TwoByteStringPtr New(const String& str, Heap::Space space);

  static TwoByteStringPtr New(const TypedDataBase& other_typed_data,
                              intptr_t other_start_index,
                              intptr_t other_len,
                              Heap::Space space = Heap::kNew);

  static TwoByteStringPtr Concat(const String& str1,
                                 const String& str2,
                                 Heap::Space space);
  static TwoByteStringPtr ConcatAll(const Array& strings,
                                    intptr_t start,
                                    intptr_t end,
                                    intptr_t len,
                                    Heap::Space space);

  static TwoByteStringPtr Transform(int32_t (*mapping)(int32_t ch),
                                    const String& str,
                                    Heap::Space space);

  static TwoByteStringPtr null() {
    return static_cast<TwoByteStringPtr>(Object::null());
  }

  static const ClassId kClassId = kTwoByteStringCid;

 private:
  static TwoByteStringPtr raw(const String& str) {
    return static_cast<TwoByteStringPtr>(str.ptr());
  }

  static const UntaggedTwoByteString* untag(const String& str) {
    return reinterpret_cast<const UntaggedTwoByteString*>(str.untag());
  }

  // Writable address of the code unit at 'index'.
  static uint16_t* CharAddr(const String& str, intptr_t index) {
    ASSERT((index >= 0) && (index < str.Length()));
    ASSERT(str.IsTwoByteString());
    return &str.UnsafeMutableNonPointer(addr: untag(str)->data())[index];
  }

  // Use this instead of CharAddr(0). It will not assert that the index is <
  // length.
  static uint16_t* DataStart(const String& str) {
    ASSERT(str.IsTwoByteString());
    return &str.UnsafeMutableNonPointer(addr: untag(str)->data())[0];
  }

  ALLSTATIC_CONTAINS_COMPRESSED_IMPLEMENTATION(TwoByteString, String);

  friend class Class;
  friend class FlowGraphSerializer;
  friend class ImageWriter;
  friend class String;
  friend class StringHasher;
  friend class Symbols;
  friend class TwoByteStringMessageSerializationCluster;
  friend class JSONWriter;
};
10697
// Non-instantiable (AllStatic) helper for String instances whose class id
// is kExternalOneByteStringCid: the one-byte character data lives outside
// the Dart heap and is referenced via 'external_data_'.
class ExternalOneByteString : public AllStatic {
 public:
  static uint16_t CharAt(const String& str, intptr_t index) {
    ASSERT(str.IsExternalOneByteString());
    return ExternalOneByteString::CharAt(
        str: static_cast<ExternalOneByteStringPtr>(str.ptr()), index);
  }

  static uint16_t CharAt(ExternalOneByteStringPtr str, intptr_t index) {
    ASSERT(index >= 0 && index < String::LengthOf(str));
    return str->untag()->external_data_[index];
  }

  // The embedder-supplied peer associated with the external data.
  static void* GetPeer(const String& str) { return untag(str)->peer_; }

  static intptr_t external_data_offset() {
    return OFFSET_OF(UntaggedExternalOneByteString, external_data_);
  }

  // We use the same maximum elements for all strings.
  static constexpr intptr_t kBytesPerElement = 1;
  static constexpr intptr_t kMaxElements = String::kMaxElements;

  // Fixed size: the character data is external, not stored inline.
  static intptr_t InstanceSize() {
    return String::RoundedAllocationSize(size: sizeof(UntaggedExternalOneByteString));
  }

  static ExternalOneByteStringPtr New(const uint8_t* characters,
                                      intptr_t len,
                                      void* peer,
                                      intptr_t external_allocation_size,
                                      Dart_HandleFinalizer callback,
                                      Heap::Space space);

  static ExternalOneByteStringPtr null() {
    return static_cast<ExternalOneByteStringPtr>(Object::null());
  }

  static OneByteStringPtr EscapeSpecialCharacters(const String& str);
  static OneByteStringPtr EncodeIRI(const String& str);
  static OneByteStringPtr DecodeIRI(const String& str);

  static const ClassId kClassId = kExternalOneByteStringCid;

 private:
  static ExternalOneByteStringPtr raw(const String& str) {
    return static_cast<ExternalOneByteStringPtr>(str.ptr());
  }

  static const UntaggedExternalOneByteString* untag(const String& str) {
    return reinterpret_cast<const UntaggedExternalOneByteString*>(str.untag());
  }

  // Address of the code unit at 'index' (read-only: data is external).
  static const uint8_t* CharAddr(const String& str, intptr_t index) {
    ASSERT((index >= 0) && (index < str.Length()));
    ASSERT(str.IsExternalOneByteString());
    return &(untag(str)->external_data_[index]);
  }

  static const uint8_t* DataStart(const String& str) {
    ASSERT(str.IsExternalOneByteString());
    return untag(str)->external_data_;
  }

  // Installs the external data pointer and peer. The data must not live
  // inside the Dart heap (checked by the ASSERT).
  static void SetExternalData(const String& str,
                              const uint8_t* data,
                              void* peer) {
    ASSERT(str.IsExternalOneByteString());
    ASSERT(!IsolateGroup::Current()->heap()->Contains(
        reinterpret_cast<uword>(data)));
    str.StoreNonPointer(addr: &untag(str)->external_data_, value: data);
    str.StoreNonPointer(addr: &untag(str)->peer_, value: peer);
  }

  // Weak-handle finalizer invoked when the string is collected.
  static void Finalize(void* isolate_callback_data,
                       Dart_WeakPersistentHandle handle,
                       void* peer);

  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  ALLSTATIC_CONTAINS_COMPRESSED_IMPLEMENTATION(ExternalOneByteString, String);

  friend class Class;
  friend class String;
  friend class StringHasher;
  friend class Symbols;
  friend class Utf8;
  friend class JSONWriter;
};
10790
10791class ExternalTwoByteString : public AllStatic {
10792 public:
10793 static uint16_t CharAt(const String& str, intptr_t index) {
10794 ASSERT(str.IsExternalTwoByteString());
10795 return ExternalTwoByteString::CharAt(
10796 str: static_cast<ExternalTwoByteStringPtr>(str.ptr()), index);
10797 }
10798
10799 static uint16_t CharAt(ExternalTwoByteStringPtr str, intptr_t index) {
10800 ASSERT(index >= 0 && index < String::LengthOf(str));
10801 return str->untag()->external_data_[index];
10802 }
10803
10804 static void* GetPeer(const String& str) { return untag(str)->peer_; }
10805
10806 static intptr_t external_data_offset() {
10807 return OFFSET_OF(UntaggedExternalTwoByteString, external_data_);
10808 }
10809
10810 // We use the same maximum elements for all strings.
10811 static constexpr intptr_t kBytesPerElement = 2;
10812 static constexpr intptr_t kMaxElements = String::kMaxElements;
10813
10814 static intptr_t InstanceSize() {
10815 return String::RoundedAllocationSize(size: sizeof(UntaggedExternalTwoByteString));
10816 }
10817
10818 static ExternalTwoByteStringPtr New(const uint16_t* characters,
10819 intptr_t len,
10820 void* peer,
10821 intptr_t external_allocation_size,
10822 Dart_HandleFinalizer callback,
10823 Heap::Space space = Heap::kNew);
10824
10825 static ExternalTwoByteStringPtr null() {
10826 return static_cast<ExternalTwoByteStringPtr>(Object::null());
10827 }
10828
10829 static const ClassId kClassId = kExternalTwoByteStringCid;
10830
10831 private:
10832 static ExternalTwoByteStringPtr raw(const String& str) {
10833 return static_cast<ExternalTwoByteStringPtr>(str.ptr());
10834 }
10835
10836 static const UntaggedExternalTwoByteString* untag(const String& str) {
10837 return reinterpret_cast<const UntaggedExternalTwoByteString*>(str.untag());
10838 }
10839
10840 static const uint16_t* CharAddr(const String& str, intptr_t index) {
10841 ASSERT((index >= 0) && (index < str.Length()));
10842 ASSERT(str.IsExternalTwoByteString());
10843 return &(untag(str)->external_data_[index]);
10844 }
10845
10846 static const uint16_t* DataStart(const String& str) {
10847 ASSERT(str.IsExternalTwoByteString());
10848 return untag(str)->external_data_;
10849 }
10850
10851 static void SetExternalData(const String& str,
10852 const uint16_t* data,
10853 void* peer) {
10854 ASSERT(str.IsExternalTwoByteString());
10855 ASSERT(!IsolateGroup::Current()->heap()->Contains(
10856 reinterpret_cast<uword>(data)));
10857 str.StoreNonPointer(addr: &untag(str)->external_data_, value: data);
10858 str.StoreNonPointer(addr: &untag(str)->peer_, value: peer);
10859 }
10860
10861 static void Finalize(void* isolate_callback_data,
10862 Dart_WeakPersistentHandle handle,
10863 void* peer);
10864
10865 static intptr_t NextFieldOffset() {
10866 // Indicates this class cannot be extended by dart code.
10867 return -kWordSize;
10868 }
10869
10870 ALLSTATIC_CONTAINS_COMPRESSED_IMPLEMENTATION(ExternalTwoByteString, String);
10871
10872 friend class Class;
10873 friend class String;
10874 friend class StringHasher;
10875 friend class Symbols;
10876 friend class JSONWriter;
10877};
10878
// Identity hash values for null, true and false. These values must match
// the hash codes hard-coded in the corresponding core library patches
// (null_patch.dart / bool_patch.dart).
static constexpr intptr_t kNullIdentityHash = 2011;
static constexpr intptr_t kTrueIdentityHash = 1231;
static constexpr intptr_t kFalseIdentityHash = 1237;
10883
10884// Class Bool implements Dart core class bool.
10885class Bool : public Instance {
10886 public:
10887 bool value() const { return untag()->value_; }
10888
10889 static intptr_t InstanceSize() {
10890 return RoundedAllocationSize(size: sizeof(UntaggedBool));
10891 }
10892
10893 static const Bool& True() { return Object::bool_true(); }
10894
10895 static const Bool& False() { return Object::bool_false(); }
10896
10897 static const Bool& Get(bool value) {
10898 return value ? Bool::True() : Bool::False();
10899 }
10900
10901 virtual uint32_t CanonicalizeHash() const {
10902 return ptr() == True().ptr() ? kTrueIdentityHash : kFalseIdentityHash;
10903 }
10904
10905 private:
10906 FINAL_HEAP_OBJECT_IMPLEMENTATION(Bool, Instance);
10907 friend class Class;
10908 friend class Object; // To initialize the true and false values.
10909};
10910
10911class Array : public Instance {
10912 public:
10913 // Returns `true` if we use card marking for arrays of length [array_length].
10914 static constexpr bool UseCardMarkingForAllocation(
10915 const intptr_t array_length) {
10916 return Array::InstanceSize(len: array_length) > kNewAllocatableSize;
10917 }
10918
10919 // WB invariant restoration code only applies to arrives which have at most
10920 // this many elements. Consequently WB elimination code should not eliminate
10921 // WB on arrays of larger lengths across instructions that can cause GC.
10922 // Note: we also can't restore WB invariant for arrays which use card marking.
10923 static constexpr intptr_t kMaxLengthForWriteBarrierElimination = 8;
10924
10925 intptr_t Length() const { return LengthOf(array: ptr()); }
10926 static intptr_t LengthOf(const ArrayPtr array) {
10927 return Smi::Value(raw_smi: array->untag()->length());
10928 }
10929
10930 static intptr_t length_offset() { return OFFSET_OF(UntaggedArray, length_); }
10931 static intptr_t data_offset() {
10932 return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data);
10933 }
10934 static intptr_t element_offset(intptr_t index) {
10935 return OFFSET_OF_RETURNED_VALUE(UntaggedArray, data) +
10936 kBytesPerElement * index;
10937 }
10938 static intptr_t index_at_offset(intptr_t offset_in_bytes) {
10939 intptr_t index = (offset_in_bytes - data_offset()) / kBytesPerElement;
10940 ASSERT(index >= 0);
10941 return index;
10942 }
10943
10944 struct ArrayTraits {
10945 static intptr_t elements_start_offset() { return Array::data_offset(); }
10946
10947 static constexpr intptr_t kElementSize = kCompressedWordSize;
10948 };
10949
10950 static bool Equals(ArrayPtr a, ArrayPtr b) {
10951 if (a == b) return true;
10952 if (a->IsRawNull() || b->IsRawNull()) return false;
10953 if (a->untag()->length() != b->untag()->length()) return false;
10954 if (a->untag()->type_arguments() != b->untag()->type_arguments()) {
10955 return false;
10956 }
10957 const intptr_t length = LengthOf(array: a);
10958 return memcmp(s1: a->untag()->data(), s2: b->untag()->data(),
10959 n: kBytesPerElement * length) == 0;
10960 }
10961 bool Equals(const Array& other) const {
10962 NoSafepointScope scope;
10963 return Equals(a: ptr(), b: other.ptr());
10964 }
10965
10966 static CompressedObjectPtr* DataOf(ArrayPtr array) {
10967 return array->untag()->data();
10968 }
10969
10970 template <std::memory_order order = std::memory_order_relaxed>
10971 ObjectPtr At(intptr_t index) const {
10972 return untag()->element<order>(index);
10973 }
10974 template <std::memory_order order = std::memory_order_relaxed>
10975 void SetAt(intptr_t index, const Object& value) const {
10976 untag()->set_element<order>(index, value.ptr());
10977 }
10978 template <std::memory_order order = std::memory_order_relaxed>
10979 void SetAt(intptr_t index, const Object& value, Thread* thread) const {
10980 untag()->set_element<order>(index, value.ptr(), thread);
10981 }
10982
10983 // Access to the array with acquire release semantics.
10984 ObjectPtr AtAcquire(intptr_t index) const {
10985 return untag()->element<std::memory_order_acquire>(index);
10986 }
10987 void SetAtRelease(intptr_t index, const Object& value) const {
10988 untag()->set_element<std::memory_order_release>(index, value: value.ptr());
10989 }
10990
10991 bool IsImmutable() const { return ptr()->GetClassId() == kImmutableArrayCid; }
10992
10993 // Position of element type in type arguments.
10994 static constexpr intptr_t kElementTypeTypeArgPos = 0;
10995
10996 virtual TypeArgumentsPtr GetTypeArguments() const {
10997 return untag()->type_arguments();
10998 }
10999 virtual void SetTypeArguments(const TypeArguments& value) const {
11000 // An Array is raw or takes one type argument. However, its type argument
11001 // vector may be longer than 1 due to a type optimization reusing the type
11002 // argument vector of the instantiator.
11003 ASSERT(value.IsNull() ||
11004 ((value.Length() >= 1) &&
11005 value.IsInstantiated() /*&& value.IsCanonical()*/));
11006 // TODO(asiva): Values read from a message snapshot are not properly marked
11007 // as canonical. See for example tests/isolate/mandel_isolate_test.dart.
11008 StoreArrayPointer(addr: &untag()->type_arguments_, value: value.ptr());
11009 }
11010
11011 virtual bool CanonicalizeEquals(const Instance& other) const;
11012 virtual uint32_t CanonicalizeHash() const;
11013
11014 static constexpr intptr_t kBytesPerElement = ArrayTraits::kElementSize;
11015 static constexpr intptr_t kMaxElements = kSmiMax / kBytesPerElement;
11016 static constexpr intptr_t kMaxNewSpaceElements =
11017 (kNewAllocatableSize - sizeof(UntaggedArray)) / kBytesPerElement;
11018
11019 static intptr_t type_arguments_offset() {
11020 return OFFSET_OF(UntaggedArray, type_arguments_);
11021 }
11022
11023 static constexpr bool IsValidLength(intptr_t len) {
11024 return 0 <= len && len <= kMaxElements;
11025 }
11026
11027 static intptr_t InstanceSize() {
11028 ASSERT(sizeof(UntaggedArray) ==
11029 OFFSET_OF_RETURNED_VALUE(UntaggedArray, data));
11030 return 0;
11031 }
11032
11033 static constexpr intptr_t InstanceSize(intptr_t len) {
11034 // Ensure that variable length data is not adding to the object length.
11035 ASSERT(sizeof(UntaggedArray) ==
11036 (sizeof(UntaggedInstance) + (2 * kBytesPerElement)));
11037 ASSERT(IsValidLength(len));
11038 return RoundedAllocationSize(size: sizeof(UntaggedArray) +
11039 (len * kBytesPerElement));
11040 }
11041
11042 virtual void CanonicalizeFieldsLocked(Thread* thread) const;
11043
11044 // Make the array immutable to Dart code by switching the class pointer
11045 // to ImmutableArray.
11046 void MakeImmutable() const;
11047
11048 static ArrayPtr New(intptr_t len, Heap::Space space = Heap::kNew) {
11049 return New(class_id: kArrayCid, len, space);
11050 }
11051 // The result's type arguments and elements are GC-safe but not initialized to
11052 // null.
11053 static ArrayPtr NewUninitialized(intptr_t len,
11054 Heap::Space space = Heap::kNew) {
11055 return NewUninitialized(class_id: kArrayCid, len, space);
11056 }
11057 static ArrayPtr New(intptr_t len,
11058 const AbstractType& element_type,
11059 Heap::Space space = Heap::kNew);
11060
11061 // Creates and returns a new array with 'new_length'. Copies all elements from
11062 // 'source' to the new array. 'new_length' must be greater than or equal to
11063 // 'source.Length()'. 'source' can be null.
11064 static ArrayPtr Grow(const Array& source,
11065 intptr_t new_length,
11066 Heap::Space space = Heap::kNew);
11067
11068 // Truncates the array to a given length. 'new_length' must be less than
11069 // or equal to 'source.Length()'. The remaining unused part of the array is
11070 // marked as an Array object or a regular Object so that it can be traversed
11071 // during garbage collection.
11072 void Truncate(intptr_t new_length) const;
11073
11074 // Return an Array object that contains all the elements currently present
11075 // in the specified Growable Object Array. This is done by first truncating
11076 // the Growable Object Array's backing array to the currently used size and
11077 // returning the truncated backing array.
11078 // The backing array of the original Growable Object Array is
11079 // set to an empty array.
11080 // If the unique parameter is false, the function is allowed to return
11081 // a shared Array instance.
11082 static ArrayPtr MakeFixedLength(const GrowableObjectArray& growable_array,
11083 bool unique = false);
11084
11085 ArrayPtr Slice(intptr_t start, intptr_t count, bool with_type_argument) const;
11086 ArrayPtr Copy() const {
11087 return Slice(start: 0, count: Length(), /*with_type_argument=*/with_type_argument: true);
11088 }
11089
11090 protected:
11091 static ArrayPtr New(intptr_t class_id,
11092 intptr_t len,
11093 Heap::Space space = Heap::kNew);
11094 static ArrayPtr NewUninitialized(intptr_t class_id,
11095 intptr_t len,
11096 Heap::Space space = Heap::kNew);
11097
11098 private:
11099 CompressedObjectPtr const* ObjectAddr(intptr_t index) const {
11100 // TODO(iposva): Determine if we should throw an exception here.
11101 ASSERT((index >= 0) && (index < Length()));
11102 return &untag()->data()[index];
11103 }
11104
11105 void SetLength(intptr_t value) const { untag()->set_length(Smi::New(value)); }
11106 void SetLengthRelease(intptr_t value) const {
11107 untag()->set_length<std::memory_order_release>(Smi::New(value));
11108 }
11109
11110 template <typename type,
11111 std::memory_order order = std::memory_order_relaxed,
11112 typename value_type>
11113 void StoreArrayPointer(type const* addr, value_type value) const {
11114 ptr()->untag()->StoreArrayPointer<type, order, value_type>(addr, value);
11115 }
11116
11117 FINAL_HEAP_OBJECT_IMPLEMENTATION(Array, Instance);
11118 friend class Class;
11119 friend class ImmutableArray;
11120 friend class Object;
11121 friend class String;
11122 friend class MessageDeserializer;
11123};
11124
// All-static helper for immutable arrays. Immutable arrays share the Array
// object layout and differ only in class id (kImmutableArrayCid).
class ImmutableArray : public AllStatic {
 public:
  static constexpr bool ContainsCompressedPointers() {
    return Array::ContainsCompressedPointers();
  }

  static ImmutableArrayPtr New(intptr_t len, Heap::Space space = Heap::kNew);

  static const ClassId kClassId = kImmutableArrayCid;

  // Instance sizing delegates to Array since the layout is identical.
  static intptr_t InstanceSize() { return Array::InstanceSize(); }

  static intptr_t InstanceSize(intptr_t len) {
    return Array::InstanceSize(len);
  }

 private:
  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  static ImmutableArrayPtr raw(const Array& array) {
    return static_cast<ImmutableArrayPtr>(array.ptr());
  }

  friend class Class;
};
11153
11154class GrowableObjectArray : public Instance {
11155 public:
11156 intptr_t Capacity() const {
11157 NoSafepointScope no_safepoint;
11158 ASSERT(!IsNull());
11159 return Smi::Value(raw_smi: DataArray()->length());
11160 }
11161 intptr_t Length() const {
11162 ASSERT(!IsNull());
11163 return Smi::Value(raw_smi: untag()->length());
11164 }
11165 void SetLength(intptr_t value) const {
11166 // This is only safe because we create a new Smi, which does not cause
11167 // heap allocation.
11168 untag()->set_length(Smi::New(value));
11169 }
11170
11171 ArrayPtr data() const { return untag()->data(); }
11172 void SetData(const Array& value) const { untag()->set_data(value.ptr()); }
11173
11174 ObjectPtr At(intptr_t index) const {
11175 NoSafepointScope no_safepoint;
11176 ASSERT(!IsNull());
11177 ASSERT(index < Length());
11178 return data()->untag()->element(index);
11179 }
11180 void SetAt(intptr_t index, const Object& value) const {
11181 ASSERT(!IsNull());
11182 ASSERT(index < Length());
11183
11184 // TODO(iposva): Add storing NoSafepointScope.
11185 data()->untag()->set_element(index, value: value.ptr());
11186 }
11187
11188 void Add(const Object& value, Heap::Space space = Heap::kNew) const;
11189
11190 void Grow(intptr_t new_capacity, Heap::Space space = Heap::kNew) const;
11191 ObjectPtr RemoveLast() const;
11192
11193 virtual TypeArgumentsPtr GetTypeArguments() const {
11194 return untag()->type_arguments();
11195 }
11196 virtual void SetTypeArguments(const TypeArguments& value) const {
11197 // A GrowableObjectArray is raw or takes one type argument. However, its
11198 // type argument vector may be longer than 1 due to a type optimization
11199 // reusing the type argument vector of the instantiator.
11200 ASSERT(value.IsNull() || ((value.Length() >= 1) && value.IsInstantiated() &&
11201 value.IsCanonical()));
11202
11203 untag()->set_type_arguments(value.ptr());
11204 }
11205
11206 // We don't expect a growable object array to be canonicalized.
11207 virtual bool CanonicalizeEquals(const Instance& other) const {
11208 UNREACHABLE();
11209 return false;
11210 }
11211
11212 // We don't expect a growable object array to be canonicalized.
11213 virtual InstancePtr CanonicalizeLocked(Thread* thread) const {
11214 UNREACHABLE();
11215 return Instance::null();
11216 }
11217
11218 static intptr_t type_arguments_offset() {
11219 return OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_);
11220 }
11221
11222 static intptr_t length_offset() {
11223 return OFFSET_OF(UntaggedGrowableObjectArray, length_);
11224 }
11225 static intptr_t data_offset() {
11226 return OFFSET_OF(UntaggedGrowableObjectArray, data_);
11227 }
11228
11229 static intptr_t InstanceSize() {
11230 return RoundedAllocationSize(size: sizeof(UntaggedGrowableObjectArray));
11231 }
11232
11233 static GrowableObjectArrayPtr New(Heap::Space space = Heap::kNew) {
11234 return New(capacity: kDefaultInitialCapacity, space);
11235 }
11236 static GrowableObjectArrayPtr New(intptr_t capacity,
11237 Heap::Space space = Heap::kNew);
11238 static GrowableObjectArrayPtr New(const Array& array,
11239 Heap::Space space = Heap::kNew);
11240
11241 static SmiPtr NoSafepointLength(const GrowableObjectArrayPtr array) {
11242 return array->untag()->length();
11243 }
11244
11245 static ArrayPtr NoSafepointData(const GrowableObjectArrayPtr array) {
11246 return array->untag()->data();
11247 }
11248
11249 private:
11250 UntaggedArray* DataArray() const { return data()->untag(); }
11251
11252 static constexpr int kDefaultInitialCapacity = 0;
11253
11254 FINAL_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray, Instance);
11255 friend class Array;
11256 friend class Class;
11257};
11258
11259class Float32x4 : public Instance {
11260 public:
11261 static Float32x4Ptr New(float value0,
11262 float value1,
11263 float value2,
11264 float value3,
11265 Heap::Space space = Heap::kNew);
11266 static Float32x4Ptr New(simd128_value_t value,
11267 Heap::Space space = Heap::kNew);
11268
11269 float x() const;
11270 float y() const;
11271 float z() const;
11272 float w() const;
11273
11274 void set_x(float x) const;
11275 void set_y(float y) const;
11276 void set_z(float z) const;
11277 void set_w(float w) const;
11278
11279 simd128_value_t value() const;
11280 void set_value(simd128_value_t value) const;
11281
11282 static intptr_t InstanceSize() {
11283 return RoundedAllocationSize(size: sizeof(UntaggedFloat32x4));
11284 }
11285
11286 static intptr_t value_offset() {
11287 return OFFSET_OF(UntaggedFloat32x4, value_);
11288 }
11289
11290 private:
11291 FINAL_HEAP_OBJECT_IMPLEMENTATION(Float32x4, Instance);
11292 friend class Class;
11293};
11294
11295class Int32x4 : public Instance {
11296 public:
11297 static Int32x4Ptr New(int32_t value0,
11298 int32_t value1,
11299 int32_t value2,
11300 int32_t value3,
11301 Heap::Space space = Heap::kNew);
11302 static Int32x4Ptr New(simd128_value_t value, Heap::Space space = Heap::kNew);
11303
11304 int32_t x() const;
11305 int32_t y() const;
11306 int32_t z() const;
11307 int32_t w() const;
11308
11309 void set_x(int32_t x) const;
11310 void set_y(int32_t y) const;
11311 void set_z(int32_t z) const;
11312 void set_w(int32_t w) const;
11313
11314 simd128_value_t value() const;
11315 void set_value(simd128_value_t value) const;
11316
11317 static intptr_t InstanceSize() {
11318 return RoundedAllocationSize(size: sizeof(UntaggedInt32x4));
11319 }
11320
11321 static intptr_t value_offset() { return OFFSET_OF(UntaggedInt32x4, value_); }
11322
11323 private:
11324 FINAL_HEAP_OBJECT_IMPLEMENTATION(Int32x4, Instance);
11325 friend class Class;
11326};
11327
11328class Float64x2 : public Instance {
11329 public:
11330 static Float64x2Ptr New(double value0,
11331 double value1,
11332 Heap::Space space = Heap::kNew);
11333 static Float64x2Ptr New(simd128_value_t value,
11334 Heap::Space space = Heap::kNew);
11335
11336 double x() const;
11337 double y() const;
11338
11339 void set_x(double x) const;
11340 void set_y(double y) const;
11341
11342 simd128_value_t value() const;
11343 void set_value(simd128_value_t value) const;
11344
11345 static intptr_t InstanceSize() {
11346 return RoundedAllocationSize(size: sizeof(UntaggedFloat64x2));
11347 }
11348
11349 static intptr_t value_offset() {
11350 return OFFSET_OF(UntaggedFloat64x2, value_);
11351 }
11352
11353 private:
11354 FINAL_HEAP_OBJECT_IMPLEMENTATION(Float64x2, Instance);
11355 friend class Class;
11356};
11357
11358// Packed representation of record shape (number of fields and field names).
11359class RecordShape {
11360 enum {
11361 kNumFieldsBits = 16,
11362 kFieldNamesIndexBits = kSmiBits - kNumFieldsBits,
11363 };
11364 using NumFieldsBitField = BitField<intptr_t, intptr_t, 0, kNumFieldsBits>;
11365 using FieldNamesIndexBitField = BitField<intptr_t,
11366 intptr_t,
11367 NumFieldsBitField::kNextBit,
11368 kFieldNamesIndexBits>;
11369
11370 public:
11371 static constexpr intptr_t kNumFieldsMask = NumFieldsBitField::mask();
11372 static constexpr intptr_t kMaxNumFields = kNumFieldsMask;
11373 static constexpr intptr_t kFieldNamesIndexMask =
11374 FieldNamesIndexBitField::mask();
11375 static constexpr intptr_t kFieldNamesIndexShift =
11376 FieldNamesIndexBitField::shift();
11377 static constexpr intptr_t kMaxFieldNamesIndex = kFieldNamesIndexMask;
11378
11379 explicit RecordShape(intptr_t value) : value_(value) { ASSERT(value_ >= 0); }
11380 explicit RecordShape(SmiPtr smi_value) : value_(Smi::Value(raw_smi: smi_value)) {
11381 ASSERT(value_ >= 0);
11382 }
11383 RecordShape(intptr_t num_fields, intptr_t field_names_index)
11384 : value_(NumFieldsBitField::encode(value: num_fields) |
11385 FieldNamesIndexBitField::encode(value: field_names_index)) {
11386 ASSERT(value_ >= 0);
11387 }
11388 static RecordShape ForUnnamed(intptr_t num_fields) {
11389 return RecordShape(num_fields, 0);
11390 }
11391
11392 bool HasNamedFields() const { return field_names_index() != 0; }
11393
11394 intptr_t num_fields() const { return NumFieldsBitField::decode(value: value_); }
11395
11396 intptr_t field_names_index() const {
11397 return FieldNamesIndexBitField::decode(value: value_);
11398 }
11399
11400 SmiPtr AsSmi() const { return Smi::New(value: value_); }
11401
11402 intptr_t AsInt() const { return value_; }
11403
11404 bool operator==(const RecordShape& other) const {
11405 return value_ == other.value_;
11406 }
11407 bool operator!=(const RecordShape& other) const {
11408 return value_ != other.value_;
11409 }
11410
11411 // Registers record shape with [num_fields] and [field_names] in the current
11412 // isolate group.
11413 static RecordShape Register(Thread* thread,
11414 intptr_t num_fields,
11415 const Array& field_names);
11416
11417 // Retrieves an array of field names.
11418 ArrayPtr GetFieldNames(Thread* thread) const;
11419
11420 private:
11421 intptr_t value_;
11422
11423 DISALLOW_ALLOCATION();
11424};
11425
11426// A RecordType represents the type of a record. It describes
11427// number of named and positional fields, field types and
11428// names of the named fields.
11429class RecordType : public AbstractType {
11430 public:
11431 virtual bool HasTypeClass() const { return false; }
11432 RecordTypePtr ToNullability(Nullability value, Heap::Space space) const;
11433 virtual classid_t type_class_id() const { return kIllegalCid; }
11434 virtual bool IsInstantiated(
11435 Genericity genericity = kAny,
11436 intptr_t num_free_fun_type_params = kAllFree) const;
11437 virtual bool IsEquivalent(
11438 const Instance& other,
11439 TypeEquality kind,
11440 FunctionTypeMapping* function_type_equivalence = nullptr) const;
11441 virtual bool RequireConstCanonicalTypeErasure(Zone* zone) const;
11442
11443 virtual AbstractTypePtr InstantiateFrom(
11444 const TypeArguments& instantiator_type_arguments,
11445 const TypeArguments& function_type_arguments,
11446 intptr_t num_free_fun_type_params,
11447 Heap::Space space,
11448 FunctionTypeMapping* function_type_mapping = nullptr,
11449 intptr_t num_parent_type_args_adjustment = 0) const;
11450
11451 virtual AbstractTypePtr UpdateFunctionTypes(
11452 intptr_t num_parent_type_args_adjustment,
11453 intptr_t num_free_fun_type_params,
11454 Heap::Space space,
11455 FunctionTypeMapping* function_type_mapping) const;
11456
11457 virtual AbstractTypePtr Canonicalize(Thread* thread) const;
11458 virtual void EnumerateURIs(URIs* uris) const;
11459 virtual void PrintName(NameVisibility visibility,
11460 BaseTextBuffer* printer) const;
11461
11462 virtual uword ComputeHash() const;
11463
11464 bool IsSubtypeOf(
11465 const RecordType& other,
11466 Heap::Space space,
11467 FunctionTypeMapping* function_type_equivalence = nullptr) const;
11468
11469 RecordShape shape() const { return RecordShape(untag()->shape()); }
11470
11471 ArrayPtr field_types() const { return untag()->field_types(); }
11472
11473 AbstractTypePtr FieldTypeAt(intptr_t index) const;
11474 void SetFieldTypeAt(intptr_t index, const AbstractType& value) const;
11475
11476 // Names of the named fields, sorted.
11477 ArrayPtr GetFieldNames(Thread* thread) const;
11478
11479 intptr_t NumFields() const;
11480
11481 void Print(NameVisibility name_visibility, BaseTextBuffer* printer) const;
11482
11483 static intptr_t InstanceSize() {
11484 return RoundedAllocationSize(size: sizeof(UntaggedRecordType));
11485 }
11486
11487 static RecordTypePtr New(RecordShape shape,
11488 const Array& field_types,
11489 Nullability nullability = Nullability::kLegacy,
11490 Heap::Space space = Heap::kOld);
11491
11492 private:
11493 void set_shape(RecordShape shape) const;
11494 void set_field_types(const Array& value) const;
11495
11496 static RecordTypePtr New(Heap::Space space);
11497
11498 FINAL_HEAP_OBJECT_IMPLEMENTATION(RecordType, AbstractType);
11499 friend class Class;
11500 friend class ClassFinalizer;
11501 friend class Record;
11502};
11503
11504class Record : public Instance {
11505 public:
11506 intptr_t num_fields() const { return NumFields(ptr: ptr()); }
11507 static intptr_t NumFields(RecordPtr ptr) {
11508 return RecordShape(ptr->untag()->shape()).num_fields();
11509 }
11510
11511 RecordShape shape() const { return RecordShape(untag()->shape()); }
11512 static intptr_t shape_offset() { return OFFSET_OF(UntaggedRecord, shape_); }
11513
11514 ObjectPtr FieldAt(intptr_t field_index) const {
11515 return untag()->field(index: field_index);
11516 }
11517 void SetFieldAt(intptr_t field_index, const Object& value) const {
11518 untag()->set_field(index: field_index, value: value.ptr());
11519 }
11520
11521 static constexpr intptr_t kBytesPerElement = kCompressedWordSize;
11522 static constexpr intptr_t kMaxElements = RecordShape::kMaxNumFields;
11523
11524 struct ArrayTraits {
11525 static intptr_t elements_start_offset() { return sizeof(UntaggedRecord); }
11526 static constexpr intptr_t kElementSize = kBytesPerElement;
11527 };
11528
11529 static intptr_t field_offset(intptr_t index) {
11530 return OFFSET_OF_RETURNED_VALUE(UntaggedRecord, data) +
11531 kBytesPerElement * index;
11532 }
11533 static intptr_t field_index_at_offset(intptr_t offset_in_bytes) {
11534 const intptr_t index =
11535 (offset_in_bytes - OFFSET_OF_RETURNED_VALUE(UntaggedRecord, data)) /
11536 kBytesPerElement;
11537 ASSERT(index >= 0);
11538 return index;
11539 }
11540
11541 static intptr_t InstanceSize() {
11542 ASSERT(sizeof(UntaggedRecord) ==
11543 OFFSET_OF_RETURNED_VALUE(UntaggedRecord, data));
11544 return 0;
11545 }
11546
11547 static intptr_t InstanceSize(intptr_t num_fields) {
11548 return RoundedAllocationSize(size: sizeof(UntaggedRecord) +
11549 (num_fields * kBytesPerElement));
11550 }
11551
11552 static RecordPtr New(RecordShape shape, Heap::Space space = Heap::kNew);
11553
11554 virtual bool CanonicalizeEquals(const Instance& other) const;
11555 virtual uint32_t CanonicalizeHash() const;
11556 virtual void CanonicalizeFieldsLocked(Thread* thread) const;
11557
11558 // Returns RecordType representing runtime type of this record instance.
11559 // It is not created eagerly when record instance is allocated because
11560 // it depends on runtime types of values if its fields, which can be
11561 // quite expensive to query.
11562 RecordTypePtr GetRecordType() const;
11563
11564 // Parses positional field name and return its index,
11565 // or -1 if [field_name] is not a valid positional field name.
11566 static intptr_t GetPositionalFieldIndexFromFieldName(
11567 const String& field_name);
11568
11569 // Returns index of the field with given name, or -1
11570 // if such field doesn't exist.
11571 // Supports positional field names ("$1", "$2", etc).
11572 intptr_t GetFieldIndexByName(Thread* thread, const String& field_name) const;
11573
11574 ArrayPtr GetFieldNames(Thread* thread) const {
11575 return shape().GetFieldNames(thread);
11576 }
11577
11578 private:
11579 FINAL_HEAP_OBJECT_IMPLEMENTATION(Record, Instance);
11580 friend class Class;
11581 friend class Object;
11582};
11583
// Common base for instances that carry a raw data pointer (typed data and
// FFI Pointer); exposes only the offset of that field for generated code.
class PointerBase : public Instance {
 public:
  static intptr_t data_offset() {
    return OFFSET_OF(UntaggedPointerBase, data_);
  }
};
11590
11591class TypedDataBase : public PointerBase {
11592 public:
11593 static intptr_t length_offset() {
11594 return OFFSET_OF(UntaggedTypedDataBase, length_);
11595 }
11596
11597 SmiPtr length() const { return untag()->length(); }
11598
11599 intptr_t Length() const {
11600 ASSERT(!IsNull());
11601 return Smi::Value(raw_smi: untag()->length());
11602 }
11603
11604 intptr_t LengthInBytes() const {
11605 return ElementSizeInBytes(cid: ptr()->GetClassId()) * Length();
11606 }
11607
11608 TypedDataElementType ElementType() const {
11609 return ElementType(cid: ptr()->GetClassId());
11610 }
11611
11612 intptr_t ElementSizeInBytes() const {
11613 return element_size(index: ElementType(cid: ptr()->GetClassId()));
11614 }
11615
11616 static intptr_t ElementSizeInBytes(classid_t cid) {
11617 return element_size(index: ElementType(cid));
11618 }
11619
11620 static TypedDataElementType ElementType(classid_t cid) {
11621 if (cid == kByteDataViewCid || cid == kUnmodifiableByteDataViewCid) {
11622 return kUint8ArrayElement;
11623 } else if (IsTypedDataClassId(index: cid)) {
11624 const intptr_t index =
11625 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderInternal) / 4;
11626 return static_cast<TypedDataElementType>(index);
11627 } else if (IsTypedDataViewClassId(index: cid)) {
11628 const intptr_t index =
11629 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderView) / 4;
11630 return static_cast<TypedDataElementType>(index);
11631 } else if (IsExternalTypedDataClassId(index: cid)) {
11632 const intptr_t index =
11633 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderExternal) / 4;
11634 return static_cast<TypedDataElementType>(index);
11635 } else {
11636 ASSERT(IsUnmodifiableTypedDataViewClassId(cid));
11637 const intptr_t index =
11638 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderUnmodifiable) /
11639 4;
11640 return static_cast<TypedDataElementType>(index);
11641 }
11642 }
11643
11644 bool IsExternalOrExternalView() const;
11645 TypedDataViewPtr ViewFromTo(intptr_t start,
11646 intptr_t end,
11647 Heap::Space space = Heap::kNew) const;
11648
11649 void* DataAddr(intptr_t byte_offset) const {
11650 ASSERT((byte_offset == 0) ||
11651 ((byte_offset > 0) && (byte_offset < LengthInBytes())));
11652 return reinterpret_cast<void*>(Validate(data: untag()->data_) + byte_offset);
11653 }
11654
// Unaligned typed getters/setters over the payload. The single unsigned
// comparison in each ASSERT rejects both negative offsets and offsets that
// would read or write past the end of the data.
#define TYPED_GETTER_SETTER(name, type)                                        \
  type Get##name(intptr_t byte_offset) const {                                 \
    ASSERT(static_cast<uintptr_t>(byte_offset) <=                              \
           static_cast<uintptr_t>(LengthInBytes()) - sizeof(type));            \
    return LoadUnaligned(                                                      \
        reinterpret_cast<type*>(untag()->data_ + byte_offset));                \
  }                                                                            \
  void Set##name(intptr_t byte_offset, type value) const {                     \
    ASSERT(static_cast<uintptr_t>(byte_offset) <=                              \
           static_cast<uintptr_t>(LengthInBytes()) - sizeof(type));            \
    StoreUnaligned(reinterpret_cast<type*>(untag()->data_ + byte_offset),      \
                   value);                                                     \
  }

  TYPED_GETTER_SETTER(Int8, int8_t)
  TYPED_GETTER_SETTER(Uint8, uint8_t)
  TYPED_GETTER_SETTER(Int16, int16_t)
  TYPED_GETTER_SETTER(Uint16, uint16_t)
  TYPED_GETTER_SETTER(Int32, int32_t)
  TYPED_GETTER_SETTER(Uint32, uint32_t)
  TYPED_GETTER_SETTER(Int64, int64_t)
  TYPED_GETTER_SETTER(Uint64, uint64_t)
  TYPED_GETTER_SETTER(Float32, float)
  TYPED_GETTER_SETTER(Float64, double)
  TYPED_GETTER_SETTER(Float32x4, simd128_value_t)
  TYPED_GETTER_SETTER(Int32x4, simd128_value_t)
  TYPED_GETTER_SETTER(Float64x2, simd128_value_t)

#undef TYPED_GETTER_SETTER
11684
 protected:
  // Sets the length field; the value must be representable as a Smi.
  void SetLength(intptr_t value) const {
    ASSERT(value <= Smi::kMaxValue);
    untag()->set_length(Smi::New(value));
  }
11690
11691 virtual uint8_t* Validate(uint8_t* data) const {
11692 return UnsafeMutableNonPointer(addr: data);
11693 }
11694
 private:
  friend class Class;

  // Returns the element size in bytes for the given element type index.
  static intptr_t element_size(intptr_t index) {
    ASSERT(0 <= index && index < kNumElementSizes);
    intptr_t size = element_size_table[index];
    ASSERT(size != 0);
    return size;
  }
  // One table entry per element type; class ids of a variant are 4 apart.
  static constexpr intptr_t kNumElementSizes =
      (kTypedDataFloat64x2ArrayCid - kTypedDataInt8ArrayCid) / 4 + 1;
  static const intptr_t element_size_table[kNumElementSizes];
11707
11708 HEAP_OBJECT_IMPLEMENTATION(TypedDataBase, PointerBase);
11709};
11710
11711class TypedData : public TypedDataBase {
11712 public:
11713 virtual bool CanonicalizeEquals(const Instance& other) const;
11714 virtual uint32_t CanonicalizeHash() const;
11715
11716#define TYPED_GETTER_SETTER(name, type) \
11717 type Get##name(intptr_t byte_offset) const { \
11718 ASSERT(static_cast<uintptr_t>(byte_offset) <= \
11719 static_cast<uintptr_t>(LengthInBytes()) - sizeof(type)); \
11720 return LoadUnaligned( \
11721 reinterpret_cast<const type*>(untag()->data() + byte_offset)); \
11722 } \
11723 void Set##name(intptr_t byte_offset, type value) const { \
11724 ASSERT(static_cast<uintptr_t>(byte_offset) <= \
11725 static_cast<uintptr_t>(LengthInBytes()) - sizeof(type)); \
11726 return StoreUnaligned( \
11727 reinterpret_cast<type*>(untag()->data() + byte_offset), value); \
11728 }
11729
11730 TYPED_GETTER_SETTER(Int8, int8_t)
11731 TYPED_GETTER_SETTER(Uint8, uint8_t)
11732 TYPED_GETTER_SETTER(Int16, int16_t)
11733 TYPED_GETTER_SETTER(Uint16, uint16_t)
11734 TYPED_GETTER_SETTER(Int32, int32_t)
11735 TYPED_GETTER_SETTER(Uint32, uint32_t)
11736 TYPED_GETTER_SETTER(Int64, int64_t)
11737 TYPED_GETTER_SETTER(Uint64, uint64_t)
11738 TYPED_GETTER_SETTER(Float32, float)
11739 TYPED_GETTER_SETTER(Float64, double)
11740 TYPED_GETTER_SETTER(Float32x4, simd128_value_t)
11741 TYPED_GETTER_SETTER(Int32x4, simd128_value_t)
11742 TYPED_GETTER_SETTER(Float64x2, simd128_value_t)
11743
11744#undef TYPED_GETTER_SETTER
11745
11746 static intptr_t payload_offset() {
11747 return UntaggedTypedData::payload_offset();
11748 }
11749
11750 static intptr_t InstanceSize() {
11751 ASSERT(sizeof(UntaggedTypedData) ==
11752 OFFSET_OF_RETURNED_VALUE(UntaggedTypedData, internal_data));
11753 return 0;
11754 }
11755
11756 static intptr_t InstanceSize(intptr_t lengthInBytes) {
11757 ASSERT(0 <= lengthInBytes && lengthInBytes <= kSmiMax);
11758 return RoundedAllocationSize(size: sizeof(UntaggedTypedData) + lengthInBytes);
11759 }
11760
11761 static intptr_t MaxElements(intptr_t class_id) {
11762 ASSERT(IsTypedDataClassId(class_id));
11763 return (kSmiMax / ElementSizeInBytes(cid: class_id));
11764 }
11765
11766 static intptr_t MaxNewSpaceElements(intptr_t class_id) {
11767 ASSERT(IsTypedDataClassId(class_id));
11768 return (kNewAllocatableSize - sizeof(UntaggedTypedData)) /
11769 ElementSizeInBytes(cid: class_id);
11770 }
11771
11772 static TypedDataPtr New(intptr_t class_id,
11773 intptr_t len,
11774 Heap::Space space = Heap::kNew);
11775
11776 static TypedDataPtr Grow(const TypedData& current,
11777 intptr_t len,
11778 Heap::Space space = Heap::kNew);
11779
11780 static bool IsTypedData(const Instance& obj) {
11781 ASSERT(!obj.IsNull());
11782 intptr_t cid = obj.ptr()->GetClassId();
11783 return IsTypedDataClassId(index: cid);
11784 }
11785
11786 protected:
11787 void RecomputeDataField() { ptr()->untag()->RecomputeDataField(); }
11788
11789 private:
11790 // Provides const access to non-pointer, non-aligned data within the object.
11791 // Such access does not need a write barrier, but it is *not* GC-safe, since
11792 // the object might move.
11793 //
11794 // Therefore this method is private and the call-sites in this class need to
11795 // ensure the returned pointer does not escape.
11796 template <typename FieldType>
11797 const FieldType* ReadOnlyDataAddr(intptr_t byte_offset) const {
11798 return reinterpret_cast<const FieldType*>((untag()->data()) + byte_offset);
11799 }
11800
11801 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedData, TypedDataBase);
11802 friend class Class;
11803 friend class ExternalTypedData;
11804 friend class TypedDataView;
11805};
11806
11807class ExternalTypedData : public TypedDataBase {
11808 public:
11809 // Alignment of data when serializing ExternalTypedData in a clustered
11810 // snapshot. Should be independent of word size.
11811 static constexpr int kDataSerializationAlignment = 8;
11812
11813 FinalizablePersistentHandle* AddFinalizer(void* peer,
11814 Dart_HandleFinalizer callback,
11815 intptr_t external_size) const;
11816
11817 static intptr_t InstanceSize() {
11818 return RoundedAllocationSize(size: sizeof(UntaggedExternalTypedData));
11819 }
11820
11821 static intptr_t MaxElements(intptr_t class_id) {
11822 ASSERT(IsExternalTypedDataClassId(class_id));
11823 return (kSmiMax / ElementSizeInBytes(cid: class_id));
11824 }
11825
11826 static ExternalTypedDataPtr New(
11827 intptr_t class_id,
11828 uint8_t* data,
11829 intptr_t len,
11830 Heap::Space space = Heap::kNew,
11831 bool perform_eager_msan_initialization_check = true);
11832
11833 static ExternalTypedDataPtr NewFinalizeWithFree(uint8_t* data, intptr_t len);
11834
11835 static bool IsExternalTypedData(const Instance& obj) {
11836 ASSERT(!obj.IsNull());
11837 intptr_t cid = obj.ptr()->GetClassId();
11838 return IsExternalTypedDataClassId(index: cid);
11839 }
11840
11841 protected:
11842 virtual uint8_t* Validate(uint8_t* data) const { return data; }
11843
11844 void SetLength(intptr_t value) const {
11845 ASSERT(value <= Smi::kMaxValue);
11846 untag()->set_length(Smi::New(value));
11847 }
11848
11849 void SetData(uint8_t* data) const {
11850 ASSERT(!IsolateGroup::Current()->heap()->Contains(
11851 reinterpret_cast<uword>(data)));
11852 StoreNonPointer(addr: &untag()->data_, value: data);
11853 }
11854
11855 private:
11856 FINAL_HEAP_OBJECT_IMPLEMENTATION(ExternalTypedData, TypedDataBase);
11857 friend class Class;
11858};
11859
11860class TypedDataView : public TypedDataBase {
11861 public:
11862 static TypedDataViewPtr New(intptr_t class_id,
11863 Heap::Space space = Heap::kNew);
11864 static TypedDataViewPtr New(intptr_t class_id,
11865 const TypedDataBase& typed_data,
11866 intptr_t offset_in_bytes,
11867 intptr_t length,
11868 Heap::Space space = Heap::kNew);
11869
11870 static intptr_t InstanceSize() {
11871 return RoundedAllocationSize(size: sizeof(UntaggedTypedDataView));
11872 }
11873
11874 static InstancePtr Data(const TypedDataView& view) {
11875 return view.typed_data();
11876 }
11877
11878 static SmiPtr OffsetInBytes(const TypedDataView& view) {
11879 return view.offset_in_bytes();
11880 }
11881
11882 static bool IsExternalTypedDataView(const TypedDataView& view_obj) {
11883 const auto& data = Instance::Handle(ptr: Data(view: view_obj));
11884 intptr_t cid = data.ptr()->GetClassId();
11885 ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
11886 return IsExternalTypedDataClassId(index: cid);
11887 }
11888
11889 static intptr_t typed_data_offset() {
11890 return OFFSET_OF(UntaggedTypedDataView, typed_data_);
11891 }
11892
11893 static intptr_t offset_in_bytes_offset() {
11894 return OFFSET_OF(UntaggedTypedDataView, offset_in_bytes_);
11895 }
11896
11897 TypedDataBasePtr typed_data() const { return untag()->typed_data(); }
11898
11899 void InitializeWith(const TypedDataBase& typed_data,
11900 intptr_t offset_in_bytes,
11901 intptr_t length) {
11902 const classid_t cid = typed_data.GetClassId();
11903 ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
11904 untag()->set_typed_data(typed_data.ptr());
11905 untag()->set_length(Smi::New(value: length));
11906 untag()->set_offset_in_bytes(Smi::New(value: offset_in_bytes));
11907
11908 // Update the inner pointer.
11909 RecomputeDataField();
11910 }
11911
11912 SmiPtr offset_in_bytes() const { return untag()->offset_in_bytes(); }
11913
11914 protected:
11915 virtual uint8_t* Validate(uint8_t* data) const { return data; }
11916
11917 private:
11918 void RecomputeDataField() { ptr()->untag()->RecomputeDataField(); }
11919
11920 void Clear() {
11921 untag()->set_length(Smi::New(value: 0));
11922 untag()->set_offset_in_bytes(Smi::New(value: 0));
11923 StoreNonPointer(addr: &untag()->data_, value: nullptr);
11924 untag()->set_typed_data(TypedDataBase::RawCast(raw: Object::null()));
11925 }
11926
11927 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedDataView, TypedDataBase);
11928 friend class Class;
11929 friend class Object;
11930 friend class TypedDataViewDeserializationCluster;
11931};
11932
11933class ByteBuffer : public AllStatic {
11934 public:
11935 static constexpr bool ContainsCompressedPointers() {
11936 return Instance::ContainsCompressedPointers();
11937 }
11938
11939 static InstancePtr Data(const Instance& view_obj) {
11940 ASSERT(!view_obj.IsNull());
11941 return reinterpret_cast<CompressedInstancePtr*>(
11942 reinterpret_cast<uword>(view_obj.untag()) + data_offset())
11943 ->Decompress(heap_base: view_obj.untag()->heap_base());
11944 }
11945
11946 static intptr_t NumberOfFields() { return kNumFields; }
11947
11948 static intptr_t data_offset() {
11949 return sizeof(UntaggedObject) + (kCompressedWordSize * kDataIndex);
11950 }
11951
11952 private:
11953 enum {
11954 kDataIndex = 0,
11955 kNumFields = 1,
11956 };
11957};
11958
11959class Pointer : public Instance {
11960 public:
11961 static PointerPtr New(uword native_address, Heap::Space space = Heap::kNew);
11962
11963 static intptr_t InstanceSize() {
11964 return RoundedAllocationSize(size: sizeof(UntaggedPointer));
11965 }
11966
11967 static bool IsPointer(const Instance& obj);
11968
11969 size_t NativeAddress() const {
11970 return reinterpret_cast<size_t>(untag()->data_);
11971 }
11972
11973 void SetNativeAddress(size_t address) const {
11974 uint8_t* value = reinterpret_cast<uint8_t*>(address);
11975 StoreNonPointer(addr: &untag()->data_, value);
11976 }
11977
11978 static intptr_t type_arguments_offset() {
11979 return OFFSET_OF(UntaggedPointer, type_arguments_);
11980 }
11981
11982 static constexpr intptr_t kNativeTypeArgPos = 0;
11983
11984 // Fetches the NativeType type argument.
11985 AbstractTypePtr type_argument() const {
11986 TypeArguments& type_args = TypeArguments::Handle(ptr: GetTypeArguments());
11987 return type_args.TypeAtNullSafe(index: Pointer::kNativeTypeArgPos);
11988 }
11989
11990 private:
11991 FINAL_HEAP_OBJECT_IMPLEMENTATION(Pointer, Instance);
11992
11993 friend class Class;
11994};
11995
11996class DynamicLibrary : public Instance {
11997 public:
11998 static DynamicLibraryPtr New(void* handle,
11999 bool canBeClosed,
12000 Heap::Space space = Heap::kNew);
12001
12002 static intptr_t InstanceSize() {
12003 return RoundedAllocationSize(size: sizeof(UntaggedDynamicLibrary));
12004 }
12005
12006 static bool IsDynamicLibrary(const Instance& obj) {
12007 ASSERT(!obj.IsNull());
12008 intptr_t cid = obj.ptr()->GetClassId();
12009 return IsFfiDynamicLibraryClassId(index: cid);
12010 }
12011
12012 void* GetHandle() const {
12013 ASSERT(!IsNull());
12014 return untag()->handle_;
12015 }
12016
12017 void SetHandle(void* value) const {
12018 StoreNonPointer(addr: &untag()->handle_, value);
12019 }
12020
12021 bool CanBeClosed() const {
12022 ASSERT(!IsNull());
12023 return untag()->canBeClosed_;
12024 }
12025
12026 void SetCanBeClosed(bool value) const {
12027 ASSERT(!IsNull());
12028 StoreNonPointer(addr: &untag()->canBeClosed_, value);
12029 }
12030
12031 bool IsClosed() const {
12032 ASSERT(!IsNull());
12033 return untag()->isClosed_;
12034 }
12035
12036 void SetClosed(bool value) const {
12037 StoreNonPointer(addr: &untag()->isClosed_, value);
12038 }
12039
12040 private:
12041 FINAL_HEAP_OBJECT_IMPLEMENTATION(DynamicLibrary, Instance);
12042
12043 friend class Class;
12044};
12045
12046class LinkedHashBase : public Instance {
12047 public:
12048 // Keep consistent with _indexSizeToHashMask in compact_hash.dart.
12049 static intptr_t IndexSizeToHashMask(intptr_t index_size) {
12050 ASSERT(index_size >= kInitialIndexSize);
12051 intptr_t index_bits = Utils::BitLength(value: index_size) - 2;
12052#if defined(HAS_SMI_63_BITS)
12053 return (1 << (32 - index_bits)) - 1;
12054#else
12055 return (1 << (Object::kHashBits - index_bits)) - 1;
12056#endif
12057 }
12058 static intptr_t InstanceSize() {
12059 return RoundedAllocationSize(size: sizeof(UntaggedLinkedHashBase));
12060 }
12061
12062 static intptr_t type_arguments_offset() {
12063 return OFFSET_OF(UntaggedLinkedHashBase, type_arguments_);
12064 }
12065
12066 static intptr_t index_offset() {
12067 return OFFSET_OF(UntaggedLinkedHashBase, index_);
12068 }
12069
12070 static intptr_t data_offset() {
12071 return OFFSET_OF(UntaggedLinkedHashBase, data_);
12072 }
12073
12074 static intptr_t hash_mask_offset() {
12075 return OFFSET_OF(UntaggedLinkedHashBase, hash_mask_);
12076 }
12077
12078 static intptr_t used_data_offset() {
12079 return OFFSET_OF(UntaggedLinkedHashBase, used_data_);
12080 }
12081
12082 static intptr_t deleted_keys_offset() {
12083 return OFFSET_OF(UntaggedLinkedHashBase, deleted_keys_);
12084 }
12085
12086 static const LinkedHashBase& Cast(const Object& obj) {
12087 ASSERT(obj.IsMap() || obj.IsSet());
12088 return static_cast<const LinkedHashBase&>(obj);
12089 }
12090
12091 bool IsImmutable() const {
12092 return GetClassId() == kConstMapCid || GetClassId() == kConstSetCid;
12093 }
12094
12095 virtual TypeArgumentsPtr GetTypeArguments() const {
12096 return untag()->type_arguments();
12097 }
12098 virtual void SetTypeArguments(const TypeArguments& value) const {
12099 const intptr_t num_type_args = IsMap() ? 2 : 1;
12100 ASSERT(value.IsNull() ||
12101 ((value.Length() >= num_type_args) &&
12102 value.IsInstantiated() /*&& value.IsCanonical()*/));
12103 // TODO(asiva): Values read from a message snapshot are not properly marked
12104 // as canonical. See for example tests/isolate/message3_test.dart.
12105 untag()->set_type_arguments(value.ptr());
12106 }
12107
12108 TypedDataPtr index() const { return untag()->index(); }
12109 void set_index(const TypedData& value) const {
12110 ASSERT(!value.IsNull());
12111 untag()->set_index(value.ptr());
12112 }
12113
12114 ArrayPtr data() const { return untag()->data(); }
12115 void set_data(const Array& value) const { untag()->set_data(value.ptr()); }
12116
12117 SmiPtr hash_mask() const { return untag()->hash_mask(); }
12118 void set_hash_mask(intptr_t value) const {
12119 untag()->set_hash_mask(Smi::New(value));
12120 }
12121
12122 SmiPtr used_data() const { return untag()->used_data(); }
12123 void set_used_data(intptr_t value) const {
12124 untag()->set_used_data(Smi::New(value));
12125 }
12126
12127 SmiPtr deleted_keys() const { return untag()->deleted_keys(); }
12128 void set_deleted_keys(intptr_t value) const {
12129 untag()->set_deleted_keys(Smi::New(value));
12130 }
12131
12132 intptr_t Length() const {
12133 // The map or set may be uninitialized.
12134 if (untag()->used_data() == Object::null()) return 0;
12135 if (untag()->deleted_keys() == Object::null()) return 0;
12136
12137 intptr_t used = Smi::Value(raw_smi: untag()->used_data());
12138 if (IsMap()) {
12139 used >>= 1;
12140 }
12141 const intptr_t deleted = Smi::Value(raw_smi: untag()->deleted_keys());
12142 return used - deleted;
12143 }
12144
12145 // We do not compute the indices in the VM, but we do precompute the hash
12146 // mask to avoid a load acquire barrier on reading the combination of index
12147 // and hash mask.
12148 void ComputeAndSetHashMask() const;
12149
12150 virtual bool CanonicalizeEquals(const Instance& other) const;
12151 virtual uint32_t CanonicalizeHash() const;
12152 virtual void CanonicalizeFieldsLocked(Thread* thread) const;
12153
12154 protected:
12155 // Keep this in sync with Dart implementation (lib/compact_hash.dart).
12156 static constexpr intptr_t kInitialIndexBits = 2;
12157 static constexpr intptr_t kInitialIndexSize = 1 << (kInitialIndexBits + 1);
12158
12159 private:
12160 LinkedHashBasePtr ptr() const { return static_cast<LinkedHashBasePtr>(ptr_); }
12161 UntaggedLinkedHashBase* untag() const {
12162 ASSERT(ptr() != null());
12163 return const_cast<UntaggedLinkedHashBase*>(ptr()->untag());
12164 }
12165
12166 friend class Class;
12167 friend class ImmutableLinkedHashBase;
12168 friend class LinkedHashBaseDeserializationCluster;
12169};
12170
// Static-only layout accessors shared by the immutable (const) map and set
// variants; forwards to LinkedHashBase, which grants it friendship.
class ImmutableLinkedHashBase : public AllStatic {
 public:
  static constexpr bool ContainsCompressedPointers() {
    return LinkedHashBase::ContainsCompressedPointers();
  }

  static intptr_t data_offset() { return LinkedHashBase::data_offset(); }
};
12179
12180// Corresponds to
12181// - _Map in dart:collection
12182// - "new Map()",
12183// - non-const map literals, and
12184// - the default constructor of LinkedHashMap in dart:collection.
12185class Map : public LinkedHashBase {
12186 public:
12187 static intptr_t InstanceSize() {
12188 return RoundedAllocationSize(size: sizeof(UntaggedMap));
12189 }
12190
12191 // Allocates a map with some default capacity, just like "new Map()".
12192 static MapPtr NewDefault(intptr_t class_id = kMapCid,
12193 Heap::Space space = Heap::kNew);
12194 static MapPtr New(intptr_t class_id,
12195 const Array& data,
12196 const TypedData& index,
12197 intptr_t hash_mask,
12198 intptr_t used_data,
12199 intptr_t deleted_keys,
12200 Heap::Space space = Heap::kNew);
12201
12202 // This iterator differs somewhat from its Dart counterpart (_CompactIterator
12203 // in runtime/lib/compact_hash.dart):
12204 // - There are no checks for concurrent modifications.
12205 // - Accessing a key or value before the first call to MoveNext and after
12206 // MoveNext returns false will result in crashes.
12207 class Iterator : public ValueObject {
12208 public:
12209 explicit Iterator(const Map& map)
12210 : data_(Array::Handle(ptr: map.data())),
12211 scratch_(Object::Handle()),
12212 offset_(-2),
12213 length_(Smi::Value(raw_smi: map.used_data())) {}
12214
12215 bool MoveNext() {
12216 while (true) {
12217 offset_ += 2;
12218 if (offset_ >= length_) {
12219 return false;
12220 }
12221 scratch_ = data_.At(index: offset_);
12222 if (scratch_.ptr() != data_.ptr()) {
12223 // Slot is not deleted (self-reference indicates deletion).
12224 return true;
12225 }
12226 }
12227 }
12228
12229 ObjectPtr CurrentKey() const { return data_.At(index: offset_); }
12230
12231 ObjectPtr CurrentValue() const { return data_.At(index: offset_ + 1); }
12232
12233 private:
12234 const Array& data_;
12235 Object& scratch_;
12236 intptr_t offset_;
12237 const intptr_t length_;
12238 };
12239
12240 private:
12241 FINAL_HEAP_OBJECT_IMPLEMENTATION(Map, LinkedHashBase);
12242
12243 // Allocate a map, but leave all fields set to null.
12244 // Used during deserialization (since map might contain itself as key/value).
12245 static MapPtr NewUninitialized(intptr_t class_id,
12246 Heap::Space space = Heap::kNew);
12247
12248 friend class Class;
12249 friend class ConstMap;
12250 friend class MapDeserializationCluster;
12251};
12252
12253// Corresponds to
12254// - _ConstMap in dart:collection
12255// - const map literals
// Static-only companion of Map for the kConstMapCid variant; instances are
// ordinary Map objects distinguished only by their class id.
class ConstMap : public AllStatic {
 public:
  static constexpr bool ContainsCompressedPointers() {
    return Map::ContainsCompressedPointers();
  }

  static ConstMapPtr NewDefault(Heap::Space space = Heap::kNew);

  static ConstMapPtr NewUninitialized(Heap::Space space = Heap::kNew);

  static const ClassId kClassId = kConstMapCid;

  static intptr_t InstanceSize() { return Map::InstanceSize(); }

 private:
  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  // Reinterprets a Map handle's pointer as a ConstMapPtr.
  static ConstMapPtr raw(const Map& map) {
    return static_cast<ConstMapPtr>(map.ptr());
  }

  friend class Class;
};
12282
12283// Corresponds to
12284// - _Set in dart:collection,
12285// - "new Set()",
12286// - non-const set literals, and
12287// - the default constructor of LinkedHashSet in dart:collection.
12288class Set : public LinkedHashBase {
12289 public:
12290 static intptr_t InstanceSize() {
12291 return RoundedAllocationSize(size: sizeof(UntaggedSet));
12292 }
12293
12294 // Allocates a set with some default capacity, just like "new Set()".
12295 static SetPtr NewDefault(intptr_t class_id = kSetCid,
12296 Heap::Space space = Heap::kNew);
12297 static SetPtr New(intptr_t class_id,
12298 const Array& data,
12299 const TypedData& index,
12300 intptr_t hash_mask,
12301 intptr_t used_data,
12302 intptr_t deleted_keys,
12303 Heap::Space space = Heap::kNew);
12304
12305 // This iterator differs somewhat from its Dart counterpart (_CompactIterator
12306 // in runtime/lib/compact_hash.dart):
12307 // - There are no checks for concurrent modifications.
12308 // - Accessing a key or value before the first call to MoveNext and after
12309 // MoveNext returns false will result in crashes.
12310 class Iterator : public ValueObject {
12311 public:
12312 explicit Iterator(const Set& set)
12313 : data_(Array::Handle(ptr: set.data())),
12314 scratch_(Object::Handle()),
12315 offset_(-1),
12316 length_(Smi::Value(raw_smi: set.used_data())) {}
12317
12318 bool MoveNext() {
12319 while (true) {
12320 offset_++;
12321 if (offset_ >= length_) {
12322 return false;
12323 }
12324 scratch_ = data_.At(index: offset_);
12325 if (scratch_.ptr() != data_.ptr()) {
12326 // Slot is not deleted (self-reference indicates deletion).
12327 return true;
12328 }
12329 }
12330 }
12331
12332 ObjectPtr CurrentKey() const { return data_.At(index: offset_); }
12333
12334 private:
12335 const Array& data_;
12336 Object& scratch_;
12337 intptr_t offset_;
12338 const intptr_t length_;
12339 };
12340
12341 private:
12342 FINAL_HEAP_OBJECT_IMPLEMENTATION(Set, LinkedHashBase);
12343
12344 // Allocate a set, but leave all fields set to null.
12345 // Used during deserialization (since set might contain itself as key/value).
12346 static SetPtr NewUninitialized(intptr_t class_id,
12347 Heap::Space space = Heap::kNew);
12348
12349 friend class Class;
12350 friend class ConstSet;
12351 friend class SetDeserializationCluster;
12352};
12353
12354// Corresponds to
12355// - _ConstSet in dart:collection
12356// - const set literals
12357class ConstSet : public AllStatic {
12358 public:
12359 static constexpr bool ContainsCompressedPointers() {
12360 return Set::ContainsCompressedPointers();
12361 }
12362
12363 static ConstSetPtr NewDefault(Heap::Space space = Heap::kNew);
12364
12365 static ConstSetPtr NewUninitialized(Heap::Space space = Heap::kNew);
12366
12367 static const ClassId kClassId = kConstSetCid;
12368
12369 static intptr_t InstanceSize() { return Set::InstanceSize(); }
12370
12371 private:
12372 static intptr_t NextFieldOffset() {
12373 // Indicates this class cannot be extended by dart code.
12374 return -kWordSize;
12375 }
12376
12377 static ConstSetPtr raw(const Set& map) {
12378 return static_cast<ConstSetPtr>(map.ptr());
12379 }
12380
12381 friend class Class;
12382};
12383
12384class Closure : public Instance {
12385 public:
12386#if defined(DART_PRECOMPILED_RUNTIME)
12387 uword entry_point() const { return untag()->entry_point_; }
12388 void set_entry_point(uword entry_point) const {
12389 StoreNonPointer(&untag()->entry_point_, entry_point);
12390 }
12391 static intptr_t entry_point_offset() {
12392 return OFFSET_OF(UntaggedClosure, entry_point_);
12393 }
12394#endif
12395
12396 TypeArgumentsPtr instantiator_type_arguments() const {
12397 return untag()->instantiator_type_arguments();
12398 }
12399 void set_instantiator_type_arguments(const TypeArguments& args) const {
12400 untag()->set_instantiator_type_arguments(args.ptr());
12401 }
12402 static intptr_t instantiator_type_arguments_offset() {
12403 return OFFSET_OF(UntaggedClosure, instantiator_type_arguments_);
12404 }
12405
12406 TypeArgumentsPtr function_type_arguments() const {
12407 return untag()->function_type_arguments();
12408 }
12409 void set_function_type_arguments(const TypeArguments& args) const {
12410 untag()->set_function_type_arguments(args.ptr());
12411 }
12412 static intptr_t function_type_arguments_offset() {
12413 return OFFSET_OF(UntaggedClosure, function_type_arguments_);
12414 }
12415
12416 TypeArgumentsPtr delayed_type_arguments() const {
12417 return untag()->delayed_type_arguments();
12418 }
12419 void set_delayed_type_arguments(const TypeArguments& args) const {
12420 untag()->set_delayed_type_arguments(args.ptr());
12421 }
12422 static intptr_t delayed_type_arguments_offset() {
12423 return OFFSET_OF(UntaggedClosure, delayed_type_arguments_);
12424 }
12425
12426 FunctionPtr function() const { return untag()->function(); }
12427 static intptr_t function_offset() {
12428 return OFFSET_OF(UntaggedClosure, function_);
12429 }
12430 static FunctionPtr FunctionOf(ClosurePtr closure) {
12431 return closure.untag()->function();
12432 }
12433
12434 ContextPtr context() const { return untag()->context(); }
12435 static intptr_t context_offset() {
12436 return OFFSET_OF(UntaggedClosure, context_);
12437 }
12438 static ContextPtr ContextOf(ClosurePtr closure) {
12439 return closure.untag()->context();
12440 }
12441
12442 // Returns whether the closure is generic, that is, it has a generic closure
12443 // function and no delayed type arguments.
12444 bool IsGeneric() const {
12445 return delayed_type_arguments() == Object::empty_type_arguments().ptr();
12446 }
12447
12448 SmiPtr hash() const { return untag()->hash(); }
12449 static intptr_t hash_offset() { return OFFSET_OF(UntaggedClosure, hash_); }
12450
12451 static intptr_t InstanceSize() {
12452 return RoundedAllocationSize(size: sizeof(UntaggedClosure));
12453 }
12454
12455 virtual void CanonicalizeFieldsLocked(Thread* thread) const;
12456 virtual bool CanonicalizeEquals(const Instance& other) const;
12457 virtual uint32_t CanonicalizeHash() const {
12458 return Function::Handle(ptr: function()).Hash();
12459 }
12460 uword ComputeHash() const;
12461
12462 static ClosurePtr New(const TypeArguments& instantiator_type_arguments,
12463 const TypeArguments& function_type_arguments,
12464 const Function& function,
12465 const Context& context,
12466 Heap::Space space = Heap::kNew);
12467
12468 static ClosurePtr New(const TypeArguments& instantiator_type_arguments,
12469 const TypeArguments& function_type_arguments,
12470 const TypeArguments& delayed_type_arguments,
12471 const Function& function,
12472 const Context& context,
12473 Heap::Space space = Heap::kNew);
12474
12475 FunctionTypePtr GetInstantiatedSignature(Zone* zone) const;
12476
12477 private:
12478 FINAL_HEAP_OBJECT_IMPLEMENTATION(Closure, Instance);
12479 friend class Class;
12480};
12481
12482// Corresponds to _Capability in dart:isolate.
12483class Capability : public Instance {
12484 public:
12485 uint64_t Id() const { return untag()->id_; }
12486
12487 static intptr_t InstanceSize() {
12488 return RoundedAllocationSize(size: sizeof(UntaggedCapability));
12489 }
12490 static CapabilityPtr New(uint64_t id, Heap::Space space = Heap::kNew);
12491
12492 private:
12493 FINAL_HEAP_OBJECT_IMPLEMENTATION(Capability, Instance);
12494 friend class Class;
12495};
12496
12497// Corresponds to _RawReceivePort in dart:isolate.
12498class ReceivePort : public Instance {
12499 public:
12500 SendPortPtr send_port() const { return untag()->send_port(); }
12501 static intptr_t send_port_offset() {
12502 return OFFSET_OF(UntaggedReceivePort, send_port_);
12503 }
12504 Dart_Port Id() const { return send_port()->untag()->id_; }
12505
12506 InstancePtr handler() const { return untag()->handler(); }
12507 void set_handler(const Instance& value) const {
12508 untag()->set_handler(value.ptr());
12509 }
12510 static intptr_t handler_offset() {
12511 return OFFSET_OF(UntaggedReceivePort, handler_);
12512 }
12513
12514#if !defined(PRODUCT)
12515 StackTracePtr allocation_location() const {
12516 return untag()->allocation_location();
12517 }
12518
12519 StringPtr debug_name() const { return untag()->debug_name(); }
12520#endif
12521
12522 static intptr_t InstanceSize() {
12523 return RoundedAllocationSize(size: sizeof(UntaggedReceivePort));
12524 }
12525 static ReceivePortPtr New(Dart_Port id,
12526 const String& debug_name,
12527 bool is_control_port,
12528 Heap::Space space = Heap::kNew);
12529
12530 private:
12531 FINAL_HEAP_OBJECT_IMPLEMENTATION(ReceivePort, Instance);
12532 friend class Class;
12533};
12534
12535// Corresponds to _SendPort in dart:isolate.
12536class SendPort : public Instance {
12537 public:
12538 Dart_Port Id() const { return untag()->id_; }
12539
12540 Dart_Port origin_id() const { return untag()->origin_id_; }
12541 void set_origin_id(Dart_Port id) const {
12542 ASSERT(origin_id() == 0);
12543 StoreNonPointer(addr: &(untag()->origin_id_), value: id);
12544 }
12545
12546 static intptr_t InstanceSize() {
12547 return RoundedAllocationSize(size: sizeof(UntaggedSendPort));
12548 }
12549 static SendPortPtr New(Dart_Port id, Heap::Space space = Heap::kNew);
12550 static SendPortPtr New(Dart_Port id,
12551 Dart_Port origin_id,
12552 Heap::Space space = Heap::kNew);
12553
12554 private:
12555 FINAL_HEAP_OBJECT_IMPLEMENTATION(SendPort, Instance);
12556 friend class Class;
12557};
12558
12559// This is allocated when new instance of TransferableTypedData is created in
12560// [TransferableTypedData::New].
12561class TransferableTypedDataPeer {
12562 public:
12563 // [data] backing store should be malloc'ed, not new'ed.
12564 TransferableTypedDataPeer(uint8_t* data, intptr_t length)
12565 : data_(data), length_(length), handle_(nullptr) {}
12566
12567 ~TransferableTypedDataPeer() { free(ptr: data_); }
12568
12569 uint8_t* data() const { return data_; }
12570 intptr_t length() const { return length_; }
12571 FinalizablePersistentHandle* handle() const { return handle_; }
12572 void set_handle(FinalizablePersistentHandle* handle) { handle_ = handle; }
12573
12574 void ClearData() {
12575 data_ = nullptr;
12576 length_ = 0;
12577 handle_ = nullptr;
12578 }
12579
12580 private:
12581 uint8_t* data_;
12582 intptr_t length_;
12583 FinalizablePersistentHandle* handle_;
12584
12585 DISALLOW_COPY_AND_ASSIGN(TransferableTypedDataPeer);
12586};
12587
12588class TransferableTypedData : public Instance {
12589 public:
12590 static TransferableTypedDataPtr New(uint8_t* data, intptr_t len);
12591
12592 static intptr_t InstanceSize() {
12593 return RoundedAllocationSize(size: sizeof(UntaggedTransferableTypedData));
12594 }
12595
12596 private:
12597 FINAL_HEAP_OBJECT_IMPLEMENTATION(TransferableTypedData, Instance);
12598 friend class Class;
12599};
12600
12601class DebuggerStackTrace;
12602
12603// Internal stacktrace object used in exceptions for printing stack traces.
12604class StackTrace : public Instance {
12605 public:
12606 static constexpr int kPreallocatedStackdepth = 90;
12607
12608 intptr_t Length() const;
12609
12610 StackTracePtr async_link() const { return untag()->async_link(); }
12611 void set_async_link(const StackTrace& async_link) const;
12612 void set_expand_inlined(bool value) const;
12613
12614 ArrayPtr code_array() const { return untag()->code_array(); }
12615 ObjectPtr CodeAtFrame(intptr_t frame_index) const;
12616 void SetCodeAtFrame(intptr_t frame_index, const Object& code) const;
12617
12618 TypedDataPtr pc_offset_array() const { return untag()->pc_offset_array(); }
12619 uword PcOffsetAtFrame(intptr_t frame_index) const;
12620 void SetPcOffsetAtFrame(intptr_t frame_index, uword pc_offset) const;
12621
12622 bool skip_sync_start_in_parent_stack() const;
12623 void set_skip_sync_start_in_parent_stack(bool value) const;
12624
12625 // The number of frames that should be cut off the top of an async stack trace
12626 // if it's appended to a synchronous stack trace along a sync-async call.
12627 //
12628 // Without cropping, the border would look like:
12629 //
12630 // <async function>
12631 // ---------------------------
12632 // <asynchronous gap marker>
12633 // <async function>
12634 //
12635 // Since it's not actually an async call, we crop off the last two
12636 // frames when concatenating the sync and async stacktraces.
12637 static constexpr intptr_t kSyncAsyncCroppedFrames = 2;
12638
12639 static intptr_t InstanceSize() {
12640 return RoundedAllocationSize(size: sizeof(UntaggedStackTrace));
12641 }
12642 static StackTracePtr New(const Array& code_array,
12643 const TypedData& pc_offset_array,
12644 Heap::Space space = Heap::kNew);
12645
12646 static StackTracePtr New(const Array& code_array,
12647 const TypedData& pc_offset_array,
12648 const StackTrace& async_link,
12649 bool skip_sync_start_in_parent_stack,
12650 Heap::Space space = Heap::kNew);
12651
12652 private:
12653 void set_code_array(const Array& code_array) const;
12654 void set_pc_offset_array(const TypedData& pc_offset_array) const;
12655 bool expand_inlined() const;
12656
12657 FINAL_HEAP_OBJECT_IMPLEMENTATION(StackTrace, Instance);
12658 friend class Class;
12659 friend class DebuggerStackTrace;
12660};
12661
12662class SuspendState : public Instance {
12663 public:
12664 // :suspend_state local variable index
12665 static constexpr intptr_t kSuspendStateVarIndex = 0;
12666
12667 static intptr_t HeaderSize() { return sizeof(UntaggedSuspendState); }
12668 static intptr_t UnroundedSize(SuspendStatePtr ptr) {
12669 return UnroundedSize(frame_capacity: ptr->untag()->frame_capacity());
12670 }
12671 static intptr_t UnroundedSize(intptr_t frame_capacity) {
12672 return HeaderSize() + frame_capacity;
12673 }
12674 static intptr_t InstanceSize() {
12675 ASSERT_EQUAL(sizeof(UntaggedSuspendState),
12676 OFFSET_OF_RETURNED_VALUE(UntaggedSuspendState, payload));
12677 return 0;
12678 }
12679 static intptr_t InstanceSize(intptr_t frame_capacity) {
12680 return RoundedAllocationSize(size: UnroundedSize(frame_capacity));
12681 }
12682
12683 // Number of extra words reserved for growth of frame size
12684 // during SuspendState allocation. Frames do not grow in AOT.
12685 static intptr_t FrameSizeGrowthGap() {
12686 return ONLY_IN_PRECOMPILED(0) NOT_IN_PRECOMPILED(2);
12687 }
12688
12689#if !defined(DART_PRECOMPILED_RUNTIME)
12690 static intptr_t frame_capacity_offset() {
12691 return OFFSET_OF(UntaggedSuspendState, frame_capacity_);
12692 }
12693#endif
12694 static intptr_t frame_size_offset() {
12695 return OFFSET_OF(UntaggedSuspendState, frame_size_);
12696 }
12697 static intptr_t pc_offset() { return OFFSET_OF(UntaggedSuspendState, pc_); }
12698 static intptr_t function_data_offset() {
12699 return OFFSET_OF(UntaggedSuspendState, function_data_);
12700 }
12701 static intptr_t then_callback_offset() {
12702 return OFFSET_OF(UntaggedSuspendState, then_callback_);
12703 }
12704 static intptr_t error_callback_offset() {
12705 return OFFSET_OF(UntaggedSuspendState, error_callback_);
12706 }
12707 static intptr_t payload_offset() {
12708 return UntaggedSuspendState::payload_offset();
12709 }
12710
12711 static SuspendStatePtr New(intptr_t frame_size,
12712 const Instance& function_data,
12713 Heap::Space space = Heap::kNew);
12714
12715 // Makes a copy of [src] object.
12716 // The object should be holding a suspended frame.
12717 static SuspendStatePtr Clone(Thread* thread,
12718 const SuspendState& src,
12719 Heap::Space space = Heap::kNew);
12720
12721 uword pc() const { return untag()->pc_; }
12722
12723 intptr_t frame_size() const { return untag()->frame_size_; }
12724
12725 InstancePtr function_data() const {
12726 return untag()->function_data();
12727 }
12728
12729 ClosurePtr then_callback() const { return untag()->then_callback(); }
12730
12731 ClosurePtr error_callback() const {
12732 return untag()->error_callback();
12733 }
12734
12735 // Returns Code object corresponding to the suspended function.
12736 CodePtr GetCodeObject() const;
12737
12738 private:
12739#if !defined(DART_PRECOMPILED_RUNTIME)
12740 void set_frame_capacity(intptr_t frame_capcity) const;
12741#endif
12742 void set_frame_size(intptr_t frame_size) const;
12743 void set_pc(uword pc) const;
12744 void set_function_data(const Instance& function_data) const;
12745 void set_then_callback(const Closure& then_callback) const;
12746 void set_error_callback(const Closure& error_callback) const;
12747
12748 uint8_t* payload() const { return untag()->payload(); }
12749
12750 FINAL_HEAP_OBJECT_IMPLEMENTATION(SuspendState, Instance);
12751 friend class Class;
12752};
12753
// Value type holding the set of flags attached to a regular expression.
//
// Flags are passed to a regex object as follows:
// 'i': ignore case, 'g': do global matches, 'm': pattern is multi line,
// 'u': pattern is full Unicode, not just BMP, 's': '.' in pattern matches
// all characters including line terminators.
class RegExpFlags {
 public:
  // Bit mask values, one per flag.
  enum Flags {
    kNone = 0,
    kGlobal = 1,
    kIgnoreCase = 2,
    kMultiLine = 4,
    kUnicode = 8,
    kDotAll = 16,
  };

  static constexpr int kDefaultFlags = 0;

  RegExpFlags() : value_(kDefaultFlags) {}
  explicit RegExpFlags(int value) : value_(value) {}

  inline bool IsGlobal() const { return HasFlag(kGlobal); }
  inline bool IgnoreCase() const { return HasFlag(kIgnoreCase); }
  inline bool IsMultiLine() const { return HasFlag(kMultiLine); }
  inline bool IsUnicode() const { return HasFlag(kUnicode); }
  inline bool IsDotAll() const { return HasFlag(kDotAll); }

  inline bool NeedsUnicodeCaseEquivalents() {
    // Both unicode and ignore_case flags are set. We need to use ICU to find
    // the closure over case equivalents.
    return IsUnicode() && IgnoreCase();
  }

  void SetGlobal() { AddFlag(kGlobal); }
  void SetIgnoreCase() { AddFlag(kIgnoreCase); }
  void SetMultiLine() { AddFlag(kMultiLine); }
  void SetUnicode() { AddFlag(kUnicode); }
  void SetDotAll() { AddFlag(kDotAll); }

  const char* ToCString() const;

  // Raw bit mask of all set flags.
  int value() const { return value_; }

  bool operator==(const RegExpFlags& other) const {
    return value_ == other.value_;
  }
  bool operator!=(const RegExpFlags& other) const {
    return !(*this == other);
  }

 private:
  // True when [flag]'s bit is present in the mask.
  bool HasFlag(Flags flag) const { return (value_ & flag) != 0; }
  // Sets [flag]'s bit in the mask.
  void AddFlag(Flags flag) { value_ |= flag; }

  int value_;
};
12806
12807// Internal JavaScript regular expression object.
12808class RegExp : public Instance {
12809 public:
12810 // Meaning of RegExType:
12811 // kUninitialized: the type of th regexp has not been initialized yet.
12812 // kSimple: A simple pattern to match against, using string indexOf operation.
12813 // kComplex: A complex pattern to match.
12814 enum RegExType {
12815 kUninitialized = 0,
12816 kSimple = 1,
12817 kComplex = 2,
12818 };
12819
12820 enum {
12821 kTypePos = 0,
12822 kTypeSize = 2,
12823 kFlagsPos = 2,
12824 kFlagsSize = 5,
12825 };
12826
12827 class TypeBits : public BitField<int8_t, RegExType, kTypePos, kTypeSize> {};
12828 class GlobalBit : public BitField<int8_t, bool, kFlagsPos, 1> {};
12829 class IgnoreCaseBit : public BitField<int8_t, bool, GlobalBit::kNextBit, 1> {
12830 };
12831 class MultiLineBit
12832 : public BitField<int8_t, bool, IgnoreCaseBit::kNextBit, 1> {};
12833 class UnicodeBit : public BitField<int8_t, bool, MultiLineBit::kNextBit, 1> {
12834 };
12835 class DotAllBit : public BitField<int8_t, bool, UnicodeBit::kNextBit, 1> {};
12836
12837 class FlagsBits : public BitField<int8_t, int8_t, kFlagsPos, kFlagsSize> {};
12838
12839 bool is_initialized() const { return (type() != kUninitialized); }
12840 bool is_simple() const { return (type() == kSimple); }
12841 bool is_complex() const { return (type() == kComplex); }
12842
12843 intptr_t num_registers(bool is_one_byte) const {
12844 return LoadNonPointer<intptr_t, std::memory_order_relaxed>(
12845 addr: is_one_byte ? &untag()->num_one_byte_registers_
12846 : &untag()->num_two_byte_registers_);
12847 }
12848
12849 StringPtr pattern() const { return untag()->pattern(); }
12850 intptr_t num_bracket_expressions() const {
12851 return untag()->num_bracket_expressions_;
12852 }
12853 ArrayPtr capture_name_map() const { return untag()->capture_name_map(); }
12854
12855 TypedDataPtr bytecode(bool is_one_byte, bool sticky) const {
12856 if (sticky) {
12857 return TypedData::RawCast(
12858 raw: is_one_byte ? untag()->one_byte_sticky<std::memory_order_acquire>()
12859 : untag()->two_byte_sticky<std::memory_order_acquire>());
12860 } else {
12861 return TypedData::RawCast(
12862 raw: is_one_byte ? untag()->one_byte<std::memory_order_acquire>()
12863 : untag()->two_byte<std::memory_order_acquire>());
12864 }
12865 }
12866
12867 static intptr_t function_offset(intptr_t cid, bool sticky) {
12868 if (sticky) {
12869 switch (cid) {
12870 case kOneByteStringCid:
12871 return OFFSET_OF(UntaggedRegExp, one_byte_sticky_);
12872 case kTwoByteStringCid:
12873 return OFFSET_OF(UntaggedRegExp, two_byte_sticky_);
12874 case kExternalOneByteStringCid:
12875 return OFFSET_OF(UntaggedRegExp, external_one_byte_sticky_);
12876 case kExternalTwoByteStringCid:
12877 return OFFSET_OF(UntaggedRegExp, external_two_byte_sticky_);
12878 }
12879 } else {
12880 switch (cid) {
12881 case kOneByteStringCid:
12882 return OFFSET_OF(UntaggedRegExp, one_byte_);
12883 case kTwoByteStringCid:
12884 return OFFSET_OF(UntaggedRegExp, two_byte_);
12885 case kExternalOneByteStringCid:
12886 return OFFSET_OF(UntaggedRegExp, external_one_byte_);
12887 case kExternalTwoByteStringCid:
12888 return OFFSET_OF(UntaggedRegExp, external_two_byte_);
12889 }
12890 }
12891
12892 UNREACHABLE();
12893 return -1;
12894 }
12895
12896 FunctionPtr function(intptr_t cid, bool sticky) const {
12897 if (sticky) {
12898 switch (cid) {
12899 case kOneByteStringCid:
12900 return static_cast<FunctionPtr>(untag()->one_byte_sticky());
12901 case kTwoByteStringCid:
12902 return static_cast<FunctionPtr>(untag()->two_byte_sticky());
12903 case kExternalOneByteStringCid:
12904 return static_cast<FunctionPtr>(untag()->external_one_byte_sticky());
12905 case kExternalTwoByteStringCid:
12906 return static_cast<FunctionPtr>(untag()->external_two_byte_sticky());
12907 }
12908 } else {
12909 switch (cid) {
12910 case kOneByteStringCid:
12911 return static_cast<FunctionPtr>(untag()->one_byte());
12912 case kTwoByteStringCid:
12913 return static_cast<FunctionPtr>(untag()->two_byte());
12914 case kExternalOneByteStringCid:
12915 return static_cast<FunctionPtr>(untag()->external_one_byte());
12916 case kExternalTwoByteStringCid:
12917 return static_cast<FunctionPtr>(untag()->external_two_byte());
12918 }
12919 }
12920
12921 UNREACHABLE();
12922 return Function::null();
12923 }
12924
12925 void set_pattern(const String& pattern) const;
12926 void set_function(intptr_t cid, bool sticky, const Function& value) const;
12927 void set_bytecode(bool is_one_byte,
12928 bool sticky,
12929 const TypedData& bytecode) const;
12930
12931 void set_num_bracket_expressions(SmiPtr value) const;
12932 void set_num_bracket_expressions(const Smi& value) const;
12933 void set_num_bracket_expressions(intptr_t value) const;
12934 void set_capture_name_map(const Array& array) const;
12935 void set_is_global() const {
12936 untag()->type_flags_.UpdateBool<GlobalBit>(value: true);
12937 }
12938 void set_is_ignore_case() const {
12939 untag()->type_flags_.UpdateBool<IgnoreCaseBit>(value: true);
12940 }
12941 void set_is_multi_line() const {
12942 untag()->type_flags_.UpdateBool<MultiLineBit>(value: true);
12943 }
12944 void set_is_unicode() const {
12945 untag()->type_flags_.UpdateBool<UnicodeBit>(value: true);
12946 }
12947 void set_is_dot_all() const {
12948 untag()->type_flags_.UpdateBool<DotAllBit>(value: true);
12949 }
12950 void set_is_simple() const { set_type(kSimple); }
12951 void set_is_complex() const { set_type(kComplex); }
12952 void set_num_registers(bool is_one_byte, intptr_t value) const {
12953 StoreNonPointer<intptr_t, intptr_t, std::memory_order_relaxed>(
12954 addr: is_one_byte ? &untag()->num_one_byte_registers_
12955 : &untag()->num_two_byte_registers_,
12956 value);
12957 }
12958
12959 RegExpFlags flags() const {
12960 return RegExpFlags(untag()->type_flags_.Read<FlagsBits>());
12961 }
12962 void set_flags(RegExpFlags flags) const {
12963 untag()->type_flags_.Update<FlagsBits>(value: flags.value());
12964 }
12965
12966 virtual bool CanonicalizeEquals(const Instance& other) const;
12967 virtual uint32_t CanonicalizeHash() const;
12968
12969 static intptr_t InstanceSize() {
12970 return RoundedAllocationSize(size: sizeof(UntaggedRegExp));
12971 }
12972
12973 static RegExpPtr New(Zone* zone, Heap::Space space = Heap::kNew);
12974
12975 private:
12976 void set_type(RegExType type) const {
12977 untag()->type_flags_.Update<TypeBits>(value: type);
12978 }
12979 RegExType type() const { return untag()->type_flags_.Read<TypeBits>(); }
12980
12981 FINAL_HEAP_OBJECT_IMPLEMENTATION(RegExp, Instance);
12982 friend class Class;
12983};
12984
12985// Corresponds to _WeakProperty in dart:core.
12986class WeakProperty : public Instance {
12987 public:
12988 ObjectPtr key() const { return untag()->key(); }
12989 void set_key(const Object& key) const { untag()->set_key(key.ptr()); }
12990 static intptr_t key_offset() { return OFFSET_OF(UntaggedWeakProperty, key_); }
12991
12992 ObjectPtr value() const { return untag()->value(); }
12993 void set_value(const Object& value) const { untag()->set_value(value.ptr()); }
12994 static intptr_t value_offset() {
12995 return OFFSET_OF(UntaggedWeakProperty, value_);
12996 }
12997
12998 static WeakPropertyPtr New(Heap::Space space = Heap::kNew);
12999
13000 static intptr_t InstanceSize() {
13001 return RoundedAllocationSize(size: sizeof(UntaggedWeakProperty));
13002 }
13003
13004 private:
13005 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakProperty, Instance);
13006 friend class Class;
13007};
13008
13009// Corresponds to _WeakReference in dart:core.
13010class WeakReference : public Instance {
13011 public:
13012 ObjectPtr target() const { return untag()->target(); }
13013 void set_target(const Object& target) const {
13014 untag()->set_target(target.ptr());
13015 }
13016 static intptr_t target_offset() {
13017 return OFFSET_OF(UntaggedWeakReference, target_);
13018 }
13019
13020 static intptr_t type_arguments_offset() {
13021 return OFFSET_OF(UntaggedWeakReference, type_arguments_);
13022 }
13023
13024 static WeakReferencePtr New(Heap::Space space = Heap::kNew);
13025
13026 static intptr_t InstanceSize() {
13027 return RoundedAllocationSize(size: sizeof(UntaggedWeakReference));
13028 }
13029
13030 private:
13031 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakReference, Instance);
13032 friend class Class;
13033};
13034
13035class FinalizerBase;
13036class FinalizerEntry : public Instance {
13037 public:
13038 ObjectPtr value() const { return untag()->value(); }
13039 void set_value(const Object& value) const { untag()->set_value(value.ptr()); }
13040 static intptr_t value_offset() {
13041 return OFFSET_OF(UntaggedFinalizerEntry, value_);
13042 }
13043
13044 ObjectPtr detach() const { return untag()->detach(); }
13045 void set_detach(const Object& value) const {
13046 untag()->set_detach(value.ptr());
13047 }
13048 static intptr_t detach_offset() {
13049 return OFFSET_OF(UntaggedFinalizerEntry, detach_);
13050 }
13051
13052 ObjectPtr token() const { return untag()->token(); }
13053 void set_token(const Object& value) const { untag()->set_token(value.ptr()); }
13054 static intptr_t token_offset() {
13055 return OFFSET_OF(UntaggedFinalizerEntry, token_);
13056 }
13057
13058 FinalizerBasePtr finalizer() const { return untag()->finalizer(); }
13059 void set_finalizer(const FinalizerBase& value) const;
13060 static intptr_t finalizer_offset() {
13061 return OFFSET_OF(UntaggedFinalizerEntry, finalizer_);
13062 }
13063
13064 FinalizerEntryPtr next() const { return untag()->next(); }
13065 void set_next(const FinalizerEntry& value) const {
13066 untag()->set_next(value.ptr());
13067 }
13068 static intptr_t next_offset() {
13069 return OFFSET_OF(UntaggedFinalizerEntry, next_);
13070 }
13071
13072 intptr_t external_size() const { return untag()->external_size(); }
13073 void set_external_size(intptr_t value) const {
13074 untag()->set_external_size(value);
13075 }
13076 static intptr_t external_size_offset() {
13077 return OFFSET_OF(UntaggedFinalizerEntry, external_size_);
13078 }
13079
13080 static intptr_t InstanceSize() {
13081 return RoundedAllocationSize(size: sizeof(UntaggedFinalizerEntry));
13082 }
13083
13084 // Allocates a new FinalizerEntry, initializing the external size (to 0) and
13085 // finalizer.
13086 //
13087 // Should only be used for object tests.
13088 //
13089 // Does not initialize `value`, `token`, and `detach` to allow for flexible
13090 // testing code setting those manually.
13091 //
13092 // Does _not_ add the entry to the finalizer. We could add the entry to
13093 // finalizer.all_entries.data, but we have no way of initializing the hashset
13094 // index.
13095 static FinalizerEntryPtr New(const FinalizerBase& finalizer,
13096 Heap::Space space = Heap::kNew);
13097
13098 private:
13099 FINAL_HEAP_OBJECT_IMPLEMENTATION(FinalizerEntry, Instance);
13100 friend class Class;
13101};
13102
// Shared state of Finalizer and NativeFinalizer: the owning isolate, the
// set of live entries, and the list of entries whose values were collected.
class FinalizerBase : public Instance {
 public:
  static intptr_t isolate_offset() {
    return OFFSET_OF(UntaggedFinalizerBase, isolate_);
  }
  // Isolate this finalizer belongs to.
  Isolate* isolate() const { return untag()->isolate_; }
  void set_isolate(Isolate* value) const { untag()->isolate_ = value; }

  static intptr_t detachments_offset() {
    return OFFSET_OF(UntaggedFinalizerBase, detachments_);
  }

  // Hash set of all registered entries.
  SetPtr all_entries() const { return untag()->all_entries(); }
  void set_all_entries(const Set& value) const {
    untag()->set_all_entries(value.ptr());
  }
  static intptr_t all_entries_offset() {
    return OFFSET_OF(UntaggedFinalizerBase, all_entries_);
  }

  // Head of the linked list of entries whose values have been collected.
  FinalizerEntryPtr entries_collected() const {
    return untag()->entries_collected();
  }
  void set_entries_collected(const FinalizerEntry& value) const {
    untag()->set_entries_collected(value.ptr());
  }
  static intptr_t entries_collected_offset() {
    // NOTE(review): uses UntaggedFinalizer while the other offsets here use
    // UntaggedFinalizerBase — confirm this is intentional.
    return OFFSET_OF(UntaggedFinalizer, entries_collected_);
  }

 private:
  HEAP_OBJECT_IMPLEMENTATION(FinalizerBase, Instance);
  friend class Class;
};
13137
13138class Finalizer : public FinalizerBase {
13139 public:
13140 static intptr_t type_arguments_offset() {
13141 return OFFSET_OF(UntaggedFinalizer, type_arguments_);
13142 }
13143
13144 ObjectPtr callback() const { return untag()->callback(); }
13145 static intptr_t callback_offset() {
13146 return OFFSET_OF(UntaggedFinalizer, callback_);
13147 }
13148
13149 static intptr_t InstanceSize() {
13150 return RoundedAllocationSize(size: sizeof(UntaggedFinalizer));
13151 }
13152
13153 static FinalizerPtr New(Heap::Space space = Heap::kNew);
13154
13155 private:
13156 FINAL_HEAP_OBJECT_IMPLEMENTATION(Finalizer, FinalizerBase);
13157 friend class Class;
13158};
13159
13160class NativeFinalizer : public FinalizerBase {
13161 public:
13162 typedef void (*Callback)(void*);
13163
13164 PointerPtr callback() const { return untag()->callback(); }
13165 void set_callback(const Pointer& value) const {
13166 untag()->set_callback(value.ptr());
13167 }
13168 static intptr_t callback_offset() {
13169 return OFFSET_OF(UntaggedNativeFinalizer, callback_);
13170 }
13171
13172 static intptr_t InstanceSize() {
13173 return RoundedAllocationSize(size: sizeof(UntaggedNativeFinalizer));
13174 }
13175
13176 static NativeFinalizerPtr New(Heap::Space space = Heap::kNew);
13177
13178 void RunCallback(const FinalizerEntry& entry,
13179 const char* trace_context) const;
13180
13181 private:
13182 FINAL_HEAP_OBJECT_IMPLEMENTATION(NativeFinalizer, FinalizerBase);
13183 friend class Class;
13184};
13185
13186class MirrorReference : public Instance {
13187 public:
13188 ObjectPtr referent() const { return untag()->referent(); }
13189
13190 void set_referent(const Object& referent) const {
13191 untag()->set_referent(referent.ptr());
13192 }
13193
13194 AbstractTypePtr GetAbstractTypeReferent() const;
13195
13196 ClassPtr GetClassReferent() const;
13197
13198 FieldPtr GetFieldReferent() const;
13199
13200 FunctionPtr GetFunctionReferent() const;
13201
13202 FunctionTypePtr GetFunctionTypeReferent() const;
13203
13204 LibraryPtr GetLibraryReferent() const;
13205
13206 TypeParameterPtr GetTypeParameterReferent() const;
13207
13208 static MirrorReferencePtr New(const Object& referent,
13209 Heap::Space space = Heap::kNew);
13210
13211 static intptr_t InstanceSize() {
13212 return RoundedAllocationSize(size: sizeof(UntaggedMirrorReference));
13213 }
13214
13215 private:
13216 FINAL_HEAP_OBJECT_IMPLEMENTATION(MirrorReference, Instance);
13217 friend class Class;
13218};
13219
13220class UserTag : public Instance {
13221 public:
13222 uword tag() const { return untag()->tag(); }
13223 void set_tag(uword t) const {
13224 ASSERT(t >= UserTags::kUserTagIdOffset);
13225 ASSERT(t < UserTags::kUserTagIdOffset + UserTags::kMaxUserTags);
13226 StoreNonPointer(addr: &untag()->tag_, value: t);
13227 }
13228
13229 bool streamable() const { return untag()->streamable(); }
13230 void set_streamable(bool streamable) {
13231 StoreNonPointer(addr: &untag()->streamable_, value: streamable);
13232 }
13233
13234 static intptr_t tag_offset() { return OFFSET_OF(UntaggedUserTag, tag_); }
13235
13236 StringPtr label() const { return untag()->label(); }
13237
13238 UserTagPtr MakeActive() const;
13239
13240 static intptr_t InstanceSize() {
13241 return RoundedAllocationSize(size: sizeof(UntaggedUserTag));
13242 }
13243
13244 static UserTagPtr New(const String& label, Heap::Space space = Heap::kOld);
13245 static UserTagPtr DefaultTag();
13246
13247 static bool TagTableIsFull(Thread* thread);
13248 static UserTagPtr FindTagById(uword tag_id);
13249 static UserTagPtr FindTagInIsolate(Isolate* isolate,
13250 Thread* thread,
13251 const String& label);
13252
13253 private:
13254 static UserTagPtr FindTagInIsolate(Thread* thread, const String& label);
13255 static void AddTagToIsolate(Thread* thread, const UserTag& tag);
13256
13257 void set_label(const String& tag_label) const {
13258 untag()->set_label(tag_label.ptr());
13259 }
13260
13261 FINAL_HEAP_OBJECT_IMPLEMENTATION(UserTag, Instance);
13262 friend class Class;
13263};
13264
13265// Represents abstract FutureOr class in dart:async.
13266class FutureOr : public Instance {
13267 public:
13268 static intptr_t InstanceSize() {
13269 return RoundedAllocationSize(size: sizeof(UntaggedFutureOr));
13270 }
13271
13272 virtual TypeArgumentsPtr GetTypeArguments() const {
13273 return untag()->type_arguments();
13274 }
13275 static intptr_t type_arguments_offset() {
13276 return OFFSET_OF(UntaggedFutureOr, type_arguments_);
13277 }
13278
13279 private:
13280 FINAL_HEAP_OBJECT_IMPLEMENTATION(FutureOr, Instance);
13281
13282 friend class Class;
13283};
13284
13285// Breaking cycles and loops.
13286ClassPtr Object::clazz() const {
13287 uword raw_value = static_cast<uword>(ptr_);
13288 if ((raw_value & kSmiTagMask) == kSmiTag) {
13289 return Smi::Class();
13290 }
13291 return IsolateGroup::Current()->class_table()->At(cid: ptr()->GetClassId());
13292}
13293
DART_FORCE_INLINE
// Points this handle at [value] and installs the vtable matching the
// object's class id (Smis/nulls fall back to [default_cid]).
void Object::setPtr(ObjectPtr value, intptr_t default_cid) {
  ptr_ = value;
  intptr_t cid = value->GetClassIdMayBeSmi();
  // Free-list elements cannot be wrapped in a handle.
  ASSERT(cid != kFreeListElement);
  ASSERT(cid != kForwardingCorpse);
  if (cid == kNullCid) {
    cid = default_cid;
  } else if (cid >= kNumPredefinedCids) {
    // User-defined classes all share the Instance vtable.
    cid = kInstanceCid;
  }
  set_vtable(builtin_vtables_[cid]);
}
13308
13309intptr_t Field::HostOffset() const {
13310 ASSERT(is_instance()); // Valid only for dart instance fields.
13311 return (Smi::Value(raw_smi: untag()->host_offset_or_field_id()) * kCompressedWordSize);
13312}
13313
// Byte offset of this instance field on the compilation target; in the
// precompiled runtime target and host are the same.
intptr_t Field::TargetOffset() const {
  ASSERT(is_instance());  // Valid only for dart instance fields.
#if !defined(DART_PRECOMPILED_RUNTIME)
  return (untag()->target_offset_ * compiler::target::kCompressedWordSize);
#else
  return HostOffset();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
13322
// Raw (word-scaled) target offset of [field]; unlike TargetOffset() the
// result is not multiplied into bytes.
inline intptr_t Field::TargetOffsetOf(const FieldPtr field) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return field->untag()->target_offset_;
#else
  return Smi::Value(field->untag()->host_offset_or_field_id());
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
13330
13331void Field::SetOffset(intptr_t host_offset_in_bytes,
13332 intptr_t target_offset_in_bytes) const {
13333 ASSERT(is_instance()); // Valid only for dart instance fields.
13334 ASSERT(kCompressedWordSize != 0);
13335 untag()->set_host_offset_or_field_id(
13336 Smi::New(value: host_offset_in_bytes / kCompressedWordSize));
13337#if !defined(DART_PRECOMPILED_RUNTIME)
13338 ASSERT(compiler::target::kCompressedWordSize != 0);
13339 StoreNonPointer(
13340 addr: &untag()->target_offset_,
13341 value: target_offset_in_bytes / compiler::target::kCompressedWordSize);
13342#else
13343 ASSERT(host_offset_in_bytes == target_offset_in_bytes);
13344#endif // !defined(DART_PRECOMPILED_RUNTIME)
13345}
13346
13347ObjectPtr Field::StaticValue() const {
13348 ASSERT(is_static()); // Valid only for static dart fields.
13349 return Isolate::Current()->field_table()->At(index: field_id());
13350}
13351
13352inline intptr_t Field::field_id() const {
13353 return Smi::Value(raw_smi: untag()->host_offset_or_field_id());
13354}
13355
// Sets the field-table id; requires the program lock to be held for writing.
void Field::set_field_id(intptr_t field_id) const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_field_id_unsafe(field_id);
}
13361
13362void Field::set_field_id_unsafe(intptr_t field_id) const {
13363 ASSERT(is_static());
13364 untag()->set_host_offset_or_field_id(Smi::New(value: field_id));
13365}
13366
13367intptr_t WeakArray::LengthOf(const WeakArrayPtr array) {
13368 return Smi::Value(raw_smi: array->untag()->length());
13369}
13370
13371void Context::SetAt(intptr_t index, const Object& value) const {
13372 untag()->set_element(index, value: value.ptr());
13373}
13374
13375intptr_t Instance::GetNativeField(int index) const {
13376 ASSERT(IsValidNativeIndex(index));
13377 NoSafepointScope no_safepoint;
13378 TypedDataPtr native_fields = static_cast<TypedDataPtr>(
13379 NativeFieldsAddr()->Decompress(heap_base: untag()->heap_base()));
13380 if (native_fields == TypedData::null()) {
13381 return 0;
13382 }
13383 return reinterpret_cast<intptr_t*>(native_fields->untag()->data())[index];
13384}
13385
13386void Instance::GetNativeFields(uint16_t num_fields,
13387 intptr_t* field_values) const {
13388 NoSafepointScope no_safepoint;
13389 ASSERT(num_fields == NumNativeFields());
13390 ASSERT(field_values != nullptr);
13391 TypedDataPtr native_fields = static_cast<TypedDataPtr>(
13392 NativeFieldsAddr()->Decompress(heap_base: untag()->heap_base()));
13393 if (native_fields == TypedData::null()) {
13394 for (intptr_t i = 0; i < num_fields; i++) {
13395 field_values[i] = 0;
13396 }
13397 }
13398 intptr_t* fields =
13399 reinterpret_cast<intptr_t*>(native_fields->untag()->data());
13400 for (intptr_t i = 0; i < num_fields; i++) {
13401 field_values[i] = fields[i];
13402 }
13403}
13404
13405bool String::Equals(const String& str) const {
13406 if (ptr() == str.ptr()) {
13407 return true; // Both handles point to the same raw instance.
13408 }
13409 if (str.IsNull()) {
13410 return false;
13411 }
13412 if (IsCanonical() && str.IsCanonical()) {
13413 return false; // Two symbols that aren't identical aren't equal.
13414 }
13415 if (HasHash() && str.HasHash() && (Hash() != str.Hash())) {
13416 return false; // Both sides have hash codes and they do not match.
13417 }
13418 return Equals(str, begin_index: 0, len: str.Length());
13419}
13420
13421intptr_t Library::UrlHash() const {
13422 intptr_t result = String::GetCachedHash(obj: url());
13423 ASSERT(result != 0);
13424 return result;
13425}
13426
13427void MegamorphicCache::SetEntry(const Array& array,
13428 intptr_t index,
13429 const Smi& class_id,
13430 const Object& target) {
13431 ASSERT(target.IsNull() || target.IsFunction() || target.IsSmi());
13432 array.SetAt(index: (index * kEntryLength) + kClassIdIndex, value: class_id);
13433 array.SetAt(index: (index * kEntryLength) + kTargetFunctionIndex, value: target);
13434}
13435
13436ObjectPtr MegamorphicCache::GetClassId(const Array& array, intptr_t index) {
13437 return array.At(index: (index * kEntryLength) + kClassIdIndex);
13438}
13439
13440ObjectPtr MegamorphicCache::GetTargetFunction(const Array& array,
13441 intptr_t index) {
13442 return array.At(index: (index * kEntryLength) + kTargetFunctionIndex);
13443}
13444
13445inline uword AbstractType::Hash() const {
13446 ASSERT(IsFinalized());
13447 intptr_t result = Smi::Value(raw_smi: untag()->hash());
13448 if (result != 0) {
13449 return result;
13450 }
13451 return ComputeHash();
13452}
13453
// Caches [value] as this type's hash.
inline void AbstractType::SetHash(intptr_t value) const {
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  untag()->set_hash(Smi::New(value));
}
13459
13460inline intptr_t RecordType::NumFields() const {
13461 return Array::LengthOf(array: field_types());
13462}
13463
13464inline uword TypeArguments::Hash() const {
13465 if (IsNull()) return kAllDynamicHash;
13466 intptr_t result = Smi::Value(raw_smi: untag()->hash());
13467 if (result != 0) {
13468 return result;
13469 }
13470 return ComputeHash();
13471}
13472
// Caches [value] as this vector's hash.
inline void TypeArguments::SetHash(intptr_t value) const {
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  untag()->set_hash(Smi::New(value));
}
13478
13479inline uint16_t String::CharAt(StringPtr str, intptr_t index) {
13480 switch (str->GetClassId()) {
13481 case kOneByteStringCid:
13482 return OneByteString::CharAt(str: static_cast<OneByteStringPtr>(str), index);
13483 case kTwoByteStringCid:
13484 return TwoByteString::CharAt(str: static_cast<TwoByteStringPtr>(str), index);
13485 case kExternalOneByteStringCid:
13486 return ExternalOneByteString::CharAt(
13487 str: static_cast<ExternalOneByteStringPtr>(str), index);
13488 case kExternalTwoByteStringCid:
13489 return ExternalTwoByteString::CharAt(
13490 str: static_cast<ExternalTwoByteStringPtr>(str), index);
13491 }
13492 UNREACHABLE();
13493 return 0;
13494}
13495
13496// A view on an [Array] as a list of tuples, optionally starting at an offset.
13497//
13498// Example: We store a list of (kind, function, code) tuples into the
13499// [Code::static_calls_target_table] array of type [Array].
13500//
13501// This helper class can then be used via
13502//
13503// using CallTableView = ArrayOfTuplesView<
13504// Code::Kind, std::tuple<Smi, Function, Code>>;
13505//
// auto& array = Array::Handle(code.static_calls_target_table());
13507// CallTableView static_calls(array);
13508//
13509// // Using convenient for loop.
13510// auto& function = Function::Handle();
13511// for (auto& call : static_calls) {
13512// function = call.Get<Code::kSCallTableFunctionTarget>();
13513// call.Set<Code::kSCallTableFunctionTarget>(function);
13514// }
13515//
13516// // Using manual loop.
13517// auto& function = Function::Handle();
13518// for (intptr_t i = 0; i < static_calls.Length(); ++i) {
13519// auto call = static_calls[i];
13520// function = call.Get<Code::kSCallTableFunctionTarget>();
13521// call.Set<Code::kSCallTableFunctionTarget>(function);
13522// }
13523//
13524//
13525// Template parameters:
13526//
13527// * [EnumType] must be a normal enum which enumerates the entries of the
13528// tuple
13529//
13530// * [kStartOffset] is the offset at which the first tuple in the array
13531// starts (can be 0).
13532//
13533// * [TupleT] must be a std::tuple<...> where "..." are the heap object handle
13534// classes (e.g. 'Code', 'Smi', 'Object')
template <typename EnumType, typename TupleT, int kStartOffset = 0>
class ArrayOfTuplesView {
 public:
  // Number of array slots occupied by one tuple.
  static constexpr intptr_t EntrySize = std::tuple_size<TupleT>::value;

  class Iterator;

  // A non-owning view of one tuple: a (array, flat-index) pair where
  // |index_| is the array slot of the tuple's first element.
  class TupleView {
   public:
    TupleView(const Array& array, intptr_t index)
        : array_(array), index_(index) {}

    // Reads tuple element |kElement| with the given memory order (relaxed by
    // default) and casts it to the handle class's raw-pointer type.
    template <EnumType kElement,
              std::memory_order order = std::memory_order_relaxed>
    typename std::tuple_element<kElement, TupleT>::type::ObjectPtrType Get()
        const {
      using object_type = typename std::tuple_element<kElement, TupleT>::type;
      return object_type::RawCast(array_.At<order>(index_ + kElement));
    }

    // Stores |value| into tuple element |kElement| with the given memory
    // order (relaxed by default).
    template <EnumType kElement,
              std::memory_order order = std::memory_order_relaxed>
    void Set(const typename std::tuple_element<kElement, TupleT>::type& value)
        const {
      array_.SetAt<order>(index_ + kElement, value);
    }

    // Position of this tuple within the view (0-based tuple index, not the
    // flat array index).
    intptr_t index() const { return (index_ - kStartOffset) / EntrySize; }

   private:
    const Array& array_;
    intptr_t index_;

    // Iterator advances entry_.index_ directly.
    friend class Iterator;
  };

  // Forward iterator over tuples; compares and advances by flat array index.
  class Iterator {
   public:
    Iterator(const Array& array, intptr_t index) : entry_(array, index) {}

    bool operator==(const Iterator& other) {
      return entry_.index_ == other.entry_.index_;
    }
    bool operator!=(const Iterator& other) {
      return entry_.index_ != other.entry_.index_;
    }

    const TupleView& operator*() const { return entry_; }

    Iterator& operator++() {
      entry_.index_ += EntrySize;
      return *this;
    }

   private:
    TupleView entry_;
  };

  explicit ArrayOfTuplesView(const Array& array) : array_(array) {
    ASSERT(!array.IsNull());
    ASSERT(array.Length() >= kStartOffset);
    // NOTE(review): this only verifies the layout when kStartOffset <
    // EntrySize (equivalent to (Length - kStartOffset) % EntrySize == 0 in
    // that case) — all current instantiations satisfy that.
    ASSERT(array.Length() % EntrySize == kStartOffset);
  }

  // Number of tuples in the view (header slots excluded).
  intptr_t Length() const {
    return (array_.Length() - kStartOffset) / EntrySize;
  }

  TupleView At(intptr_t i) const {
    return TupleView(array_, kStartOffset + i * EntrySize);
  }

  TupleView operator[](intptr_t i) const { return At(i); }

  Iterator begin() const { return Iterator(array_, kStartOffset); }

  Iterator end() const {
    return Iterator(array_, kStartOffset + Length() * EntrySize);
  }

 private:
  // Non-owning; the caller's handle must outlive this view.
  const Array& array_;
};
13618
// (kind+offset, target, function) tuples of Code::static_calls_target_table(),
// indexed by the Code::SCallTableEntry enum.
using StaticCallsTable =
    ArrayOfTuplesView<Code::SCallTableEntry, std::tuple<Smi, Object, Function>>;

using StaticCallsTableEntry = StaticCallsTable::TupleView;

// Entries of a subtype-test cache, indexed by SubtypeTestCache::Entries.
using SubtypeTestCacheTable = ArrayOfTuplesView<SubtypeTestCache::Entries,
                                                std::tuple<Object,
                                                           TypeArguments,
                                                           TypeArguments,
                                                           TypeArguments,
                                                           TypeArguments,
                                                           TypeArguments,
                                                           AbstractType,
                                                           Bool>>;

// (class-id, target) pairs of a megamorphic cache's backing array.
using MegamorphicCacheEntries =
    ArrayOfTuplesView<MegamorphicCache::EntryType, std::tuple<Smi, Object>>;

// Type-argument instantiation cache entries; the array carries
// TypeArguments::Cache::kHeaderSize header slots before the first tuple.
using InstantiationsCacheTable =
    ArrayOfTuplesView<TypeArguments::Cache::Entry,
                      std::tuple<Object, TypeArguments, TypeArguments>,
                      TypeArguments::Cache::kHeaderSize>;
13641
// Debugging helpers: dump the isolate's canonical type / type-parameter /
// type-arguments tables (definitions live in object.cc — not visible here).
void DumpTypeTable(Isolate* isolate);
void DumpTypeParameterTable(Isolate* isolate);
void DumpTypeArgumentsTable(Isolate* isolate);

// Searches |metadata_obj| for a pragma named |pragma_name|. Presumably
// returns whether it was found and, if |options| is non-null, stores the
// pragma's options there; |multiple| collects all matches — confirm against
// the definition in object.cc.
bool FindPragmaInMetadata(Thread* T,
                          const Object& metadata_obj,
                          const String& pragma_name,
                          bool multiple = false,
                          Object* options = nullptr);

// Looks up the entry-point pragma in |metadata|, reusing the caller-provided
// handles to avoid per-call handle allocation.
EntryPointPragma FindEntryPointPragma(IsolateGroup* isolate_group,
                                      const Array& metadata,
                                      Field* reusable_field_handle,
                                      Object* reusable_object_handle);

// Error constructors for invocations of members not marked as entry points;
// callers must consume the returned error.
DART_WARN_UNUSED_RESULT
ErrorPtr EntryPointFieldInvocationError(const String& getter_name);

DART_WARN_UNUSED_RESULT
ErrorPtr EntryPointMemberInvocationError(const Object& member);
13662
13663#undef PRECOMPILER_WSR_FIELD_DECLARATION
13664
13665} // namespace dart
13666
13667#endif // RUNTIME_VM_OBJECT_H_
13668

source code of flutter_engine/third_party/dart/runtime/vm/object.h