// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
4
#include "vm/stub_code.h"

#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/app_snapshot.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/flags.h"
#include "vm/heap/safepoint.h"
#include "vm/object_store.h"
#include "vm/snapshot.h"
#include "vm/virtual_memory.h"
#include "vm/visitor.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/assembler/assembler.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
22
23namespace dart {
24
25DECLARE_FLAG(bool, precompiled_mode);
26
27StubCode::StubCodeEntry StubCode::entries_[kNumStubEntries] = {
28#if defined(DART_PRECOMPILED_RUNTIME)
29#define STUB_CODE_DECLARE(name) {nullptr, #name},
30#else
31#define STUB_CODE_DECLARE(name) \
32 {nullptr, #name, &compiler::StubCodeCompiler::Generate##name##Stub},
33#endif
34 VM_STUB_CODE_LIST(STUB_CODE_DECLARE)
35#undef STUB_CODE_DECLARE
36};
37AcqRelAtomic<bool> StubCode::initialized_ = {false};
38
39#if defined(DART_PRECOMPILED_RUNTIME)
40void StubCode::Init() {
41 // Stubs will be loaded from the snapshot.
42 UNREACHABLE();
43}
44
45#else
46
47void StubCode::Init() {
48 compiler::ObjectPoolBuilder object_pool_builder;
49
50 // Generate all the stubs.
51 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
52 entries_[i].code = Code::ReadOnlyHandle();
53 *(entries_[i].code) =
54 Generate(name: entries_[i].name, object_pool_builder: &object_pool_builder, GenerateStub: entries_[i].generator);
55 }
56
57 const ObjectPool& object_pool =
58 ObjectPool::Handle(ptr: ObjectPool::NewFromBuilder(builder: object_pool_builder));
59
60 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
61 entries_[i].code->set_object_pool(object_pool.ptr());
62 }
63
64 InitializationDone();
65
66#if defined(DART_PRECOMPILER)
67 {
68 // Set Function owner for UnknownDartCode stub so it pretends to
69 // be a Dart code.
70 Zone* zone = Thread::Current()->zone();
71 const auto& signature = FunctionType::Handle(zone, FunctionType::New());
72 auto& owner = Object::Handle(zone);
73 owner = Object::void_class();
74 ASSERT(!owner.IsNull());
75 owner = Function::New(signature, Object::null_string(),
76 UntaggedFunction::kRegularFunction,
77 /*is_static=*/true,
78 /*is_const=*/false,
79 /*is_abstract=*/false,
80 /*is_external=*/false,
81 /*is_native=*/false, owner, TokenPosition::kNoSource);
82 StubCode::UnknownDartCode().set_owner(owner);
83 StubCode::UnknownDartCode().set_exception_handlers(
84 Object::empty_exception_handlers());
85 StubCode::UnknownDartCode().set_pc_descriptors(Object::empty_descriptors());
86 ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
87 }
88#endif // defined(DART_PRECOMPILER)
89}
90
91#undef STUB_CODE_GENERATE
92#undef STUB_CODE_SET_OBJECT_POOL
93
94CodePtr StubCode::Generate(const char* name,
95 compiler::ObjectPoolBuilder* object_pool_builder,
96 void (compiler::StubCodeCompiler::*GenerateStub)()) {
97 auto thread = Thread::Current();
98 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
99
100 compiler::Assembler assembler(object_pool_builder);
101 Zone* zone = thread->zone();
102 auto* pc_descriptors_list = new (zone) DescriptorList(zone);
103 compiler::StubCodeCompiler stubCodeCompiler(&assembler, pc_descriptors_list);
104 (stubCodeCompiler.*GenerateStub)();
105 const Code& code = Code::Handle(
106 zone, ptr: Code::FinalizeCodeAndNotify(name, compiler: nullptr, assembler: &assembler,
107 pool_attachment: Code::PoolAttachment::kNotAttachPool,
108 /*optimized=*/false));
109 const PcDescriptors& descriptors = PcDescriptors::Handle(
110 zone, ptr: pc_descriptors_list->FinalizePcDescriptors(entry_point: code.PayloadStart()));
111 code.set_pc_descriptors(descriptors);
112
113#ifndef PRODUCT
114 if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
115 Disassembler::DisassembleStub(name, code);
116 }
117#endif // !PRODUCT
118 return code.ptr();
119}
120#endif // defined(DART_PRECOMPILED_RUNTIME)
121
122void StubCode::Cleanup() {
123 initialized_.store(arg: false, order: std::memory_order_release);
124
125 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
126 entries_[i].code = nullptr;
127 }
128}
129
130bool StubCode::InInvocationStub(uword pc) {
131 ASSERT(HasBeenInitialized());
132 uword entry = StubCode::InvokeDartCode().EntryPoint();
133 uword size = StubCode::InvokeDartCodeSize();
134 return (pc >= entry) && (pc < (entry + size));
135}
136
137bool StubCode::InJumpToFrameStub(uword pc) {
138 ASSERT(HasBeenInitialized());
139 uword entry = StubCode::JumpToFrame().EntryPoint();
140 uword size = StubCode::JumpToFrameSize();
141 return (pc >= entry) && (pc < (entry + size));
142}
143
144#if !defined(DART_PRECOMPILED_RUNTIME)
145ArrayPtr compiler::StubCodeCompiler::BuildStaticCallsTable(
146 Zone* zone,
147 compiler::UnresolvedPcRelativeCalls* unresolved_calls) {
148 if (unresolved_calls->length() == 0) {
149 return Array::null();
150 }
151 const intptr_t array_length =
152 unresolved_calls->length() * Code::kSCallTableEntryLength;
153 const auto& static_calls_table =
154 Array::Handle(zone, ptr: Array::New(len: array_length, space: Heap::kOld));
155 StaticCallsTable entries(static_calls_table);
156 auto& kind_type_and_offset = Smi::Handle(zone);
157 for (intptr_t i = 0; i < unresolved_calls->length(); i++) {
158 auto& unresolved_call = (*unresolved_calls)[i];
159 auto call_kind = unresolved_call->is_tail_call() ? Code::kPcRelativeTailCall
160 : Code::kPcRelativeCall;
161 kind_type_and_offset =
162 Smi::New(value: Code::KindField::encode(value: call_kind) |
163 Code::EntryPointField::encode(value: Code::kDefaultEntry) |
164 Code::OffsetField::encode(value: unresolved_call->offset()));
165 auto view = entries[i];
166 view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
167 view.Set<Code::kSCallTableCodeOrTypeTarget>(unresolved_call->target());
168 }
169 return static_calls_table.ptr();
170}
171
172CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
173 Thread* thread = Thread::Current();
174 auto object_store = thread->isolate_group()->object_store();
175 Zone* zone = thread->zone();
176 const Error& error =
177 Error::Handle(zone, ptr: cls.EnsureIsAllocateFinalized(thread));
178 ASSERT(error.IsNull());
179 switch (cls.id()) {
180 case kArrayCid:
181 return object_store->allocate_array_stub();
182#if !defined(TARGET_ARCH_IA32)
183 case kGrowableObjectArrayCid:
184 return object_store->allocate_growable_array_stub();
185#endif // !defined(TARGET_ARCH_IA32)
186 case kContextCid:
187 return object_store->allocate_context_stub();
188 case kUnhandledExceptionCid:
189 return object_store->allocate_unhandled_exception_stub();
190 case kMintCid:
191 return object_store->allocate_mint_stub();
192 case kDoubleCid:
193 return object_store->allocate_double_stub();
194 case kFloat32x4Cid:
195 return object_store->allocate_float32x4_stub();
196 case kFloat64x2Cid:
197 return object_store->allocate_float64x2_stub();
198 case kInt32x4Cid:
199 return object_store->allocate_int32x4_stub();
200 case kClosureCid:
201 return object_store->allocate_closure_stub();
202 case kRecordCid:
203 return object_store->allocate_record_stub();
204 }
205 Code& stub = Code::Handle(zone, ptr: cls.allocation_stub());
206 if (stub.IsNull()) {
207 compiler::ObjectPoolBuilder object_pool_builder;
208 Precompiler* precompiler = Precompiler::Instance();
209
210 compiler::ObjectPoolBuilder* wrapper =
211 precompiler != nullptr ? precompiler->global_object_pool_builder()
212 : &object_pool_builder;
213
214 const auto pool_attachment = FLAG_precompiled_mode
215 ? Code::PoolAttachment::kNotAttachPool
216 : Code::PoolAttachment::kAttachPool;
217
218 auto zone = thread->zone();
219 auto object_store = thread->isolate_group()->object_store();
220 auto& allocate_object_stub = Code::ZoneHandle(zone);
221 auto& allocate_object_parametrized_stub = Code::ZoneHandle(zone);
222 if (FLAG_precompiled_mode) {
223 allocate_object_stub = object_store->allocate_object_stub();
224 allocate_object_parametrized_stub =
225 object_store->allocate_object_parametrized_stub();
226 }
227
228 compiler::Assembler assembler(wrapper);
229 compiler::UnresolvedPcRelativeCalls unresolved_calls;
230 const char* name = cls.ToCString();
231 compiler::StubCodeCompiler stubCodeCompiler(&assembler, nullptr);
232 stubCodeCompiler.GenerateAllocationStubForClass(
233 unresolved_calls: &unresolved_calls, cls, allocate_object: allocate_object_stub,
234 allocat_object_parametrized: allocate_object_parametrized_stub);
235
236 const auto& static_calls_table =
237 Array::Handle(zone, ptr: compiler::StubCodeCompiler::BuildStaticCallsTable(
238 zone, unresolved_calls: &unresolved_calls));
239
240 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
241
242 auto mutator_fun = [&]() {
243 stub = Code::FinalizeCode(compiler: nullptr, assembler: &assembler, pool_attachment,
244 /*optimized=*/false,
245 /*stats=*/nullptr);
246 // Check if some other thread has not already added the stub.
247 if (cls.allocation_stub() == Code::null()) {
248 stub.set_owner(cls);
249 if (!static_calls_table.IsNull()) {
250 stub.set_static_calls_target_table(static_calls_table);
251 }
252 cls.set_allocation_stub(stub);
253 }
254 };
255
256 // We have to ensure no mutators are running, because:
257 //
258 // a) We allocate an instructions object, which might cause us to
259 // temporarily flip page protections from (RX -> RW -> RX).
260 thread->isolate_group()->RunWithStoppedMutators(function: mutator_fun,
261 /*use_force_growth=*/true);
262
263 // We notify code observers after finalizing the code in order to be
264 // outside a [SafepointOperationScope].
265 Code::NotifyCodeObservers(name, code: stub, /*optimized=*/false);
266#ifndef PRODUCT
267 if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
268 Disassembler::DisassembleStub(name, code: stub);
269 }
270#endif // !PRODUCT
271 }
272 return stub.ptr();
273}
274
275CodePtr StubCode::GetAllocationStubForTypedData(classid_t class_id) {
276 auto object_store = Thread::Current()->isolate_group()->object_store();
277 switch (class_id) {
278 case kTypedDataInt8ArrayCid:
279 return object_store->allocate_int8_array_stub();
280 case kTypedDataUint8ArrayCid:
281 return object_store->allocate_uint8_array_stub();
282 case kTypedDataUint8ClampedArrayCid:
283 return object_store->allocate_uint8_clamped_array_stub();
284 case kTypedDataInt16ArrayCid:
285 return object_store->allocate_int16_array_stub();
286 case kTypedDataUint16ArrayCid:
287 return object_store->allocate_uint16_array_stub();
288 case kTypedDataInt32ArrayCid:
289 return object_store->allocate_int32_array_stub();
290 case kTypedDataUint32ArrayCid:
291 return object_store->allocate_uint32_array_stub();
292 case kTypedDataInt64ArrayCid:
293 return object_store->allocate_int64_array_stub();
294 case kTypedDataUint64ArrayCid:
295 return object_store->allocate_uint64_array_stub();
296 case kTypedDataFloat32ArrayCid:
297 return object_store->allocate_float32_array_stub();
298 case kTypedDataFloat64ArrayCid:
299 return object_store->allocate_float64_array_stub();
300 case kTypedDataFloat32x4ArrayCid:
301 return object_store->allocate_float32x4_array_stub();
302 case kTypedDataInt32x4ArrayCid:
303 return object_store->allocate_int32x4_array_stub();
304 case kTypedDataFloat64x2ArrayCid:
305 return object_store->allocate_float64x2_array_stub();
306 }
307 UNREACHABLE();
308 return Code::null();
309}
310#endif // !defined(DART_PRECOMPILED_RUNTIME)
311
312#if !defined(TARGET_ARCH_IA32)
313CodePtr StubCode::GetBuildMethodExtractorStub(compiler::ObjectPoolBuilder* pool,
314 bool generic) {
315#if !defined(DART_PRECOMPILED_RUNTIME)
316 auto thread = Thread::Current();
317 auto Z = thread->zone();
318 auto object_store = thread->isolate_group()->object_store();
319
320 const auto& closure_allocation_stub =
321 Code::ZoneHandle(zone: Z, ptr: object_store->allocate_closure_stub());
322 const auto& context_allocation_stub =
323 Code::ZoneHandle(zone: Z, ptr: object_store->allocate_context_stub());
324
325 compiler::ObjectPoolBuilder object_pool_builder;
326 compiler::Assembler assembler(pool != nullptr ? pool : &object_pool_builder);
327 compiler::StubCodeCompiler stubCodeCompiler(&assembler, nullptr);
328 stubCodeCompiler.GenerateBuildMethodExtractorStub(
329 closure_allocation_stub, context_allocation_stub, generic);
330
331 const char* name = generic ? "BuildGenericMethodExtractor"
332 : "BuildNonGenericMethodExtractor";
333 const Code& stub = Code::Handle(ptr: Code::FinalizeCodeAndNotify(
334 name, compiler: nullptr, assembler: &assembler, pool_attachment: Code::PoolAttachment::kNotAttachPool,
335 /*optimized=*/false));
336
337 if (pool == nullptr) {
338 stub.set_object_pool(ObjectPool::NewFromBuilder(builder: object_pool_builder));
339 }
340
341#ifndef PRODUCT
342 if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
343 Disassembler::DisassembleStub(name, code: stub);
344 }
345#endif // !PRODUCT
346 return stub.ptr();
347#else // !defined(DART_PRECOMPILED_RUNTIME)
348 UNIMPLEMENTED();
349 return nullptr;
350#endif // !defined(DART_PRECOMPILED_RUNTIME)
351}
352#endif // !defined(TARGET_ARCH_IA32)
353
354const Code& StubCode::UnoptimizedStaticCallEntry(intptr_t num_args_tested) {
355 switch (num_args_tested) {
356 case 0:
357 return ZeroArgsUnoptimizedStaticCall();
358 case 1:
359 return OneArgUnoptimizedStaticCall();
360 case 2:
361 return TwoArgsUnoptimizedStaticCall();
362 default:
363 UNIMPLEMENTED();
364 return Code::Handle();
365 }
366}
367
368const char* StubCode::NameOfStub(uword entry_point) {
369 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
370 if ((entries_[i].code != nullptr) && !entries_[i].code->IsNull() &&
371 (entries_[i].code->EntryPoint() == entry_point)) {
372 return entries_[i].name;
373 }
374 }
375
376 auto object_store = IsolateGroup::Current()->object_store();
377
378#define MATCH(member, name) \
379 if (object_store->member() != Code::null() && \
380 entry_point == Code::EntryPointOf(object_store->member())) { \
381 return "_iso_stub_" #name "Stub"; \
382 }
383 OBJECT_STORE_STUB_CODE_LIST(MATCH)
384 MATCH(build_generic_method_extractor_code, BuildGenericMethodExtractor)
385 MATCH(build_nongeneric_method_extractor_code, BuildNonGenericMethodExtractor)
386#undef MATCH
387 return nullptr;
388}
389
390} // namespace dart
391

source code of flutter_engine/third_party/dart/runtime/vm/stub_code.cc