-
Notifications
You must be signed in to change notification settings - Fork 16.1k
Expand file tree
/
Copy patharena.h
More file actions
1237 lines (1090 loc) · 48 KB
/
arena.h
File metadata and controls
1237 lines (1090 loc) · 48 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
// This file defines an Arena allocator for better allocation performance.
#ifndef GOOGLE_PROTOBUF_ARENA_H__
#define GOOGLE_PROTOBUF_ARENA_H__
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <limits>
#include <new> // IWYU pragma: keep for operator new().
#include <string>
#include <type_traits>
#include <utility>
#include <vector>
#include "absl/base/macros.h"
#include "google/protobuf/internal_visibility.h"
#if defined(_MSC_VER) && !defined(_LIBCPP_STD_VER) && !_HAS_EXCEPTIONS
// Work around bugs in MSVC <typeinfo> header when _HAS_EXCEPTIONS=0.
#include <exception>
#include <typeinfo>
namespace std {
using type_info = ::type_info;
}
#endif
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/hash/hash.h"
#include "absl/log/absl_check.h"
#include "absl/strings/str_format.h"
#include "google/protobuf/arena_align.h"
#include "google/protobuf/arena_allocation_policy.h"
#include "google/protobuf/port.h"
#include "google/protobuf/serial_arena.h"
#include "google/protobuf/thread_safe_arena.h"
// Must be included last.
#include "google/protobuf/port_def.inc"
#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif
namespace google {
namespace protobuf {
struct ArenaOptions; // defined below
class Arena; // defined below
class Message; // defined in message.h
class MessageLite;
template <typename Key, typename T>
class Map;
namespace internal {
struct RepeatedFieldBase;
class ExtensionSet;
} // namespace internal
namespace TestUtil {
class ReflectionTester; // defined in test_util.h
} // namespace TestUtil
namespace internal {
struct ArenaTestPeer; // defined in arena_test_util.h
class InternalMetadata; // defined in metadata_lite.h
class LazyField; // defined in lazy_field.h
class EpsCopyInputStream; // defined in parse_context.h
class UntypedMapBase; // defined in map.h
class RepeatedPtrFieldBase; // defined in repeated_ptr_field.h
class TcParser; // defined in generated_message_tctable_impl.h
SerialArena* PROTOBUF_NULLABLE GetSerialArena(Arena* PROTOBUF_NULLABLE);
template <typename Type>
class GenericTypeHandler; // defined in repeated_field.h
// This struct maps field types to the types that we will use to represent them
// when allocated on an arena. This is necessary because fields no longer own an
// arena pointer, but can be allocated directly on an arena. In this case, we
// will use a wrapper class that holds both the arena pointer and the field, and
// points the field to the arena pointer.
//
// Additionally, split pointer fields will use this representation when
// allocated, regardless of whether they are on an arena or not.
//
// For example:
// ```
// template <>
// struct FieldArenaRep<Message> {
//   using Type = ArenaMessage;
//   static Message* Get(ArenaMessage* arena_rep) {
//     return &arena_rep->message();
//   }
// };
// ```
template <typename T>
struct FieldArenaRep {
  // The type of the field when allocated on an arena. By default, this is just
  // `T`, but can be specialized to use a wrapper class that holds both the
  // arena pointer and the field.
  using Type = T;
  // Returns a pointer to the field from the arena representation. By default
  // (no specialization), this is the identity function, but it can be
  // specialized to extract the field from the wrapper class.
  static T* PROTOBUF_NONNULL Get(Type* PROTOBUF_NONNULL arena_rep) {
    return arena_rep;
  }
};
// Returns true if `T` uses arena offsets instead of holding a copy of the arena
// pointer. This can be deduced if the field's arena representation is not the
// same as the field itself.
template <typename T>
constexpr bool FieldHasArenaOffset() {
using ArenaRepT = typename FieldArenaRep<T>::Type;
return !std::is_same_v<T, ArenaRepT>;
}
// TODO - Some types have a deprecated arena-enabled constructor,
// as we plan to remove it in favor of using arena offsets, but for now Arena
// needs to call it. While the arena constructor exists, we will call the
// `InternalVisibility` override to silence the warning.
template <typename T>
constexpr bool HasDeprecatedArenaConstructor() {
  constexpr bool derives_from_repeated_ptr_base =
      std::is_base_of_v<internal::RepeatedPtrFieldBase, T>;
  // The base class itself is excluded; only its derived types carry the
  // deprecated constructor.
  return derives_from_repeated_ptr_base &&
         !std::is_same_v<T, internal::RepeatedPtrFieldBase>;
}
// Cleanup callback used by Arena::Own(): deletes a heap-allocated object
// whose pointer was type-erased to `void*` when it was registered.
template <typename T>
void arena_delete_object(void* PROTOBUF_NONNULL object) {
  // `static_cast` is the correct, idiomatic cast from `void*` back to the
  // original pointer type; `reinterpret_cast` is unnecessary here.
  delete static_cast<T*>(object);
}
// Whether two objects owned by the given arenas may be swapped in place via
// InternalSwap() rather than by copying.
inline bool CanUseInternalSwap(Arena* PROTOBUF_NULLABLE lhs,
                               Arena* PROTOBUF_NULLABLE rhs) {
  const bool same_arena = (lhs == rhs);
  if (!DebugHardenForceCopyInSwap()) return same_arena;
  // Under copy-in-swap hardening, only swap in place when both sides share a
  // non-null arena: forcing copies on an arena would grow arena usage too
  // much.
  return lhs != nullptr && same_arena;
}
// Whether a move between objects owned by the given arenas may be implemented
// with InternalSwap() rather than by copying.
inline bool CanMoveWithInternalSwap(Arena* PROTOBUF_NULLABLE lhs,
                                    Arena* PROTOBUF_NULLABLE rhs) {
  const bool same_arena = (lhs == rhs);
  if (!DebugHardenForceCopyInMove()) return same_arena;
  // Under copy-in-move hardening, only move via swap when both sides share a
  // non-null arena: forcing copies on an arena would grow arena usage too
  // much.
  return lhs != nullptr && same_arena;
}
} // namespace internal
// ArenaOptions provides optional additional parameters to arena construction
// that control its block-allocation behavior.
struct ABSL_ATTRIBUTE_WARN_UNUSED ArenaOptions final {
  // This defines the size of the first block requested from the system malloc.
  // Subsequent block sizes will increase in a geometric series up to a maximum.
  size_t start_block_size = internal::AllocationPolicy::kDefaultStartBlockSize;

  // This defines the maximum block size requested from system malloc (unless an
  // individual arena allocation request occurs with a size larger than this
  // maximum). Requested block sizes increase up to this value, then remain
  // here.
  size_t max_block_size = internal::AllocationPolicy::DefaultMaxBlockSize();

  // An initial block of memory for the arena to use, or nullptr for none. If
  // provided, the block must live at least as long as the arena itself. The
  // creator of the Arena retains ownership of the block after the Arena is
  // destroyed.
  char* PROTOBUF_NULLABLE initial_block = nullptr;

  // The size of the initial block, if provided.
  size_t initial_block_size = 0;

  // A function pointer to an alloc method that returns memory blocks of size
  // requested. By default, it contains a ptr to the malloc function.
  //
  // NOTE: block_alloc and dealloc functions are expected to behave like
  // malloc and free, including Asan poisoning.
  void* PROTOBUF_NONNULL (*PROTOBUF_NULLABLE block_alloc)(size_t) = nullptr;

  // A function pointer to a dealloc method that takes ownership of the blocks
  // from the arena. By default, it contains a ptr to a wrapper function that
  // calls free.
  void (*PROTOBUF_NULLABLE block_dealloc)(void* PROTOBUF_NONNULL,
                                          size_t) = nullptr;

 private:
  // Bundles the sizing and allocator callbacks above into the internal
  // AllocationPolicy that the Arena constructor hands to its implementation.
  internal::AllocationPolicy AllocationPolicy() const {
    internal::AllocationPolicy res;
    res.start_block_size = start_block_size;
    res.max_block_size = max_block_size;
    res.block_alloc = block_alloc;
    res.block_dealloc = block_dealloc;
    return res;
  }

  friend class Arena;
  friend class ArenaOptionsTestFriend;
};
// Arena allocator. Arena allocation replaces ordinary (heap-based) allocation
// with new/delete, and improves performance by aggregating allocations into
// larger blocks and freeing allocations all at once. Protocol messages are
// allocated on an arena by using Arena::Create<T>(Arena*), below, and are
// automatically freed when the arena is destroyed.
//
// This is a thread-safe implementation: multiple threads may allocate from the
// arena concurrently. Destruction is not thread-safe and the destructing
// thread must synchronize with users of the arena first.
class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8)
#ifdef __clang__
// TODO: Enable this for GCC.
ABSL_ATTRIBUTE_WARN_UNUSED
#endif // __clang__
Arena final {
public:
// A unique-pointer-like smart pointer type for holding objects that
// correctly and safely deletes them, whether or not the objects are owned by
// protobuf `Arena`s. `UniquePtr` is used to hold either a newly created
// object or a message released from a parent container.
//
// In spirit, an `Arena::UniquePtr<T>` is akin to
// `std::variant<std::unique_ptr<T>, Arena::Ptr<T>>`. It might semantically
// contain either of those types, it can be constructed from them, and you can
// extract them out as needed.
//
// To create an `UniquePtr`, use the helper functions in `Arena` or release a
// message from a parent using one of the release functions in
// message_movers.h.
//
// If using heap, this smart pointer will own its object and destroy it as
// needed.
//
// `UniquePtr` provides a similar interface to `std::unique_ptr` except that
// it also provides access to the message's owning `Arena`, explicitly removes
// the `reset(T*)` function (though it leaves `reset()` and `reset(nullptr)`),
// and makes all constructors except the move-constructor private. Instead of
// `reset(T*)` or a constructor, you should use move assignment and the
// `MakeUnique`/`UnsafeWrapUniquePtr` functions.
//
//
// Example Usage:
// Arena* arena_ptr = ...;
// UniquePtr<MyMessage> parent = Arena::MakeUnique<MyMessage>(arena_ptr);
// ...
// UniquePtr<ChildMessage> ptr =
// google::protobuf::ReleaseMessageField<"child_field">(parent);
// ...
// ptr.reset(); // Will delete ChildMessage ptr if arena_ptr was nullptr.
// CHECK(ptr == nullptr);
// ...
// ModifyChildMessage(ptr.get());
// ConsumeChildMessage(std::move(ptr));
template <typename T>
class
ABSL_MUST_USE_RESULT
ABSL_ATTRIBUTE_TRIVIAL_ABI ABSL_NULLABILITY_COMPATIBLE UniquePtr;
// A smart pointer type for holding objects that are statically known to be
// owned by an `Arena`. Even though it is a smart pointer, `Ptr` does not
// actually own the underlying object.
//
// `Ptr` exists to provide invariants in the type-system in a way that `T*`
// cannot. `Ptr<T>` is similar to `T*` except it holds extra static
// information (the fact that it is arena owned) and extra dynamic information
// (the arena that owns it). Main differences from `UniquePtr` are:
// - `Ptr` is never null. It has no default state, and no moved-from state.
// - `Ptr` does not own the object. The underlying `Arena` does.
// - `Ptr` is copyable. Trying to move it will just copy it, just like `T*`
// would.
// - `Ptr` has no `reset()`. It can be assigned from another `Ptr`.
template <typename T>
class ABSL_MUST_USE_RESULT ABSL_ATTRIBUTE_TRIVIAL_ABI Ptr;
// Default constructor with sensible default options, tuned for average
// use-cases.
inline Arena() : impl_() {}

// Construct an arena with default options, except for the supplied
// initial block. It is more efficient to use this constructor
// instead of passing ArenaOptions if the only configuration needed
// by the caller is supplying an initial block.
inline Arena(char* PROTOBUF_NULLABLE initial_block, size_t initial_block_size)
    : impl_(initial_block, initial_block_size) {}

// Arena constructor taking custom options. See ArenaOptions above for
// descriptions of the options available.
explicit Arena(const ArenaOptions& options)
    : impl_(options.initial_block, options.initial_block_size,
            options.AllocationPolicy()) {}

// Block overhead. Use this as a guide for how much to over-allocate the
// initial block if you want an allocation of size N to fit inside it.
//
// WARNING: if you allocate multiple objects, it is difficult to guarantee
// that a series of allocations will fit in the initial block, especially if
// Arena changes its alignment guarantees in the future!
static const size_t kBlockOverhead =
    internal::ThreadSafeArena::kBlockHeaderSize +
    internal::ThreadSafeArena::kSerialArenaSize;

// All teardown is performed by `impl_`'s destructor.
inline ~Arena() = default;
// Allocates an object type T if the arena passed in is not nullptr;
// otherwise, returns a heap-allocated object.
//
// In new code, prefer `arena.Make<T>()` when it is statically known to have
// an arena, and `Arena::MakeUnique<T>(arena)` when you have a potentially
// null Arena*. These functions return smart pointers that help manage the
// lifetime of the returned object.
template <typename T, typename... Args>
PROTOBUF_FUTURE_ADD_EARLY_NODISCARD PROTOBUF_NDEBUG_INLINE static T*
    PROTOBUF_NONNULL
    Create(Arena* PROTOBUF_NULLABLE arena, Args&&... args) {
  if constexpr (is_arena_constructable<T>::value) {
    using Type = std::remove_const_t<T>;
    // DefaultConstruct/CopyConstruct are optimized for messages, which
    // are both arena constructible and destructor skippable and they
    // assume much. Don't use these functions unless the invariants
    // hold.
    if constexpr (is_destructor_skippable<T>::value) {
      constexpr auto construct_type = GetConstructType<T, Args&&...>();
      // We delegate to DefaultConstruct/CopyConstruct where appropriate
      // because protobuf generated classes have external templates for
      // these functions for code size reasons. When `if constexpr` is not
      // available always use the fallback.
      if constexpr (construct_type == ConstructType::kDefault) {
        return static_cast<Type*>(DefaultConstruct<Type>(arena));
      } else if constexpr (construct_type == ConstructType::kCopy) {
        return static_cast<Type*>(CopyConstruct<Type>(arena, &args...));
      }
    }
    // Generic arena-compatible construction for every other argument shape.
    return CreateArenaCompatible<Type>(arena, std::forward<Args>(args)...);
  } else {
    // Non-arena-constructable types: plain heap `new` when there is no arena,
    // otherwise placement-new into arena storage (AllocateInternal also
    // registers T's destructor as a cleanup when it is not trivial).
    if (ABSL_PREDICT_FALSE(arena == nullptr)) {
      return new T(std::forward<Args>(args)...);
    }
    return new (arena->AllocateInternal<T>()) T(std::forward<Args>(args)...);
  }
}
// Allocates an object type T if the arena passed in is not nullptr;
// otherwise, returns a heap-allocated object.
// The returned smart pointer owns the object even in the arena case.
template <typename T, int&..., typename... Args>
[[nodiscard]] PROTOBUF_NDEBUG_INLINE static UniquePtr<T> PROTOBUF_NONNULL
MakeUnique(Arena* PROTOBUF_NULLABLE arena, Args&&... args) {
  // Delegates allocation to Create<T>() and records `arena` as the owner; a
  // null arena means the returned UniquePtr owns (and will delete) the object.
  // NOLINTNEXTLINE(google3-runtime-pointer-nullability)
  return UnsafeWrapUniquePtr(arena,
                             Create<T>(arena, std::forward<Args>(args)...));
}
// Allocates an object type T in the arena.
// As opposed to `MakeUnique`, this is a non-static member implying that there
// is always an `Arena` instance.
// The returned value is always Arena owned.
//
// Note that `arena->Make<T>()` has undefined behavior if `arena` is null. If
// the caller is uncertain of the nullness of the arena pointer, it should
// prefer `MakeUnique<T>(arena)` instead.
template <typename T, int&..., typename... Args>
[[nodiscard]] PROTOBUF_NDEBUG_INLINE Ptr<T> Make(Args&&... args) {
  // `this` serves both as the allocator and as the recorded owning arena.
  return Ptr<T>(this, Create<T>(this, std::forward<Args>(args)...));
}
// Creates a `UniquePtr` with an explicit owning arena.
//
// If `owning_arena` is not the actual owner of `ptr`, the behavior is
// undefined. As such, this function is unsafe and should be of last resort.
//
// Note: The owning arena is not necessarily the same as `msg->GetArena()`.
// Do not use `msg->GetArena()` as the owning arena.
template <typename T>
[[nodiscard]] static UniquePtr<T> PROTOBUF_NULLABLE UnsafeWrapUniquePtr(
    Arena* PROTOBUF_NULLABLE owning_arena, T* PROTOBUF_NULLABLE ptr) {
  // No validation is performed; the caller asserts the ownership relation.
  return UniquePtr<T>(ptr, owning_arena);
}
// API to delete any objects not on an arena. This can be used to safely
// clean up messages or repeated fields without knowing whether or not they're
// owned by an arena. The pointer passed to this function should not be used
// again.
template <typename T>
PROTOBUF_ALWAYS_INLINE static void Destroy(T* PROTOBUF_NONNULL obj) {
  // Arena-owned objects are deliberately left alone: their arena frees them.
  if (InternalGetArena(obj) == nullptr) delete obj;
}
// Allocates memory with the specific size and alignment.
PROTOBUF_FUTURE_ADD_EARLY_NODISCARD void* PROTOBUF_NONNULL
AllocateAligned(size_t size, size_t align = 8) {
  if (align <= internal::ArenaAlignDefault::align) {
    // Fast path: the arena's default alignment already satisfies the request;
    // only the size needs rounding up.
    return Allocate(internal::ArenaAlignDefault::Ceil(size));
  } else {
    // We are wasting space by over allocating align - 8 bytes. Compared
    // to a dedicated function that takes current alignment in consideration.
    // Such a scheme would only waste (align - 8)/2 bytes on average, but
    // requires a dedicated function in the outline arena allocation
    // functions. Possibly re-evaluate tradeoffs later.
    auto align_as = internal::ArenaAlignAs(align);
    return align_as.Ceil(Allocate(align_as.Padded(size)));
  }
}
// Create an array of object type T on the arena *without* invoking the
// constructor of T. If `arena` is null, then the return value should be freed
// with `delete[] x;` (or `::operator delete[](x);`).
// To ensure safe uses, this function checks at compile time that T is
// trivially default-constructible and trivially destructible.
template <typename T>
PROTOBUF_FUTURE_ADD_EARLY_NODISCARD PROTOBUF_NDEBUG_INLINE static T*
    PROTOBUF_NONNULL
    CreateArray(Arena* PROTOBUF_NULLABLE arena, size_t num_elements) {
  static_assert(std::is_trivially_default_constructible<T>::value,
                "CreateArray requires a trivially constructible type");
  static_assert(std::is_trivially_destructible<T>::value,
                "CreateArray requires a trivially destructible type");
  // Guards against overflow in the `sizeof(T) * num_elements` product below.
  ABSL_CHECK_LE(num_elements,
                // Max rounded down to the 8 byte alignment.
                (std::numeric_limits<size_t>::max() & ~7) / sizeof(T))
      << "Requested size is too large to fit into size_t.";
  if (ABSL_PREDICT_FALSE(arena == nullptr)) {
    return new T[num_elements];
  } else {
    // We count on compiler to realize that if sizeof(T) is a multiple of
    // 8 AlignUpTo can be elided.
    return static_cast<T*>(
        arena->AllocateAlignedForArray(sizeof(T) * num_elements, alignof(T)));
  }
}
// The following routines are for monitoring. They will approximate the total
// sum allocated and used memory, but the exact value is an implementation
// detail. For instance, allocated space depends on growth policies. Do not use
// these in unit tests. Returns the total space allocated by the arena, which
// is the sum of the sizes of the underlying blocks.
// Total bytes allocated from the system for this arena (sum of block sizes).
PROTOBUF_FUTURE_ADD_EARLY_NODISCARD uint64_t SpaceAllocated() const {
  return impl_.SpaceAllocated();  // Delegates to the thread-safe arena impl.
}
// Returns the total space used by the arena. Similar to SpaceAllocated but
// does not include free space and block overhead. This is a best-effort
// estimate and may inaccurately calculate space used by other threads
// executing concurrently with the call to this method. These inaccuracies
// are due to race conditions, and are bounded but unpredictable. Stale data
// can lead to underestimates of the space used, and race conditions can lead
// to overestimates (up to the current block size).
PROTOBUF_FUTURE_ADD_EARLY_NODISCARD uint64_t SpaceUsed() const {
  return impl_.SpaceUsed();  // Delegates to the thread-safe arena impl.
}
// Frees all storage allocated by this arena after calling destructors
// registered with OwnDestructor() and freeing objects registered with Own().
// Any objects allocated on this arena are unusable after this call. It also
// returns the total space used by the arena which is the sums of the sizes
// of the allocated blocks. This method is not thread-safe.
uint64_t Reset() { return impl_.Reset(); }
// Adds |object| to a list of heap-allocated objects to be freed with |delete|
// when the arena is destroyed or reset.
template <typename T>
PROTOBUF_ALWAYS_INLINE void Own(T* PROTOBUF_NULLABLE object) {
  if (object == nullptr) return;
  // Registering any message through its MessageLite base collapses all
  // template instantiations into one, shrinking code size; the virtual
  // destructor performs the dispatch instead.
  using TypeToUse =
      std::conditional_t<std::is_convertible_v<T*, MessageLite*>, MessageLite,
                         T>;
  impl_.AddCleanup(static_cast<TypeToUse*>(object),
                   &internal::arena_delete_object<TypeToUse>);
}
// Adds |object| to a list of objects whose destructors will be manually
// called when the arena is destroyed or reset. This differs from Own() in
// that it does not free the underlying memory with |delete|; hence, it is
// normally only used for objects that are placement-newed into
// arena-allocated memory.
template <typename T>
PROTOBUF_ALWAYS_INLINE void OwnDestructor(T* PROTOBUF_NULLABLE object) {
  if (object == nullptr) return;
  impl_.AddCleanup(object, &internal::cleanup::arena_destruct_object<T>);
}
// Adds a custom member function on an object to the list of destructors that
// will be manually called when the arena is destroyed or reset. This differs
// from OwnDestructor() in that any member function may be specified, not only
// the class destructor.
PROTOBUF_ALWAYS_INLINE void OwnCustomDestructor(
    void* PROTOBUF_NONNULL object,
    void (*PROTOBUF_NONNULL destruct)(void* PROTOBUF_NONNULL)) {
  // `destruct(object)` will be invoked during arena teardown.
  impl_.AddCleanup(object, destruct);
}
// Grants protobuf internals uniform access to `T`'s arena protocol
// (InternalSwap, GetArena, arena-enabled construction) and computes the
// is_arena_constructable / is_destructor_skippable traits via SFINAE probes.
template <typename T>
class InternalHelper {
 private:
  // A SFINAE friendly trait that probes for `U` but always evaluates to
  // `Arena*`.
  template <typename U>
  using EnableIfArena =
      typename std::enable_if<std::is_same<Arena*, U>::value, Arena*>::type;

  // Use go/ranked-overloads for dispatching.
  struct Rank0 {};
  struct Rank1 : Rank0 {};

  static void InternalSwap(T* PROTOBUF_NONNULL a, T* PROTOBUF_NONNULL b) {
    a->InternalSwap(b);
  }

  static Arena* PROTOBUF_NULLABLE GetArena(T* PROTOBUF_NONNULL p) {
    return GetArena(Rank1{}, p);
  }

  // Preferred (Rank1) overload: selected when `p->GetArena()` exists and
  // returns `Arena*`.
  template <typename U>
  static auto GetArena(Rank1, U* PROTOBUF_NONNULL p)
      -> EnableIfArena<decltype(p->GetArena())> {
    return p->GetArena();
  }

  // Fallback (Rank0) overload: types without `GetArena()` report no arena.
  template <typename U>
  static Arena* PROTOBUF_NULLABLE GetArena(Rank0, U* PROTOBUF_NULLABLE) {
    return nullptr;
  }

  // If an object type T satisfies the appropriate protocol, it is deemed
  // "arena compatible" and handled more efficiently because this interface
  // (i) passes the arena pointer to the created object so that its
  // sub-objects and internal allocations can use the arena too, and (ii)
  // elides the object's destructor call when possible; e.g. protobuf
  // messages, RepeatedField, etc. Otherwise, the arena will invoke the
  // object's destructor when the arena is destroyed.
  //
  // To be "arena-compatible", a type T must satisfy the following:
  //
  // - The type T must have (at least) two constructors: a constructor
  //   callable with `args` (without `arena`), called when a T is allocated on
  //   the heap; and a constructor callable with `Arena* arena, Args&&...
  //   args`, called when a T is allocated on an arena. If the second
  //   constructor is called with a null arena pointer, it must be equivalent
  //   to invoking the first
  //   (`args`-only) constructor.
  //
  // - The type T must have a particular type trait: a nested type
  //   |InternalArenaConstructable_|. This is usually a typedef to |void|.
  //
  // - The type T *may* have the type trait |DestructorSkippable_|. If this
  //   type trait is present in the type, then its destructor will not be
  //   called if and only if it was passed a non-null arena pointer. If this
  //   type trait is not present on the type, then its destructor is always
  //   called when the containing arena is destroyed.
  //
  // The protocol is implemented by all protobuf message classes as well as
  // protobuf container types like RepeatedPtrField and Map. It is internal to
  // protobuf and is not guaranteed to be stable. Non-proto types should not
  // rely on this protocol.
  template <typename U>
  static char DestructorSkippable(
      const typename U::DestructorSkippable_* PROTOBUF_NULLABLE);
  template <typename U>
  static double DestructorSkippable(...);

  // True when T declares DestructorSkippable_ or is trivially destructible.
  typedef std::integral_constant<
      bool, sizeof(DestructorSkippable<T>(static_cast<const T*>(nullptr))) ==
                sizeof(char) ||
                std::is_trivially_destructible<T>::value>
      is_destructor_skippable;

  template <typename U>
  static char ArenaConstructable(
      const typename U::InternalArenaConstructable_* PROTOBUF_NULLABLE);
  template <typename U>
  static double ArenaConstructable(...);

  // True when T declares InternalArenaConstructable_.
  typedef std::integral_constant<bool, sizeof(ArenaConstructable<T>(
                                           static_cast<const T*>(nullptr))) ==
                                           sizeof(char)>
      is_arena_constructable;

  // Note that by this point, for types `U` which overload `FieldArenaRep<U>`,
  // `T` is the arena representation `FieldArenaRep<U>::Type` and is expected
  // to have an arena-enabled constructor.
  //
  // For types with a different arena representation, if the arena pointer is
  // null, the object is allocated directly with `new` as its original type,
  // since wrapping the type in the arena representation would be wasteful.
  template <typename... Args>
  static T* PROTOBUF_NONNULL ConstructOnArena(void* PROTOBUF_NONNULL ptr,
                                              Arena& arena, Args&&... args) {
    return new (ptr) T(&arena, static_cast<Args&&>(args)...);
  }

  template <typename... Args>
  static T* PROTOBUF_NONNULL Construct(void* PROTOBUF_NONNULL ptr,
                                       Arena* PROTOBUF_NULLABLE arena,
                                       Args&&... args) {
    if (ABSL_PREDICT_FALSE(arena == nullptr)) {
      return new (ptr) T(static_cast<Args&&>(args)...);
    } else {
      return ConstructOnArena(ptr, *arena, static_cast<Args&&>(args)...);
    }
  }

  // Heap default-construction that respects T's constructor shape.
  static PROTOBUF_ALWAYS_INLINE T* PROTOBUF_NONNULL New() {
    // Fields which use arena offsets don't have constructors that take an
    // arena pointer. Since the arena is nullptr, it is safe to default
    // construct the object.
    if constexpr (internal::FieldHasArenaOffset<T>() ||
                  internal::HasDeprecatedArenaConstructor<T>()) {
      return new T();
    } else {
      return new T(nullptr);
    }
  }

  friend class Arena;
  friend class TestUtil::ReflectionTester;
};
// Provides access to protected GetArena to generated messages.
// For internal use only. Returns null when `T` does not expose a
// `GetArena()` accessor (see InternalHelper's ranked overloads).
template <typename T>
static Arena* PROTOBUF_NULLABLE InternalGetArena(T* PROTOBUF_NONNULL p) {
  return InternalHelper<T>::GetArena(p);
}
// Helper typetraits that indicates support for arenas in a type T at compile
// time. This is public only to allow construction of higher-level templated
// utilities.
//
// is_arena_constructable<T>::value is true if the message type T has arena
// support enabled, and false otherwise.
//
// is_destructor_skippable<T>::value is true if the message type T has told
// the arena that it is safe to skip the destructor, and false otherwise.
//
// This is inside Arena because only Arena has the friend relationships
// necessary to see the underlying generated code traits.
// (Both are computed by the SFINAE probes in InternalHelper.)
template <typename T>
struct is_arena_constructable : InternalHelper<T>::is_arena_constructable {};
template <typename T>
struct is_destructor_skippable : InternalHelper<T>::is_destructor_skippable {
};
private:
// Underlying thread-safe allocator; every Arena operation delegates to it.
internal::ThreadSafeArena impl_;

enum class ConstructType { kUnknown, kDefault, kCopy, kMove };
// Overload set to detect which kind of construction is going to happen for a
// specific set of input arguments. This is used to dispatch to different
// helper functions.
// No arguments => default construction.
template <typename T>
static auto ProbeConstructType()
    -> std::integral_constant<ConstructType, ConstructType::kDefault>;
// A single lvalue or const rvalue of `T` => copy construction.
template <typename T>
static auto ProbeConstructType(const T&)
    -> std::integral_constant<ConstructType, ConstructType::kCopy>;
template <typename T>
static auto ProbeConstructType(T&)
    -> std::integral_constant<ConstructType, ConstructType::kCopy>;
template <typename T>
static auto ProbeConstructType(const T&&)
    -> std::integral_constant<ConstructType, ConstructType::kCopy>;
// A single non-const rvalue of `T` => move construction.
template <typename T>
static auto ProbeConstructType(T&&)
    -> std::integral_constant<ConstructType, ConstructType::kMove>;
// Anything else (different type, multiple arguments) => unknown.
template <typename T, typename... U>
static auto ProbeConstructType(U&&...)
    -> std::integral_constant<ConstructType, ConstructType::kUnknown>;

// Classifies the construction for message types only; all other types report
// kUnknown so that callers use the generic construction path.
template <typename T, typename... Args>
static constexpr auto GetConstructType() {
  return std::is_base_of<MessageLite, T>::value
             ? decltype(ProbeConstructType<T>(std::declval<Args>()...))::value
             : ConstructType::kUnknown;
}
// Internal: hands array memory at `p` (of `size` bytes) back to the
// underlying arena implementation.
void ReturnArrayMemory(void* PROTOBUF_NONNULL p, size_t size) {
  impl_.ReturnArrayMemory(p, size);
}
// Constructs an arena-constructable `T` with arguments: on the heap when
// `arena` is null, otherwise on the arena via DoCreateMessage().
template <typename T, typename... Args>
PROTOBUF_NDEBUG_INLINE static T* PROTOBUF_NONNULL
CreateArenaCompatible(Arena* PROTOBUF_NULLABLE arena, Args&&... args) {
  static_assert(is_arena_constructable<T>::value,
                "Can only construct types that are ArenaConstructable");
  if (ABSL_PREDICT_FALSE(arena == nullptr)) {
    // Types that use arena offsets (or only have the deprecated arena
    // constructor) are constructed without an arena argument; all others
    // receive an explicit null arena pointer.
    if constexpr (internal::FieldHasArenaOffset<T>() ||
                  internal::HasDeprecatedArenaConstructor<T>()) {
      return new T(static_cast<Args&&>(args)...);
    } else {
      return new T(nullptr, static_cast<Args&&>(args)...);
    }
  } else {
    return arena->DoCreateMessage<T>(static_cast<Args&&>(args)...);
  }
}
// This specialization for no arguments is necessary, because its behavior is
// slightly different. When the arena pointer is nullptr, it calls T()
// instead of T(nullptr).
template <typename T>
PROTOBUF_NDEBUG_INLINE static T* PROTOBUF_NONNULL
CreateArenaCompatible(Arena* PROTOBUF_NULLABLE arena) {
static_assert(is_arena_constructable<T>::value,
"Can only construct types that are ArenaConstructable");
if (ABSL_PREDICT_FALSE(arena == nullptr)) {
// Generated arena constructor T(Arena*) is protected. Call via
// InternalHelper.
return InternalHelper<T>::New();
} else {
return arena->DoCreateMessage<T>();
}
}
// Allocates arena storage suitable for holding a `T`. For trivially
// destructible types this is a plain aligned allocation; otherwise the
// allocation also registers arena_destruct_object<T> as a cleanup callback
// so ~T() runs when the arena is destroyed.
template <typename T, bool trivial = std::is_trivially_destructible<T>::value>
PROTOBUF_NDEBUG_INLINE void* PROTOBUF_NONNULL AllocateInternal() {
  if constexpr (trivial) {
    return AllocateAligned(sizeof(T), alignof(T));
  } else {
    // `if constexpr` discards this branch for trivially destructible types,
    // so arena_destruct_object<T> is never instantiated in that case. This
    // replaces the previous runtime `if` plus the
    // std::conditional_t<trivial, std::string, T> workaround, which existed
    // only because a plain `if` instantiates both branches.
    constexpr auto dtor = &internal::cleanup::arena_destruct_object<T>;
    return AllocateAlignedWithCleanup(sizeof(T), alignof(T), dtor);
  }
}
// DefaultConstruct/CopyConstruct:
//
// Functions with a generic signature to support taking the address in generic
// contexts, like RepeatedPtrField, etc.
// These are also used as a hook for `extern template` instantiations where
// codegen can offload the instantiations to the respective .pb.cc files. This
// has two benefits:
// - It reduces the library bloat as callers don't have to instantiate the
// function.
// - It allows the optimizer to see the constructors called to
// further optimize the instantiation.
// Default-constructs a `T` on `arena` and returns it type-erased.
template <typename T>
static void* PROTOBUF_NONNULL
DefaultConstruct(Arena* PROTOBUF_NULLABLE arena);
// Copy-constructs a `T` on `arena` from the type-erased `from` object.
template <typename T>
static void* PROTOBUF_NONNULL CopyConstruct(
Arena* PROTOBUF_NULLABLE arena, const void* PROTOBUF_NONNULL from);
// Allocates storage for `T`'s arena representation on this arena,
// constructs it via InternalHelper (forwarding `args...`), and returns the
// `T*` contained in that representation.
template <typename T, typename... Args>
PROTOBUF_NDEBUG_INLINE T* PROTOBUF_NONNULL DoCreateMessage(Args&&... args) {
// ArenaRepT is the type actually placed on the arena; per the note below
// it may contain `T` as a member rather than being `T` itself.
using ArenaRepT = typename internal::FieldArenaRep<T>::Type;
auto* arena_repr = InternalHelper<ArenaRepT>::ConstructOnArena(
AllocateInternal<ArenaRepT,
is_destructor_skippable<ArenaRepT>::value>(),
*this, std::forward<Args>(args)...);
// Note that we can't static_cast arena_repr to T* here, since T might be a
// member of ArenaRepT.
return internal::FieldArenaRep<T>::Get(arena_repr);
}
// CreateInArenaStorage is used to implement map field. Without it,
// Map need to call generated message's protected arena constructor,
// which needs to declare Map as friend of generated message.
// Constructs a `T` in the pre-allocated storage `ptr`, registering its
// destructor with `arena` when one must run.
template <typename T, typename... Args>
static void CreateInArenaStorage(T* PROTOBUF_NONNULL ptr,
Arena* PROTOBUF_NULLABLE arena,
Args&&... args) {
if constexpr (is_arena_constructable<T>::value) {
// Arena-constructable types go through InternalHelper so the protected
// arena constructor is reachable.
InternalHelper<T>::Construct(ptr, arena, std::forward<Args>(args)...);
} else {
// Plain placement-new for everything else.
new (ptr) T(std::forward<Args>(args)...);
}
// If ~T() must run, let the arena invoke it on destruction. When there is
// no arena, destruction is the caller's responsibility.
if constexpr (!is_destructor_skippable<T>::value) {
if (ABSL_PREDICT_TRUE(arena != nullptr)) {
arena->OwnDestructor(ptr);
}
}
}
// Implementation for GetArena(). Only message objects with
// InternalArenaConstructable_ tags can be associated with an arena, and such
// objects must implement a GetArena() method.
template <typename T>
PROTOBUF_ALWAYS_INLINE static Arena* PROTOBUF_NULLABLE
GetArenaInternal(T* PROTOBUF_NONNULL value) {
// Routed through InternalHelper so the (possibly protected) accessor on
// the generated type is reachable.
return InternalHelper<T>::GetArena(value);
}
// Allocates `n` bytes of array storage with (at least) the requested
// alignment.
void* PROTOBUF_NONNULL AllocateAlignedForArray(size_t n, size_t align) {
  // Default-aligned requests just round the size up and allocate.
  if (align <= internal::ArenaAlignDefault::align) {
    return AllocateForArray(internal::ArenaAlignDefault::Ceil(n));
  }
  // Over-aligned request: pad the allocation by `align - 8` extra bytes and
  // round the returned pointer up. A dedicated routine aware of the current
  // position's alignment would waste only (align - 8)/2 bytes on average,
  // but would need another out-of-line arena entry point; revisit this
  // tradeoff if it ever matters.
  auto aligner = internal::ArenaAlignAs(align);
  return aligner.Ceil(AllocateForArray(aligner.Padded(n)));
}
// Out-of-line allocation entry points (declarations only here).
void* PROTOBUF_NONNULL Allocate(size_t n);
void* PROTOBUF_NONNULL AllocateForArray(size_t n);
void* PROTOBUF_NONNULL AllocateAlignedWithCleanup(
size_t n, size_t align,
void (*PROTOBUF_NONNULL destructor)(void* PROTOBUF_NONNULL));
// Test only API.
// It returns the objects that are in the cleanup list for the current
// SerialArena. This API is meant for tests that want to see if something was
// added or not to the cleanup list. Sometimes adding something to the cleanup
// list has no visible side effect so peeking into the list is the only way to
// test.
std::vector<void*> PeekCleanupListForTesting();
// Friends that need access to the private construction/allocation internals
// above.
template <typename Type>
friend class internal::GenericTypeHandler;
friend class internal::InternalMetadata; // For user_arena().
friend class internal::LazyField; // For DefaultConstruct.
friend class internal::EpsCopyInputStream; // For parser performance
friend class internal::TcParser; // For parser performance
friend class MessageLite;
template <typename Key, typename T>
friend class Map;
template <typename>
friend class RepeatedField; // For ReturnArrayMemory
friend class internal::RepeatedPtrFieldBase; // For ReturnArrayMemory
friend class internal::UntypedMapBase; // For ReturnArrayMemory
friend class internal::ExtensionSet; // For ReturnArrayMemory
friend internal::SerialArena* PROTOBUF_NULLABLE
internal::GetSerialArena(Arena* PROTOBUF_NULLABLE);
friend struct internal::ArenaTestPeer;
};
namespace internal {
// Comparison base to inject relational operators in UniquePtr and Ptr.
// We use a base class to facilitate symmetric relational operators with
// UniquePtr, Ptr, T* and nullptr.
struct ArenaPtrCmpBase {
// Unpack() normalizes each supported operand kind to a raw pointer (or
// nullptr_t) so the friend operators below can compare any combination.
template <typename T>
static T* PROTOBUF_NULLABLE Unpack(T* PROTOBUF_NULLABLE ptr) {
return ptr;
}
// UniquePtr<T>: compare its managed pointer.
template <typename T>
static auto PROTOBUF_NULLABLE
Unpack(const typename Arena::UniquePtr<T>& ptr) {
return ptr.get();
}
// Ptr<T>: compare its (non-null) pointer.
template <typename T>
static auto PROTOBUF_NONNULL
Unpack(const typename Arena::template Ptr<T>& ptr) {
return ptr.get();
}
static std::nullptr_t Unpack(std::nullptr_t) { return nullptr; }
public:
// Hidden-friend operators, found via ADL on UniquePtr/Ptr operands. The
// expression SFINAE in the return type restricts them to operand types
// that Unpack() accepts.
template <typename LHS, typename RHS>
friend auto operator==(const LHS& lhs, const RHS& rhs)
-> decltype(Unpack(lhs) == Unpack(rhs)) {
return Unpack(lhs) == Unpack(rhs);
}
// operator!= is defined in terms of operator== above.
template <typename LHS, typename RHS>
friend auto operator!=(const LHS& lhs, const RHS& rhs)
-> decltype(lhs == rhs) {
return !(lhs == rhs);
}
};
// Transparent hasher that supports the same types as equality above.
// This allows for heterogeneous lookup on UniquePtr and Ptr keyed associative
// containers.
struct ArenaPtrContainerHash {
// `is_transparent` opts associative containers into heterogeneous lookup.
using is_transparent = void;
// Hashes the unpacked raw pointer so UniquePtr, Ptr, T* and nullptr all
// hash consistently with the equality defined in ArenaPtrCmpBase.
template <typename T>
auto operator()(const T& value) const
-> decltype(absl::HashOf(ArenaPtrCmpBase::Unpack(value))) {
return absl::HashOf(ArenaPtrCmpBase::Unpack(value));
}
};
// The deleter type used for implementing UniquePtr.
// It deletes the element only when the Arena* captured at construction time
// is nullptr (i.e. the object is heap owned); arena-owned objects are
// reclaimed by the arena itself.
struct UniquePtrDeleter {
  template <typename T>
  void operator()(T* PROTOBUF_NONNULL element) const {
    // Arena-owned memory: destruction is handled by the arena; do nothing.
    if (arena != nullptr) return;
    delete element;
  }
  Arena* PROTOBUF_NULLABLE arena = nullptr;
};
} // namespace internal
// Out-of-line definition of Arena::UniquePtr<T>: a unique_ptr-like handle
// that deletes heap-owned objects but leaves arena-owned objects for the
// arena to reclaim (via internal::UniquePtrDeleter).
template <typename T>
class
ABSL_MUST_USE_RESULT
ABSL_ATTRIBUTE_TRIVIAL_ABI ABSL_NULLABILITY_COMPATIBLE
Arena::UniquePtr final : internal::ArenaPtrCmpBase {
public:
using pointer = T*;
using element_type = T;
// Public Constructors
// Empty handles: hold nullptr with no owning arena.
constexpr UniquePtr() : ptr_(nullptr, Deleter{}) {}
// NOLINTNEXTLINE(google-explicit-constructor)
constexpr UniquePtr(std::nullptr_t) : ptr_(nullptr, Deleter{}) {}
// Allow implicit conversion from `std::unique_ptr` with
// `std::default_delete`.
// This is always safe since `UniquePtr` can safely hold heap-allocated
// pointers.
// NOLINTNEXTLINE(google-explicit-constructor)
UniquePtr(PROTOBUF_NULLABLE std::unique_ptr<T> heap_owned)
: ptr_(heap_owned.release(), Deleter{}) {}
// Allow implicit conversion from `Ptr<T>`.
// This is always safe since `Ptr` is statically known to be owned by an
// arena. There is no "unique" ownership on it.
// NOLINTNEXTLINE(google-explicit-constructor)
UniquePtr(Ptr<T> arena_owned)
: UniquePtr(arena_owned.get(), arena_owned.GetOwningArena()) {}
~UniquePtr() = default;
constexpr UniquePtr(UniquePtr&& rhs) = default;
// Converting move constructor: accepts UniquePtr<U> when U* converts to T*.
template <typename U,
typename = std::enable_if_t<std::is_convertible_v<U*, T*>>>
// NOLINTNEXTLINE(google-explicit-constructor)
constexpr UniquePtr(UniquePtr<U>&& rhs) : ptr_(std::move(rhs.ptr_)) {}
// Use Arena::UnsafeWrapUniquePtr or Arena::MakeUnique
explicit UniquePtr(T* PROTOBUF_NULLABLE ptr) = delete;
UniquePtr& operator=(UniquePtr&& rhs) = default;
// Assigning nullptr resets the handle (deleting a heap-owned object).
UniquePtr& operator=(std::nullptr_t) {
reset();
return *this;
}
// Converting move assignment: accepts UniquePtr<U> when U* converts to T*.
template <typename U,
typename = std::enable_if_t<std::is_convertible_v<U*, T*>>>
UniquePtr& operator=(UniquePtr<U>&& rhs) {
ptr_ = std::move(rhs.ptr_);
return *this;
}
// Delete the copy ctor and copy assignment operator.
UniquePtr(const UniquePtr& rhs) = delete;
UniquePtr& operator=(const UniquePtr& rhs) = delete;
// If heap allocated transfer ownership of the pointer to the caller, clearing
// the `UniquePtr` instance.
// Otherwise, return `absl::nullopt` and have no effect.
absl::optional<PROTOBUF_NONNULL std::unique_ptr<T>> try_heap_release() {
if (GetOwningArena() != nullptr || get() == nullptr) {
return absl::nullopt;
}
// Swap in an empty handle and hand the raw pointer to a plain
// std::unique_ptr; the caller now owns the deletion.
return std::unique_ptr<T>(std::exchange(ptr_, UniquePtrType()).release());
}
// If it contains an arena allocated object, return a `Ptr` to the caller.
// Otherwise, return `absl::nullopt`.
// This function does not modify the `UniquePtr`.
absl::optional<Ptr<T>> try_as_arena_ptr() const {
Arena* arena = GetOwningArena();
if (arena == nullptr || get() == nullptr) {
return absl::nullopt;
}
return Ptr<T>(arena, get());
}
void swap(UniquePtr& other) noexcept { ptr_.swap(other.ptr_); }
// ADL-friendly non-member swap.
friend void swap(UniquePtr& a, UniquePtr& b) noexcept { a.swap(b); }
// reset() the pointed to object to nullptr.
ABSL_ATTRIBUTE_REINITIALIZES void reset() { ptr_.reset(); }