// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file defines an Arena allocator for better allocation performance.

#ifndef GOOGLE_PROTOBUF_ARENA_H__
#define GOOGLE_PROTOBUF_ARENA_H__

#include <limits>
#include <type_traits>
#include <utility>

#if defined(_MSC_VER) && !defined(_LIBCPP_STD_VER) && !_HAS_EXCEPTIONS
// Work around bugs in MSVC <typeinfo> header when _HAS_EXCEPTIONS=0.
#include <exception>
#include <typeinfo>
namespace std {
using type_info = ::type_info;
}
#else
#include <typeinfo>
#endif

#include "absl/meta/type_traits.h"
#include "google/protobuf/arena_align.h"
#include "google/protobuf/arena_config.h"
#include "google/protobuf/port.h"
#include "google/protobuf/serial_arena.h"
#include "google/protobuf/thread_safe_arena.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

struct ArenaOptions;  // defined below
class Arena;          // defined below
class Message;        // defined in message.h
class MessageLite;
template <typename Key, typename T>
class Map;

namespace arena_metrics {

void EnableArenaMetrics(ArenaOptions* options);

}  // namespace arena_metrics

namespace TestUtil {

class ReflectionTester;  // defined in test_util.h

}  // namespace TestUtil

namespace internal {

struct ArenaTestPeer;        // defined in arena_test_util.h
class InternalMetadata;      // defined in metadata_lite.h
class LazyField;             // defined in lazy_field.h
class EpsCopyInputStream;    // defined in parse_context.h
class RepeatedPtrFieldBase;  // defined in repeated_ptr_field.h
class TcParser;              // defined in generated_message_tctable_impl.h

template <typename Type>
class GenericTypeHandler;  // defined in repeated_field.h

template <bool destructor_skippable, typename T>
struct ObjectDestructor {
  constexpr static void (*destructor)(void*) =
      &internal::cleanup::arena_destruct_object<T>;
};

template <typename T>
struct ObjectDestructor<true, T> {
  constexpr static void (*destructor)(void*) = nullptr;
};

template <typename T>
void arena_delete_object(void* object) {
  delete reinterpret_cast<T*>(object);
}

}  // namespace internal

// ArenaOptions provides optional additional parameters to arena construction
// that control its block-allocation behavior.
struct ArenaOptions {
  // This defines the size of the first block requested from the system malloc.
  // Subsequent block sizes will increase in a geometric series up to a maximum.
  size_t start_block_size = internal::AllocationPolicy::kDefaultStartBlockSize;

  // This defines the maximum block size requested from system malloc (unless an
  // individual arena allocation request occurs with a size larger than this
  // maximum). Requested block sizes increase up to this value, then remain
  // here.
  size_t max_block_size = internal::GetDefaultArenaMaxBlockSize();

  // An initial block of memory for the arena to use, or nullptr for none. If
  // provided, the block must live at least as long as the arena itself. The
  // creator of the Arena retains ownership of the block after the Arena is
  // destroyed.
  char* initial_block = nullptr;

  // The size of the initial block, if provided.
  size_t initial_block_size = 0;

  // A function pointer to an alloc method that returns memory blocks of size
  // requested. By default, it contains a ptr to the malloc function.
  //
  // NOTE: block_alloc and dealloc functions are expected to behave like
  // malloc and free, including Asan poisoning.
  void* (*block_alloc)(size_t) = nullptr;

  // A function pointer to a dealloc method that takes ownership of the blocks
  // from the arena. By default, it contains a ptr to a wrapper function that
  // calls free.
  void (*block_dealloc)(void*, size_t) = nullptr;

 private:
  internal::AllocationPolicy AllocationPolicy() const {
    internal::AllocationPolicy res;
    res.start_block_size = start_block_size;
    res.max_block_size = max_block_size;
    res.block_alloc = block_alloc;
    res.block_dealloc = block_dealloc;
    return res;
  }

  friend class Arena;
  friend class ArenaOptionsTestFriend;
};
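
// Illustrative sketch (not part of the original header): constructing an arena
// with custom options. The numeric sizes and the `scratch` buffer below are
// arbitrary placeholders chosen for the example, not recommendations.
//
//   alignas(8) static char scratch[4096];         // caller-owned initial block
//   ArenaOptions options;
//   options.start_block_size = 1 << 10;           // first block from malloc
//   options.max_block_size = 64 << 10;            // cap for geometric growth
//   options.initial_block = scratch;              // must outlive the arena
//   options.initial_block_size = sizeof(scratch);
//   Arena arena(options);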

// Arena allocator. Arena allocation replaces ordinary (heap-based) allocation
// with new/delete, and improves performance by aggregating allocations into
// larger blocks and freeing allocations all at once. Protocol messages are
// allocated on an arena by using Arena::CreateMessage<T>(Arena*), below, and
// are automatically freed when the arena is destroyed.
//
// This is a thread-safe implementation: multiple threads may allocate from the
// arena concurrently. Destruction is not thread-safe and the destructing
// thread must synchronize with users of the arena first.
//
// An arena provides two allocation interfaces: CreateMessage<T>, which works
// for arena-enabled proto2 message types as well as other types that satisfy
// the appropriate protocol (described below), and Create<T>, which works for
// any arbitrary type T. CreateMessage<T> is better when the type T supports it,
// because this interface (i) passes the arena pointer to the created object so
// that its sub-objects and internal allocations can use the arena too, and (ii)
// elides the object's destructor call when possible. Create<T> does not place
// any special requirements on the type T, and will invoke the object's
// destructor when the arena is destroyed.
//
// The arena message allocation protocol, required by
// CreateMessage<T>(Arena* arena, Args&&... args), is as follows:
//
// - The type T must have (at least) two constructors: a constructor callable
//   with `args` (without `arena`), called when a T is allocated on the heap;
//   and a constructor callable with `Arena* arena, Args&&... args`, called when
//   a T is allocated on an arena. If the second constructor is called with a
//   null arena pointer, it must be equivalent to invoking the first
//   (`args`-only) constructor.
//
// - The type T must have a particular type trait: a nested type
//   |InternalArenaConstructable_|. This is usually a typedef to |void|. If no
//   such type trait exists, then the instantiation CreateMessage<T> will fail
//   to compile.
//
// - The type T *may* have the type trait |DestructorSkippable_|. If this type
//   trait is present in the type, then its destructor will not be called if and
//   only if it was passed a non-null arena pointer. If this type trait is not
//   present on the type, then its destructor is always called when the
//   containing arena is destroyed.
//
// This protocol is implemented by all arena-enabled proto2 message classes as
// well as protobuf container types like RepeatedPtrField and Map. The protocol
// is internal to protobuf and is not guaranteed to be stable. Non-proto types
// should not rely on this protocol.
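//
// Typical usage sketch (illustrative only; `MyMessage` stands in for a
// hypothetical arena-enabled generated message type):
//
//   Arena arena;
//   MyMessage* msg = Arena::CreateMessage<MyMessage>(&arena);
//   // ... set fields on *msg; its sub-objects also live on the arena ...
//   // Do not delete msg: it is freed when `arena` goes out of scope.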
class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
 public:
  // Default constructor with sensible default options, tuned for average
  // use-cases.
  inline Arena() : impl_() {}

  // Construct an arena with default options, except for the supplied
  // initial block. It is more efficient to use this constructor
  // instead of passing ArenaOptions if the only configuration needed
  // by the caller is supplying an initial block.
  inline Arena(char* initial_block, size_t initial_block_size)
      : impl_(initial_block, initial_block_size) {}

  // Arena constructor taking custom options. See ArenaOptions above for
  // descriptions of the options available.
  explicit Arena(const ArenaOptions& options)
      : impl_(options.initial_block, options.initial_block_size,
              options.AllocationPolicy()) {}

  // Block overhead. Use this as a guide for how much to over-allocate the
  // initial block if you want an allocation of size N to fit inside it.
  //
  // WARNING: if you allocate multiple objects, it is difficult to guarantee
  // that a series of allocations will fit in the initial block, especially if
  // Arena changes its alignment guarantees in the future!
  static const size_t kBlockOverhead =
      internal::ThreadSafeArena::kBlockHeaderSize +
      internal::ThreadSafeArena::kSerialArenaSize;

  inline ~Arena() {}

  // API to create proto2 message objects on the arena. If the arena passed in
  // is nullptr, then a heap allocated object is returned. Type T must be a
  // message defined in a .proto file with cc_enable_arenas set to true,
  // otherwise a compilation error will occur.
  //
  // RepeatedField and RepeatedPtrField may also be instantiated directly on an
  // arena with this method.
  //
  // This function also accepts any type T that satisfies the arena message
  // allocation protocol, documented above.
  template <typename T, typename... Args>
  PROTOBUF_ALWAYS_INLINE static T* CreateMessage(Arena* arena, Args&&... args) {
    static_assert(
        InternalHelper<T>::is_arena_constructable::value,
        "CreateMessage can only construct types that are ArenaConstructable");
    // We must delegate to CreateMaybeMessage() and NOT CreateMessageInternal()
    // because protobuf generated classes specialize CreateMaybeMessage() and we
    // need to use that specialization for code size reasons.
    return Arena::CreateMaybeMessage<T>(arena, static_cast<Args&&>(args)...);
  }
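
  // Illustrative sketch (not part of the original header): protobuf container
  // types can be created directly on an arena in the same way. `MyMessage` is
  // a hypothetical generated message type.
  //
  //   Arena arena;
  //   auto* elements =
  //       Arena::CreateMessage<RepeatedPtrField<MyMessage>>(&arena);
  //   elements->Add();  // new elements are allocated on the same arena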

  // API to create any objects on the arena. Note that only the object will
  // be created on the arena; the underlying ptrs (in case of a proto2 message)
  // will be still heap allocated. Proto messages should usually be allocated
  // with CreateMessage<T>() instead.
  //
  // Note that even if T satisfies the arena message construction protocol
  // (InternalArenaConstructable_ trait and optional DestructorSkippable_
  // trait), as described above, this function does not follow the protocol;
  // instead, it treats T as a black-box type, just as if it did not have these
  // traits. Specifically, T's constructor arguments will always be only those
  // passed to Create<T>() -- no additional arena pointer is implicitly added.
  // Furthermore, the destructor will always be called at arena destruction time
  // (unless the destructor is trivial). Hence, from T's point of view, it is as
  // if the object were allocated on the heap (except that the underlying memory
  // is obtained from the arena).
  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE static T* Create(Arena* arena, Args&&... args) {
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      return new T(std::forward<Args>(args)...);
    }
    return new (arena->AllocateInternal<T>()) T(std::forward<Args>(args)...);
  }
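
  // Illustrative sketch (not part of the original header): Create<T> with an
  // arbitrary, non-protocol type.
  //
  //   Arena arena;
  //   auto* v = Arena::Create<std::vector<int>>(&arena, 16, 0);
  //   // The vector object itself lives in arena memory; its element buffer is
  //   // still heap allocated, and ~vector() runs when the arena is destroyed.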

  // API to delete any objects not on an arena. This can be used to safely
  // clean up messages or repeated fields without knowing whether or not they're
  // owned by an arena. The pointer passed to this function should not be used
  // again.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE static void Destroy(T* obj) {
    if (InternalGetOwningArena(obj) == nullptr) delete obj;
  }
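
  // Illustrative sketch (not part of the original header); `MyMessage` and
  // `AcquireMessage()` are hypothetical.
  //
  //   MyMessage* msg = AcquireMessage();  // may be arena-owned or heap-owned
  //   Arena::Destroy(msg);  // deletes only if no arena owns it; do not reuse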

  // Allocates memory with the specific size and alignment.
  void* AllocateAligned(size_t size, size_t align = 8) {
    if (align <= internal::ArenaAlignDefault::align) {
      return Allocate(internal::ArenaAlignDefault::Ceil(size));
    } else {
      // We waste space by over-allocating align - 8 bytes, compared to a
      // dedicated function that takes the current alignment into
      // consideration. Such a scheme would only waste (align - 8)/2 bytes on
      // average, but requires a dedicated entry point in the out-of-line arena
      // allocation functions. Possibly re-evaluate tradeoffs later.
      auto align_as = internal::ArenaAlignAs(align);
      return align_as.Ceil(Allocate(align_as.Padded(size)));
    }
  }
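
  // Illustrative sketch (not part of the original header): raw arena memory
  // with a non-default alignment. No destructor is registered for memory
  // obtained this way.
  //
  //   Arena arena;
  //   void* buffer = arena.AllocateAligned(/*size=*/256, /*align=*/32);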

  // Create an array of object type T on the arena *without* invoking the
  // constructor of T. If `arena` is null, then the return value should be freed
  // with `delete[] x;` (or `::operator delete[](x);`).
  // To ensure safe uses, this function checks at compile time
  // (when compiled as C++11) that T is trivially default-constructible and
  // trivially destructible.
  template <typename T>
  PROTOBUF_NDEBUG_INLINE static T* CreateArray(Arena* arena,
                                               size_t num_elements) {
    static_assert(std::is_trivial<T>::value,
                  "CreateArray requires a trivially constructible type");
    static_assert(std::is_trivially_destructible<T>::value,
                  "CreateArray requires a trivially destructible type");
    ABSL_CHECK_LE(num_elements, std::numeric_limits<size_t>::max() / sizeof(T))
        << "Requested size is too large to fit into size_t.";
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      return static_cast<T*>(::operator new[](num_elements * sizeof(T)));
    } else {
      // We count on the compiler to realize that if sizeof(T) is a multiple of
      // 8 AlignUpTo can be elided.
      return static_cast<T*>(
          arena->AllocateAlignedForArray(sizeof(T) * num_elements, alignof(T)));
    }
  }
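
  // Illustrative sketch (not part of the original header): an uninitialized
  // array of a trivially constructible, trivially destructible type.
  //
  //   Arena arena;
  //   int32_t* buf = Arena::CreateArray<int32_t>(&arena, 128);
  //   // buf[0..127] are uninitialized; the arena owns the memory. With a null
  //   // arena the caller would own it and free it with `delete[] buf;`.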

  // The following routines are for monitoring. They approximate the total
  // allocated and used memory, but the exact values are an implementation
  // detail. For instance, the allocated space depends on growth policies. Do
  // not use these in unit tests.

  // Returns the total space allocated by the arena, which is the sum of the
  // sizes of the underlying blocks.
  uint64_t SpaceAllocated() const { return impl_.SpaceAllocated(); }

  // Returns the total space used by the arena. Similar to SpaceAllocated but
  // does not include free space and block overhead. This is a best-effort
  // estimate and may inaccurately calculate space used by other threads
  // executing concurrently with the call to this method. These inaccuracies
  // are due to race conditions, and are bounded but unpredictable. Stale data
  // can lead to underestimates of the space used, and race conditions can lead
  // to overestimates (up to the current block size).
  uint64_t SpaceUsed() const { return impl_.SpaceUsed(); }

  // Frees all storage allocated by this arena after calling destructors
  // registered with OwnDestructor() and freeing objects registered with Own().
  // Any objects allocated on this arena are unusable after this call. It also
  // returns the total space used by the arena, which is the sum of the sizes
  // of the allocated blocks. This method is not thread-safe.
  uint64_t Reset() { return impl_.Reset(); }
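
  // Illustrative sketch (not part of the original header): monitoring and
  // reuse via Reset().
  //
  //   Arena arena;
  //   // ... allocate messages on the arena ...
  //   uint64_t allocated = arena.SpaceAllocated();  // total block sizes
  //   uint64_t used = arena.SpaceUsed();            // best-effort estimate
  //   uint64_t freed = arena.Reset();               // frees storage for reuse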

  // Adds |object| to a list of heap-allocated objects to be freed with |delete|
  // when the arena is destroyed or reset.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE void Own(T* object) {
    // Collapsing all template instantiations to one for generic Message reduces
    // code size, using the virtual destructor instead.
    using TypeToUse =
        std::conditional_t<std::is_convertible<T*, MessageLite*>::value,
                           MessageLite, T>;
    if (object != nullptr) {
      impl_.AddCleanup(static_cast<TypeToUse*>(object),
                       &internal::arena_delete_object<TypeToUse>);
    }
  }

  // Adds |object| to a list of objects whose destructors will be manually
  // called when the arena is destroyed or reset. This differs from Own() in
  // that it does not free the underlying memory with |delete|; hence, it is
  // normally only used for objects that are placement-newed into
  // arena-allocated memory.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE void OwnDestructor(T* object) {
    if (object != nullptr) {
      impl_.AddCleanup(object, &internal::cleanup::arena_destruct_object<T>);
    }
  }

  // Adds a custom member function on an object to the list of destructors that
  // will be manually called when the arena is destroyed or reset. This differs
  // from OwnDestructor() in that any member function may be specified, not only
  // the class destructor.
  PROTOBUF_ALWAYS_INLINE void OwnCustomDestructor(void* object,
                                                  void (*destruct)(void*)) {
    impl_.AddCleanup(object, destruct);
  }
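
  // Illustrative sketches of the ownership hooks (not part of the original
  // header; `Logger`, `widget`, and `CloseWidget` are hypothetical):
  //
  //   Arena arena;
  //
  //   // Own(): the arena will `delete` this heap-allocated string later.
  //   arena.Own(new std::string("heap-owned"));
  //
  //   // OwnDestructor(): register only ~Logger() for an object that was
  //   // placement-newed into arena memory; no free() is performed.
  //   void* mem = arena.AllocateAligned(sizeof(Logger), alignof(Logger));
  //   Logger* logger = new (mem) Logger();
  //   arena.OwnDestructor(logger);
  //
  //   // OwnCustomDestructor(): register an arbitrary void(void*) callback for
  //   // some other object instead of its destructor.
  //   arena.OwnCustomDestructor(widget, &CloseWidget);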

  // Retrieves the arena associated with |value| if |value| is an arena-capable
  // message, or nullptr otherwise. If possible, the call resolves at compile
  // time. Note that we can often devirtualize calls to `value->GetArena()` so
  // usually calling this method is unnecessary.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE static Arena* GetArena(T* value) {
    return GetArenaInternal(value);
  }

  template <typename T>
  class InternalHelper {
   private:
    // An SFINAE-friendly trait that probes for `U` but always evaluates to
    // `Arena*`.
    template <typename U>
    using EnableIfArena =
        typename std::enable_if<std::is_same<Arena*, U>::value, Arena*>::type;

    // Rather than use SFINAE that must fully cover the space of options in a
    // mutually exclusive fashion, we use implicit conversions to base classes
    // to force an explicit ranking for our preferences. The lowest ranked
    // version that compiles will be accepted.
    struct Rank2 {};
    struct Rank1 : Rank2 {};
    struct Rank0 : Rank1 {};

    static Arena* GetOwningArena(const T* p) {
      return GetOwningArena(Rank0{}, p);
    }

    template <typename U>
    static auto GetOwningArena(Rank0, const U* p)
        -> EnableIfArena<decltype(p->GetOwningArena())> {
      return p->GetOwningArena();
    }

    template <typename U>
    static Arena* GetOwningArena(Rank1, const U*) {
      return nullptr;
    }

    static void InternalSwap(T* a, T* b) { a->InternalSwap(b); }

    static Arena* GetArenaForAllocation(T* p) {
      return GetArenaForAllocation(Rank0{}, p);
    }

    static Arena* GetArena(T* p) {
      // Rather than replicate probing for `GetArena` with fallback to nullptr,
      // we borrow the implementation of `GetArenaForAllocation` but skip
      // `Rank0` which probes for `GetArenaForAllocation`.
      return GetArenaForAllocation(Rank1{}, p);
    }

    template <typename U>
    static auto GetArenaForAllocation(Rank0, U* p)
        -> EnableIfArena<decltype(p->GetArenaForAllocation())> {
      return p->GetArenaForAllocation();
    }

    template <typename U>
    static auto GetArenaForAllocation(Rank1, U* p)
        -> EnableIfArena<decltype(p->GetArena())> {
      return p->GetArena();
    }

    template <typename U>
    static Arena* GetArenaForAllocation(Rank2, U*) {
      return nullptr;
    }

    template <typename U>
    static char DestructorSkippable(const typename U::DestructorSkippable_*);
    template <typename U>
    static double DestructorSkippable(...);

    typedef std::integral_constant<
        bool, sizeof(DestructorSkippable<T>(static_cast<const T*>(0))) ==
                      sizeof(char) ||
                  std::is_trivially_destructible<T>::value>
        is_destructor_skippable;

    template <typename U>
    static char ArenaConstructable(
        const typename U::InternalArenaConstructable_*);
    template <typename U>
    static double ArenaConstructable(...);

    typedef std::integral_constant<bool, sizeof(ArenaConstructable<T>(
                                              static_cast<const T*>(0))) ==
                                              sizeof(char)>
        is_arena_constructable;

    template <typename... Args>
    static T* Construct(void* ptr, Args&&... args) {
      return new (ptr) T(static_cast<Args&&>(args)...);
    }

    static inline PROTOBUF_ALWAYS_INLINE T* New() {
      return new T(nullptr);
    }

    friend class Arena;
    friend class TestUtil::ReflectionTester;
  };

  // Provides access to protected GetOwningArena to generated messages. For
  // internal use only.
  template <typename T>
  static Arena* InternalGetOwningArena(const T* p) {
    return InternalHelper<T>::GetOwningArena(p);
  }

  // Provides access to protected GetArenaForAllocation to generated messages.
  // For internal use only.
  template <typename T>
  static Arena* InternalGetArenaForAllocation(T* p) {
    return InternalHelper<T>::GetArenaForAllocation(p);
  }

  // Helper type traits that indicate support for arenas in a type T at compile
  // time. This is public only to allow construction of higher-level templated
  // utilities.
  //
  // is_arena_constructable<T>::value is true if the message type T has arena
  // support enabled, and false otherwise.
  //
  // is_destructor_skippable<T>::value is true if the message type T has told
  // the arena that it is safe to skip the destructor, and false otherwise.
  //
  // This is inside Arena because only Arena has the friend relationships
  // necessary to see the underlying generated code traits.
  template <typename T>
  struct is_arena_constructable : InternalHelper<T>::is_arena_constructable {};
  template <typename T>
  struct is_destructor_skippable : InternalHelper<T>::is_destructor_skippable {
  };
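
  // Illustrative sketch (not part of the original header): using the traits in
  // higher-level templated code (`MyMessage` is hypothetical).
  //
  //   static_assert(Arena::is_arena_constructable<MyMessage>::value,
  //                 "MyMessage must support arena construction");
  //   // A destructor-skippable type needs no cleanup record when created on
  //   // an arena:
  //   constexpr bool skip = Arena::is_destructor_skippable<MyMessage>::value;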

 private:
  internal::ThreadSafeArena impl_;

  void ReturnArrayMemory(void* p, size_t size) {
    impl_.ReturnArrayMemory(p, size);
  }

  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE static T* CreateMessageInternal(Arena* arena,
                                                         Args&&... args) {
    static_assert(
        InternalHelper<T>::is_arena_constructable::value,
        "CreateMessage can only construct types that are ArenaConstructable");
    if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
      return new T(nullptr, static_cast<Args&&>(args)...);
    } else {
      return arena->DoCreateMessage<T>(static_cast<Args&&>(args)...);
    }
  }

  // This specialization for no arguments is necessary, because its behavior is
  // slightly different. When the arena pointer is nullptr, it calls T()
  // instead of T(nullptr).
  template <typename T>
  PROTOBUF_NDEBUG_INLINE static T* CreateMessageInternal(Arena* arena) {
    static_assert(
        InternalHelper<T>::is_arena_constructable::value,
        "CreateMessage can only construct types that are ArenaConstructable");
    if (arena == nullptr) {
      // Generated arena constructor T(Arena*) is protected. Call via
      // InternalHelper.
      return InternalHelper<T>::New();
    } else {
      return arena->DoCreateMessage<T>();
    }
  }

  template <typename T, bool trivial = std::is_trivially_destructible<T>::value>
  PROTOBUF_NDEBUG_INLINE void* AllocateInternal() {
    if (trivial) {
      return AllocateAligned(sizeof(T), alignof(T));
    } else {
      constexpr auto dtor = &internal::cleanup::arena_destruct_object<T>;
      return AllocateAlignedWithCleanup(sizeof(T), alignof(T), dtor);
    }
  }

  // CreateMessage<T> requires that T supports arenas, but these private methods
  // work whether or not T supports arenas. They are not exposed to user code
  // because they can cause confusing API usage and double frees in user code.
  // They are used only internally from LazyField and Repeated fields, since
  // those are designed to work in all mode combinations.
  template <typename Msg, typename... Args>
  PROTOBUF_ALWAYS_INLINE static Msg* DoCreateMaybeMessage(Arena* arena,
                                                          std::true_type,
                                                          Args&&... args) {
    return CreateMessageInternal<Msg>(arena, std::forward<Args>(args)...);
  }

  template <typename T, typename... Args>
  PROTOBUF_ALWAYS_INLINE static T* DoCreateMaybeMessage(Arena* arena,
                                                        std::false_type,
                                                        Args&&... args) {
    return Create<T>(arena, std::forward<Args>(args)...);
  }

  template <typename T, typename... Args>
  PROTOBUF_ALWAYS_INLINE static T* CreateMaybeMessage(Arena* arena,
                                                      Args&&... args) {
    return DoCreateMaybeMessage<T>(arena, is_arena_constructable<T>(),
                                   std::forward<Args>(args)...);
  }

  template <typename T, typename... Args>
  PROTOBUF_NDEBUG_INLINE T* DoCreateMessage(Args&&... args) {
    return InternalHelper<T>::Construct(
        AllocateInternal<T, is_destructor_skippable<T>::value>(), this,
        std::forward<Args>(args)...);
  }

  // CreateInArenaStorage is used to implement the map field. Without it, Map
  // would need to call the generated message's protected arena constructor,
  // which would require declaring Map as a friend of the generated message.
  template <typename T, typename... Args>
  static void CreateInArenaStorage(T* ptr, Arena* arena, Args&&... args) {
    CreateInArenaStorageInternal(ptr, arena,
                                 typename is_arena_constructable<T>::type(),
                                 std::forward<Args>(args)...);
    if (PROTOBUF_PREDICT_TRUE(arena != nullptr)) {
      RegisterDestructorInternal(
          ptr, arena,
          typename InternalHelper<T>::is_destructor_skippable::type());
    }
  }

  template <typename T, typename... Args>
  static void CreateInArenaStorageInternal(T* ptr, Arena* arena,
                                           std::true_type, Args&&... args) {
    InternalHelper<T>::Construct(ptr, arena, std::forward<Args>(args)...);
  }

  template <typename T, typename... Args>
  static void CreateInArenaStorageInternal(T* ptr, Arena* /* arena */,
                                           std::false_type, Args&&... args) {
    new (ptr) T(std::forward<Args>(args)...);
  }

  template <typename T>
  static void RegisterDestructorInternal(T* /* ptr */, Arena* /* arena */,
                                         std::true_type) {}
  template <typename T>
  static void RegisterDestructorInternal(T* ptr, Arena* arena,
                                         std::false_type) {
    arena->OwnDestructor(ptr);
  }

  // Implementation for GetArena(). Only message objects with
  // InternalArenaConstructable_ tags can be associated with an arena, and such
  // objects must implement a GetArena() method.
  template <typename T>
  PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(T* value) {
    return InternalHelper<T>::GetArena(value);
  }

  void* AllocateAlignedForArray(size_t n, size_t align) {
    if (align <= internal::ArenaAlignDefault::align) {
      return AllocateForArray(internal::ArenaAlignDefault::Ceil(n));
    } else {
      // We waste space by over-allocating align - 8 bytes, compared to a
      // dedicated function that takes the current alignment into
      // consideration. Such a scheme would only waste (align - 8)/2 bytes on
      // average, but requires a dedicated entry point in the out-of-line arena
      // allocation functions. Possibly re-evaluate tradeoffs later.
      auto align_as = internal::ArenaAlignAs(align);
      return align_as.Ceil(AllocateForArray(align_as.Padded(n)));
    }
  }

  void* Allocate(size_t n);
  void* AllocateForArray(size_t n);
  void* AllocateAlignedWithCleanup(size_t n, size_t align,
                                   void (*destructor)(void*));

  template <typename Type>
  friend class internal::GenericTypeHandler;
  friend class internal::InternalMetadata;      // For user_arena().
  friend class internal::LazyField;             // For CreateMaybeMessage.
  friend class internal::EpsCopyInputStream;    // For parser performance
  friend class internal::TcParser;              // For parser performance
  friend class MessageLite;
  template <typename Key, typename T>
  friend class Map;
  template <typename>
  friend class RepeatedField;                   // For ReturnArrayMemory
  friend class internal::RepeatedPtrFieldBase;  // For ReturnArrayMemory
  friend struct internal::ArenaTestPeer;
};

template <>
inline void* Arena::AllocateInternal<std::string, false>() {
  return impl_.AllocateFromStringBlock();
}

}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_ARENA_H__