/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cassert>
#include <cerrno>
#include <cstddef>
#include <cstdlib>
#include <exception>
#include <limits>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <utility>

#include <folly/ConstexprMath.h>
#include <folly/Likely.h>
#include <folly/Traits.h>
#include <folly/functional/Invoke.h>
#include <folly/lang/Align.h>
#include <folly/lang/Exception.h>
#include <folly/portability/Config.h>
#include <folly/portability/Malloc.h>

namespace folly {

#if _POSIX_C_SOURCE >= 200112L || _XOPEN_SOURCE >= 600 || \
    (defined(__ANDROID__) && (__ANDROID_API__ > 16)) ||   \
    (defined(__APPLE__) &&                                \
     (__MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_6 ||    \
      __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_3_0))

inline void* aligned_malloc(size_t size, size_t align) {
  // use posix_memalign, but mimic the behaviour of memalign
  void* ptr = nullptr;
  int rc = posix_memalign(&ptr, align, size);
  return rc == 0 ? (errno = 0, ptr) : (errno = rc, nullptr);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#elif defined(_WIN32)

inline void* aligned_malloc(size_t size, size_t align) {
  return _aligned_malloc(size, align);
}

inline void aligned_free(void* aligned_ptr) {
  _aligned_free(aligned_ptr);
}

#else

inline void* aligned_malloc(size_t size, size_t align) {
  return memalign(align, size);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#endif
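
// Example (an illustrative sketch, not part of the API): allocating a
// 64-byte-aligned buffer and releasing it. The alignment value 64 is an
// assumption standing in for a typical cache line size. Pointers from
// aligned_malloc must be released with aligned_free, never plain free,
// since the Windows implementation pairs _aligned_malloc with
// _aligned_free.
//
//   void* buf = folly::aligned_malloc(1024, 64);
//   if (buf != nullptr) {
//     // ... use buf ...
//     folly::aligned_free(buf);
//   }
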
namespace detail {
template <typename Alloc, size_t kAlign, bool kAllocate>
void rawOverAlignedImpl(Alloc const& alloc, size_t n, void*& raw) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr bool kCanBypass = std::is_same<Alloc, std::allocator<T>>::value;

  // BaseType is a type that gives us as much alignment as we need if
  // we can get it naturally, otherwise it is aligned as max_align_t.
  // kBaseAlign is both the alignment and size of this type.
  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));
  using BaseType = std::aligned_storage_t<kBaseAlign, kBaseAlign>;
  using BaseAllocTraits =
      typename AllocTraits::template rebind_traits<BaseType>;
  using BaseAlloc = typename BaseAllocTraits::allocator_type;
  static_assert(
      sizeof(BaseType) == kBaseAlign && alignof(BaseType) == kBaseAlign, "");

#if __cpp_sized_deallocation
  if (kCanBypass && kAlign == kBaseAlign) {
    // until std::allocator uses sized deallocation, it is worth the
    // effort to bypass it when we are able
    if (kAllocate) {
      raw = ::operator new(n * sizeof(T));
    } else {
      ::operator delete(raw, n * sizeof(T));
    }
    return;
  }
#endif

  if (kCanBypass && kAlign > kBaseAlign) {
    // allocating as BaseType isn't sufficient to get alignment, but
    // since we can bypass Alloc we can use something like posix_memalign
    if (kAllocate) {
      raw = aligned_malloc(n * sizeof(T), kAlign);
    } else {
      aligned_free(raw);
    }
    return;
  }

  // we're not allowed to bypass Alloc, or we don't want to

  BaseAlloc a(alloc);

  // allocation size is counted in sizeof(BaseType)
  size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / sizeof(BaseType);

  if (kAlign <= kBaseAlign) {
    // rebinding Alloc to BaseType is sufficient to get us the alignment
    // we want, happy path
    if (kAllocate) {
      raw = static_cast<void*>(
          std::addressof(*BaseAllocTraits::allocate(a, quanta)));
    } else {
      BaseAllocTraits::deallocate(
          a,
          std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
              *static_cast<BaseType*>(raw)),
          quanta);
    }
    return;
  }

  // Overaligned and custom allocator, our only option is to
  // overallocate and store a delta to the actual allocation just
  // before the returned ptr.
  //
  // If we give ourselves kAlign extra bytes, then since
  // sizeof(BaseType) divides kAlign we can meet alignment while
  // getting a prefix of one BaseType. If we happen to get a
  // kAlign-aligned block, then we can return a pointer to underlying
  // + kAlign, otherwise there will be at least kBaseAlign bytes in
  // the unused prefix of the first kAlign-aligned block.
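  //
  // Worked example (illustrative numbers, assuming kAlign = 64 and
  // kBaseAlign = 16): if the low bits of base are 0x10, byteDelta is
  // 64 - 16 = 48, and raw = base + 48 is 64-byte aligned. Since base is
  // kBaseAlign-aligned, byteDelta is always a multiple of kBaseAlign in
  // [kBaseAlign, kAlign], so the size_t stashed at raw[-1] fits in the
  // unused prefix and the extra kAlign / sizeof(BaseType) quanta
  // allocated below cover the shift.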
  if (kAllocate) {
    char* base = reinterpret_cast<char*>(std::addressof(
        *BaseAllocTraits::allocate(a, quanta + kAlign / sizeof(BaseType))));
    size_t byteDelta =
        kAlign - (reinterpret_cast<uintptr_t>(base) & (kAlign - 1));
    raw = static_cast<void*>(base + byteDelta);
    static_cast<size_t*>(raw)[-1] = byteDelta;
  } else {
    size_t byteDelta = static_cast<size_t*>(raw)[-1];
    char* base = static_cast<char*>(raw) - byteDelta;
    BaseAllocTraits::deallocate(
        a,
        std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
            *reinterpret_cast<BaseType*>(base)),
        quanta + kAlign / sizeof(BaseType));
  }
}
} // namespace detail

// Works like std::allocator_traits<Alloc>::allocate, but handles
// over-aligned types. Feel free to manually specify any power of two as
// the Align template arg. Must be matched with deallocateOverAligned.
// allocationBytesForOverAligned will give you the number of bytes that
// this function actually requests.
template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
typename std::allocator_traits<Alloc>::pointer allocateOverAligned(
    Alloc const& alloc,
    size_t n) {
  void* raw = nullptr;
  detail::rawOverAlignedImpl<Alloc, kAlign, true>(alloc, n, raw);
  return std::pointer_traits<typename std::allocator_traits<Alloc>::pointer>::
      pointer_to(
          *static_cast<typename std::allocator_traits<Alloc>::value_type*>(
              raw));
}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
void deallocateOverAligned(
    Alloc const& alloc,
    typename std::allocator_traits<Alloc>::pointer ptr,
    size_t n) {
  void* raw = static_cast<void*>(std::addressof(*ptr));
  detail::rawOverAlignedImpl<Alloc, kAlign, false>(alloc, n, raw);
}
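
// Example (a minimal sketch, not from the original header): requesting
// cache-line alignment for an array of ints from the default allocator.
// The 64 is an assumed alignment; any power of two works. Note that raw
// memory is allocated but no int objects are constructed in it.
//
//   std::allocator<int> alloc;
//   int* p = folly::allocateOverAligned<std::allocator<int>, 64>(alloc, 10);
//   assert(reinterpret_cast<uintptr_t>(p) % 64 == 0);
//   folly::deallocateOverAligned<std::allocator<int>, 64>(alloc, p, 10);
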
template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
size_t allocationBytesForOverAligned(size_t n) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));

  if (kAlign > kBaseAlign && std::is_same<Alloc, std::allocator<T>>::value) {
    return n * sizeof(T);
  } else {
    size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / kBaseAlign;
    if (kAlign > kBaseAlign) {
      quanta += kAlign / kBaseAlign;
    }
    return quanta * kBaseAlign;
  }
}
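
// Worked example (illustrative numbers, assuming 4-byte int and a 16-byte
// max_align_t): allocateOverAligned<A, 64> of 10 ints through a custom
// allocator A rounds 40 bytes up to quanta = 3 units of 16, adds
// 64 / 16 = 4 units for the alignment prefix, and so requests
// (3 + 4) * 16 = 112 bytes. With std::allocator the bypass path above
// requests exactly 40 bytes instead.
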
/**
 * For exception safety and consistency with make_shared. Erase me when
 * we have std::make_unique().
 *
 * @author Louis Brandy (ldbrandy@fb.com)
 * @author Xu Ning (xning@fb.com)
 */

#if __cplusplus >= 201402L || __cpp_lib_make_unique >= 201304L || \
    (__ANDROID__ && __cplusplus >= 201300L) || _MSC_VER >= 1900

/* using override */ using std::make_unique;

#else

template <typename T, typename... Args>
typename std::enable_if<!std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(Args&&... args) {
  return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
}

// Allows 'make_unique<T[]>(10)'. (N3690 s20.9.1.4 p3-4)
template <typename T>
typename std::enable_if<std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(const size_t n) {
  return std::unique_ptr<T>(new typename std::remove_extent<T>::type[n]());
}

// Disallows 'make_unique<T[10]>()'. (N3690 s20.9.1.4 p5)
template <typename T, typename... Args>
typename std::enable_if<std::extent<T>::value != 0, std::unique_ptr<T>>::type
make_unique(Args&&...) = delete;

#endif

/**
 * static_function_deleter
 *
 * So you can write this:
 *
 *   using RSA_deleter = folly::static_function_deleter<RSA, &RSA_free>;
 *   auto rsa = std::unique_ptr<RSA, RSA_deleter>(RSA_new());
 *   RSA_generate_key_ex(rsa.get(), bits, exponent, nullptr);
 *   rsa = nullptr;  // calls RSA_free(rsa.get())
 *
 * This would be sweet as well for BIO, but unfortunately BIO_free has
 * signature int(BIO*) while we require signature void(BIO*). So you would
 * need to make a wrapper for it:
 *
 *   inline void BIO_free_fb(BIO* bio) { CHECK_EQ(1, BIO_free(bio)); }
 *   using BIO_deleter = folly::static_function_deleter<BIO, &BIO_free_fb>;
 *   auto buf = std::unique_ptr<BIO, BIO_deleter>(BIO_new(BIO_s_mem()));
 *   buf = nullptr;  // calls BIO_free(buf.get())
 */
template <typename T, void (*f)(T*)>
struct static_function_deleter {
  void operator()(T* t) const {
    f(t);
  }
};

/**
 * to_shared_ptr
 *
 * Convert unique_ptr to shared_ptr without specifying the template type
 * parameter and letting the compiler deduce it.
 *
 * So you can write this:
 *
 *   auto sptr = to_shared_ptr(getSomethingUnique<T>());
 *
 * Instead of this:
 *
 *   auto sptr = shared_ptr<T>(getSomethingUnique<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T, typename D>
std::shared_ptr<T> to_shared_ptr(std::unique_ptr<T, D>&& ptr) {
  return std::shared_ptr<T>(std::move(ptr));
}

/**
 * to_weak_ptr
 *
 * Make a weak_ptr and return it from a shared_ptr without specifying the
 * template type parameter and letting the compiler deduce it.
 *
 * So you can write this:
 *
 *   auto wptr = to_weak_ptr(getSomethingShared<T>());
 *
 * Instead of this:
 *
 *   auto wptr = weak_ptr<T>(getSomethingShared<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T>
std::weak_ptr<T> to_weak_ptr(const std::shared_ptr<T>& ptr) {
  return std::weak_ptr<T>(ptr);
}

namespace detail {
template <typename T>
struct lift_void_to_char {
  using type = T;
};
template <>
struct lift_void_to_char<void> {
  using type = char;
};
} // namespace detail

/**
 * SysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps std::malloc and
 * std::free.
 */
template <typename T>
class SysAllocator {
 private:
  using Self = SysAllocator<T>;

 public:
  using value_type = T;

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const p = std::malloc(sizeof(lifted) * count);
    if (!p) {
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    std::free(p);
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};
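
// Example (a minimal sketch, not part of the original header): using
// SysAllocator as a container allocator so element storage comes from
// std::malloc/std::free rather than operator new/delete.
//
//   std::vector<int, folly::SysAllocator<int>> vec;
//   vec.assign({1, 2, 3});
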
class DefaultAlign {
 private:
  using Self = DefaultAlign;
  std::size_t align_;

 public:
  explicit DefaultAlign(std::size_t align) noexcept : align_(align) {
    assert(!(align_ < sizeof(void*)) && bool("bad align: too small"));
    assert(!(align_ & (align_ - 1)) && bool("bad align: not power-of-two"));
  }
  std::size_t operator()() const noexcept {
    return align_;
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align_ == b.align_;
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align_ != b.align_;
  }
};

template <std::size_t Align>
class FixedAlign {
 private:
  static_assert(!(Align < sizeof(void*)), "bad align: too small");
  static_assert(!(Align & (Align - 1)), "bad align: not power-of-two");
  using Self = FixedAlign<Align>;

 public:
  constexpr std::size_t operator()() const noexcept {
    return Align;
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};

/**
 * AlignedSysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps aligned_malloc
 * and aligned_free.
 *
 * Accepts a policy parameter for providing the alignment, which must:
 *   * be invocable as std::size_t() noexcept, returning the alignment
 *   * be noexcept-copy-constructible
 *   * have noexcept operator==
 *   * have noexcept operator!=
 *   * not be final
 *
 * DefaultAlign and FixedAlign<std::size_t>, provided above, are valid
 * policies.
 */
template <typename T, typename Align = DefaultAlign>
class AlignedSysAllocator : private Align {
 private:
  using Self = AlignedSysAllocator<T, Align>;

  template <typename, typename>
  friend class AlignedSysAllocator;

  constexpr Align const& align() const {
    return *this;
  }

 public:
  static_assert(std::is_nothrow_copy_constructible<Align>::value, "");
  static_assert(is_nothrow_invocable_r<std::size_t, Align>::value, "");

  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  using Align::Align;

  // TODO: remove this ctor, which is required only by gcc49
  template <
      typename S = Align,
      _t<std::enable_if<std::is_default_constructible<S>::value, int>> = 0>
  constexpr AlignedSysAllocator() noexcept(noexcept(Align())) : Align() {}

  template <typename U>
  constexpr explicit AlignedSysAllocator(
      AlignedSysAllocator<U, Align> const& other) noexcept
      : Align(other.align()) {}

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const p = aligned_malloc(sizeof(lifted) * count, align()());
    if (!p) {
      if (FOLLY_UNLIKELY(errno != ENOMEM)) {
        std::terminate();
      }
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    aligned_free(p);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align() == b.align();
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align() != b.align();
  }
};
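
// Example (a minimal sketch): both policy flavors in use. FixedAlign bakes
// the alignment into the type; DefaultAlign carries it at runtime. The
// alignment 1024 is an arbitrary illustrative power of two.
//
//   using Fixed = folly::AlignedSysAllocator<int, folly::FixedAlign<1024>>;
//   std::vector<int, Fixed> a;
//
//   using Runtime = folly::AlignedSysAllocator<int>;  // DefaultAlign policy
//   std::vector<int, Runtime> b(Runtime(1024));
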
/**
 * CxxAllocatorAdaptor
 *
 * A type conforming to C++ concept Allocator, delegating operations to an
 * unowned Inner which has this required interface:
 *
 *   void* allocate(std::size_t)
 *   void deallocate(void*, std::size_t)
 *
 * Note that Inner is *not* a C++ Allocator.
 */
template <typename T, class Inner>
class CxxAllocatorAdaptor {
 private:
  using Self = CxxAllocatorAdaptor<T, Inner>;

  template <typename U, typename UAlloc>
  friend class CxxAllocatorAdaptor;

  std::reference_wrapper<Inner> ref_;

 public:
  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  explicit CxxAllocatorAdaptor(Inner& ref) : ref_(ref) {}

  template <typename U>
  explicit CxxAllocatorAdaptor(CxxAllocatorAdaptor<U, Inner> const& other)
      : ref_(other.ref_) {}

  T* allocate(std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    return static_cast<T*>(ref_.get().allocate(sizeof(lifted) * n));
  }
  void deallocate(T* p, std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    ref_.get().deallocate(p, sizeof(lifted) * n);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) == std::addressof(b.ref_.get());
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) != std::addressof(b.ref_.get());
  }
};
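
// Example (a hypothetical sketch; MyArena is invented for illustration):
// any object with allocate(std::size_t) and deallocate(void*, std::size_t)
// can back a standard container through the adaptor. The adaptor holds
// only a reference, so the arena must outlive the container.
//
//   struct MyArena {
//     void* allocate(std::size_t n) { return std::malloc(n); }
//     void deallocate(void* p, std::size_t) { std::free(p); }
//   };
//
//   MyArena arena;
//   std::vector<int, folly::CxxAllocatorAdaptor<int, MyArena>> vec(
//       folly::CxxAllocatorAdaptor<int, MyArena>(arena));
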
/*
 * allocator_delete
 *
 * A deleter which automatically works with a given allocator.
 *
 * Derives from the allocator to take advantage of the empty base
 * optimization when possible.
 */
template <typename Alloc>
class allocator_delete : private std::remove_reference<Alloc>::type {
 private:
  using allocator_type = typename std::remove_reference<Alloc>::type;
  using allocator_traits = std::allocator_traits<allocator_type>;
  using value_type = typename allocator_traits::value_type;
  using pointer = typename allocator_traits::pointer;

 public:
  allocator_delete() = default;
  allocator_delete(allocator_delete const&) = default;
  allocator_delete(allocator_delete&&) = default;
  allocator_delete& operator=(allocator_delete const&) = default;
  allocator_delete& operator=(allocator_delete&&) = default;

  explicit allocator_delete(const allocator_type& alloc)
      : allocator_type(alloc) {}

  explicit allocator_delete(allocator_type&& alloc)
      : allocator_type(std::move(alloc)) {}

  template <typename U>
  allocator_delete(const allocator_delete<U>& other)
      : allocator_type(other.get_allocator()) {}

  allocator_type const& get_allocator() const {
    return *this;
  }

  void operator()(pointer p) const {
    auto alloc = get_allocator();
    allocator_traits::destroy(alloc, p);
    allocator_traits::deallocate(alloc, p, 1);
  }
};

/**
 * allocate_unique, like std::allocate_shared but for std::unique_ptr
 */
template <typename T, typename Alloc, typename... Args>
std::unique_ptr<T, allocator_delete<Alloc>> allocate_unique(
    Alloc const& alloc,
    Args&&... args) {
  using traits = std::allocator_traits<Alloc>;
  // scope guard: if construct() throws before `cond` is set, return the
  // allocation to the allocator so it does not leak
  struct DeferCondDeallocate {
    bool& cond;
    Alloc& copy;
    T* p;
    ~DeferCondDeallocate() {
      if (FOLLY_UNLIKELY(!cond)) {
        traits::deallocate(copy, p, 1);
      }
    }
  };
  auto copy = alloc;
  auto const p = traits::allocate(copy, 1);
  {
    bool constructed = false;
    DeferCondDeallocate handler{constructed, copy, p};
    traits::construct(copy, p, static_cast<Args&&>(args)...);
    constructed = true;
  }
  return {p, allocator_delete<Alloc>(std::move(copy))};
}
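
// Example (a minimal sketch): allocate_unique pairs the allocation with an
// allocator_delete so destruction and deallocation both go through the
// allocator. Here SysAllocator stands in for any C++ Allocator.
//
//   folly::SysAllocator<int> alloc;
//   auto p = folly::allocate_unique<int>(alloc, 7);  // *p == 7
//   // when p goes out of scope, its deleter destroys *p and returns the
//   // memory via a copy of alloc
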
struct SysBufferDeleter {
  void operator()(void* ptr) {
    std::free(ptr);
  }
};
using SysBufferUniquePtr = std::unique_ptr<void, SysBufferDeleter>;

inline SysBufferUniquePtr allocate_sys_buffer(std::size_t size) {
  auto p = std::malloc(size);
  if (!p) {
    throw_exception<std::bad_alloc>();
  }
  return {p, {}};
}
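
// Example (a minimal sketch): a malloc-backed scratch buffer whose
// lifetime is tied to scope; std::free runs automatically on destruction.
//
//   folly::SysBufferUniquePtr buf = folly::allocate_sys_buffer(4096);
//   std::memset(buf.get(), 0, 4096);
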
/**
 * AllocatorHasTrivialDeallocate
 *
 * Unambiguously inherits std::integral_constant<bool, V> for some bool V.
 *
 * Describes whether a C++ Allocator has trivial, i.e. no-op, deallocate().
 *
 * Also may be used to describe types which may be used with
 * CxxAllocatorAdaptor.
 */
template <typename Alloc>
struct AllocatorHasTrivialDeallocate : std::false_type {};

template <typename T, class Alloc>
struct AllocatorHasTrivialDeallocate<CxxAllocatorAdaptor<T, Alloc>>
    : AllocatorHasTrivialDeallocate<Alloc> {};

namespace detail {
// note that construct and destroy here are methods, not short names for
// the constructor and destructor
FOLLY_CREATE_MEMBER_INVOKE_TRAITS(AllocatorConstruct_, construct);
FOLLY_CREATE_MEMBER_INVOKE_TRAITS(AllocatorDestroy_, destroy);

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_
    : AllocatorConstruct_::template is_invocable<Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_<
    void_t<typename Alloc::folly_has_default_object_construct>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_construct> {};

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_
    : AllocatorDestroy_::template is_invocable<Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_<
    void_t<typename Alloc::folly_has_default_object_destroy>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_destroy> {};
} // namespace detail

/**
 * AllocatorHasDefaultObjectConstruct
 *
 * AllocatorHasDefaultObjectConstruct<A, T, Args...> unambiguously
 * inherits std::integral_constant<bool, V>, where V will be true iff
 * the effect of std::allocator_traits<A>::construct(a, p, args...) is
 * the same as new (static_cast<void*>(p)) T(args...). If true then
 * any optimizations applicable to object construction (relying on
 * std::is_trivially_copyable<T>, for example) can be applied to objects
 * in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_construct. It is helpful to do this if you
 * define a custom allocator type that defines a construct method, but
 * that method doesn't do anything except call placement new.
 */
template <typename Alloc, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct
    : Negation<
          detail::AllocatorCustomizesConstruct_<void, Alloc, T*, Args...>> {};

template <typename Value, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct<std::allocator<Value>, T, Args...>
    : std::true_type {};
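
// Example (a hypothetical sketch; ForwardingAlloc is invented for
// illustration): an allocator whose construct() does nothing except call
// placement new defeats the detection above, so it can opt back in to the
// default-construct optimizations by declaring the alias:
//
//   template <typename T>
//   struct ForwardingAlloc {
//     using value_type = T;
//     using folly_has_default_object_construct = std::true_type;
//     T* allocate(std::size_t n);
//     void deallocate(T* p, std::size_t n);
//     template <typename... Args>
//     void construct(T* p, Args&&... args) {
//       // nothing but placement new, so the override above is sound
//       ::new (static_cast<void*>(p)) T(std::forward<Args>(args)...);
//     }
//   };
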
/**
 * AllocatorHasDefaultObjectDestroy
 *
 * AllocatorHasDefaultObjectDestroy<A, T> unambiguously inherits
 * std::integral_constant<bool, V>, where V will be true iff the effect
 * of std::allocator_traits<A>::destroy(a, p) is the same as p->~T().
 * If true then optimizations applicable to object destruction (relying
 * on std::is_trivially_destructible<T>, for example) can be applied to
 * objects in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_destroy. It is helpful to do this if you
 * define a custom allocator type that defines a destroy method, but that
 * method doesn't do anything except call the object's destructor.
 */
template <typename Alloc, typename T>
struct AllocatorHasDefaultObjectDestroy
    : Negation<detail::AllocatorCustomizesDestroy_<void, Alloc, T*>> {};

template <typename Value, typename T>
struct AllocatorHasDefaultObjectDestroy<std::allocator<Value>, T>
    : std::true_type {};

} // namespace folly