// FBVector.h
  1. /*
  2. * Copyright 2011-present Facebook, Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. /*
  17. * Nicholas Ormrod (njormrod)
  18. * Andrei Alexandrescu (aalexandre)
  19. *
  20. * FBVector is Facebook's drop-in implementation of std::vector. It has special
  21. * optimizations for use with relocatable types and jemalloc.
  22. */
  23. #pragma once
  24. //=============================================================================
  25. // headers
  26. #include <algorithm>
  27. #include <cassert>
  28. #include <iterator>
  29. #include <memory>
  30. #include <stdexcept>
  31. #include <type_traits>
  32. #include <utility>
  33. #include <folly/FormatTraits.h>
  34. #include <folly/Likely.h>
  35. #include <folly/Traits.h>
  36. #include <folly/lang/Exception.h>
  37. #include <folly/memory/Malloc.h>
  38. //=============================================================================
  39. // forward declaration
  40. namespace folly {
  41. template <class T, class Allocator = std::allocator<T>>
  42. class fbvector;
  43. } // namespace folly
  44. //=============================================================================
  45. // unrolling
// Manually 4-way-unrolled pointer loop: applies OP to every pointer in
// [first, last). Used by S_destroy_range, where the unrolling measurably
// helps small/medium vectors (see the benchmark notes there).
// NOTE: the expansion ends in `while (0);` — the semicolon is part of the
// macro, so call sites invoke it WITHOUT a trailing `;` (see the call in
// S_destroy_range). Both `first` and `last` may be modified.
#define FOLLY_FBV_UNROLL_PTR(first, last, OP) \
  do { \
    for (; (last) - (first) >= 4; (first) += 4) { \
      OP(((first) + 0)); \
      OP(((first) + 1)); \
      OP(((first) + 2)); \
      OP(((first) + 3)); \
    } \
    for (; (first) != (last); ++(first)) \
      OP((first)); \
  } while (0);
  57. //=============================================================================
  58. ///////////////////////////////////////////////////////////////////////////////
  59. // //
  60. // fbvector class //
  61. // //
  62. ///////////////////////////////////////////////////////////////////////////////
  63. namespace folly {
  64. template <class T, class Allocator>
  65. class fbvector {
  66. //===========================================================================
  67. //---------------------------------------------------------------------------
  68. // implementation
  69. private:
  70. typedef std::allocator_traits<Allocator> A;
  // Impl bundles the allocator with the three pointers that define the
  // vector: [b_, e_) is the live element range, [b_, z_) the allocated
  // capacity. It derives from Allocator so that an empty (stateless)
  // allocator contributes no storage (empty-base optimization).
  struct Impl : public Allocator {
    // typedefs
    typedef typename A::pointer pointer;
    typedef typename A::size_type size_type;

    // data
    pointer b_, e_, z_; // begin, end (size), end-of-storage (capacity)

    // constructors
    Impl() : Allocator(), b_(nullptr), e_(nullptr), z_(nullptr) {}
    /* implicit */ Impl(const Allocator& alloc)
        : Allocator(alloc), b_(nullptr), e_(nullptr), z_(nullptr) {}
    /* implicit */ Impl(Allocator&& alloc)
        : Allocator(std::move(alloc)), b_(nullptr), e_(nullptr), z_(nullptr) {}
    // Allocates capacity for n elements (size stays 0); may throw.
    /* implicit */ Impl(size_type n, const Allocator& alloc = Allocator())
        : Allocator(alloc) {
      init(n);
    }
    // Move steals the buffer and leaves `other` empty.
    Impl(Impl&& other) noexcept
        : Allocator(std::move(other)),
          b_(other.b_),
          e_(other.e_),
          z_(other.z_) {
      other.b_ = other.e_ = other.z_ = nullptr;
    }

    // destructor
    ~Impl() {
      destroy();
    }

    // allocation
    // note that 'allocate' and 'deallocate' are inherited from Allocator
    T* D_allocate(size_type n) {
      if (usingStdAllocator::value) {
        // std::allocator fast path: raw malloc (checked for OOM).
        return static_cast<T*>(checkedMalloc(n * sizeof(T)));
      } else {
        return std::allocator_traits<Allocator>::allocate(*this, n);
      }
    }
    void D_deallocate(T* p, size_type n) noexcept {
      if (usingStdAllocator::value) {
        free(p);
      } else {
        std::allocator_traits<Allocator>::deallocate(*this, p, n);
      }
    }

    // helpers
    // Exchange only the pointer triple; allocators are untouched.
    void swapData(Impl& other) {
      std::swap(b_, other.b_);
      std::swap(e_, other.e_);
      std::swap(z_, other.z_);
    }

    // data ops
    // Destroy all elements and release the buffer (no-op when empty).
    inline void destroy() noexcept {
      if (b_) {
        // THIS DISPATCH CODE IS DUPLICATED IN fbvector::D_destroy_range_a.
        // It has been inlined here for speed. It calls the static fbvector
        // methods to perform the actual destruction.
        if (usingStdAllocator::value) {
          S_destroy_range(b_, e_);
        } else {
          S_destroy_range_a(*this, b_, e_);
        }
        D_deallocate(b_, size_type(z_ - b_));
      }
    }
    // Allocate capacity for n elements, rounded up to the allocator's
    // "good" size so no usable malloc slack is wasted; size starts at 0.
    void init(size_type n) {
      if (UNLIKELY(n == 0)) {
        b_ = e_ = z_ = nullptr;
      } else {
        size_type sz = folly::goodMallocSize(n * sizeof(T)) / sizeof(T);
        b_ = D_allocate(sz);
        e_ = b_;
        z_ = b_ + sz;
      }
    }
    // Adopt an externally prepared buffer with the given size/capacity.
    void set(pointer newB, size_type newSize, size_type newCap) {
      z_ = newB + newCap;
      e_ = newB + newSize;
      b_ = newB;
    }
    // Discard contents and reallocate for newCap elements. If that
    // allocation throws, fall back to the empty state before rethrowing
    // so the destructor never sees dangling pointers.
    void reset(size_type newCap) {
      destroy();
      try {
        init(newCap);
      } catch (...) {
        init(0);
        throw;
      }
    }
    void reset() { // same as reset(0)
      destroy();
      b_ = e_ = z_ = nullptr;
    }
  } impl_;
  163. static void swap(Impl& a, Impl& b) {
  164. using std::swap;
  165. if (!usingStdAllocator::value) {
  166. swap(static_cast<Allocator&>(a), static_cast<Allocator&>(b));
  167. }
  168. a.swapData(b);
  169. }
  170. //===========================================================================
  171. //---------------------------------------------------------------------------
  172. // types and constants
 public:
  // Standard container typedefs — interface-compatible with std::vector.
  typedef T value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef T* iterator;
  typedef const T* const_iterator;
  typedef size_t size_type;
  typedef typename std::make_signed<size_type>::type difference_type;
  typedef Allocator allocator_type;
  typedef typename A::pointer pointer;
  typedef typename A::const_pointer const_pointer;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

 private:
  // Small trivially-copyable types are passed by value (VT/MT collapse to
  // T); larger types go by const& / && respectively.
  typedef bool_constant<
      is_trivially_copyable<T>::value &&
      sizeof(T) <= 16 // don't force large structures to be passed by value
      >
      should_pass_by_value;
  typedef
      typename std::conditional<should_pass_by_value::value, T, const T&>::type
          VT;
  typedef
      typename std::conditional<should_pass_by_value::value, T, T&&>::type MT;

  // True when the plain std::allocator<T> is in use; gates the malloc /
  // memset / memcpy fast paths throughout this file.
  typedef bool_constant<std::is_same<Allocator, std::allocator<T>>::value>
      usingStdAllocator;
  // Move assignment may steal the buffer (swap) when the allocator is the
  // std one or propagates on container move assignment.
  typedef bool_constant<
      usingStdAllocator::value ||
      A::propagate_on_container_move_assignment::value>
      moveIsSwap;
  203. //===========================================================================
  204. //---------------------------------------------------------------------------
  205. // allocator helpers
  206. private:
  207. //---------------------------------------------------------------------------
  208. // allocate
  209. T* M_allocate(size_type n) {
  210. return impl_.D_allocate(n);
  211. }
  212. //---------------------------------------------------------------------------
  213. // deallocate
  214. void M_deallocate(T* p, size_type n) noexcept {
  215. impl_.D_deallocate(p, n);
  216. }
  217. //---------------------------------------------------------------------------
  218. // construct
  219. // GCC is very sensitive to the exact way that construct is called. For
  220. // that reason there are several different specializations of construct.
  // Generic construct: placement-new on the std::allocator fast path,
  // otherwise routed through allocator_traits so custom allocators observe
  // every construction.
  template <typename U, typename... Args>
  void M_construct(U* p, Args&&... args) {
    if (usingStdAllocator::value) {
      new (p) U(std::forward<Args>(args)...);
    } else {
      std::allocator_traits<Allocator>::construct(
          impl_, p, std::forward<Args>(args)...);
    }
  }
  // Static variant: no allocator instance available, placement-new only.
  template <typename U, typename... Args>
  static void S_construct(U* p, Args&&... args) {
    new (p) U(std::forward<Args>(args)...);
  }
  // Static variant with an explicit allocator.
  template <typename U, typename... Args>
  static void S_construct_a(Allocator& a, U* p, Args&&... args) {
    std::allocator_traits<Allocator>::construct(
        a, p, std::forward<Args>(args)...);
  }

  // scalar optimization
  // TODO we can expand this optimization to: default copyable and assignable
  // For scalars a plain store into the raw slot suffices (easier for the
  // optimizer to see through than placement-new).
  template <
      typename U,
      typename Enable = typename std::enable_if<std::is_scalar<U>::value>::type>
  void M_construct(U* p, U arg) {
    if (usingStdAllocator::value) {
      *p = arg;
    } else {
      std::allocator_traits<Allocator>::construct(impl_, p, arg);
    }
  }
  template <
      typename U,
      typename Enable = typename std::enable_if<std::is_scalar<U>::value>::type>
  static void S_construct(U* p, U arg) {
    *p = arg;
  }
  template <
      typename U,
      typename Enable = typename std::enable_if<std::is_scalar<U>::value>::type>
  static void S_construct_a(Allocator& a, U* p, U arg) {
    std::allocator_traits<Allocator>::construct(a, p, arg);
  }

  // const& optimization
  // Non-scalars are taken by const reference and copy-constructed in place.
  template <
      typename U,
      typename Enable =
          typename std::enable_if<!std::is_scalar<U>::value>::type>
  void M_construct(U* p, const U& value) {
    if (usingStdAllocator::value) {
      new (p) U(value);
    } else {
      std::allocator_traits<Allocator>::construct(impl_, p, value);
    }
  }
  template <
      typename U,
      typename Enable =
          typename std::enable_if<!std::is_scalar<U>::value>::type>
  static void S_construct(U* p, const U& value) {
    new (p) U(value);
  }
  template <
      typename U,
      typename Enable =
          typename std::enable_if<!std::is_scalar<U>::value>::type>
  static void S_construct_a(Allocator& a, U* p, const U& value) {
    std::allocator_traits<Allocator>::construct(a, p, value);
  }
  289. //---------------------------------------------------------------------------
  290. // destroy
  291. void M_destroy(T* p) noexcept {
  292. if (usingStdAllocator::value) {
  293. if (!std::is_trivially_destructible<T>::value) {
  294. p->~T();
  295. }
  296. } else {
  297. std::allocator_traits<Allocator>::destroy(impl_, p);
  298. }
  299. }
  300. //===========================================================================
  301. //---------------------------------------------------------------------------
  302. // algorithmic helpers
  303. private:
  304. //---------------------------------------------------------------------------
  305. // destroy_range
  306. // wrappers
  // Destroy the tail [pos, e_) and pull the end pointer back to pos.
  void M_destroy_range_e(T* pos) noexcept {
    D_destroy_range_a(pos, impl_.e_);
    impl_.e_ = pos;
  }
  // dispatch
  // THIS DISPATCH CODE IS DUPLICATED IN IMPL. SEE IMPL FOR DETAILS.
  void D_destroy_range_a(T* first, T* last) noexcept {
    if (usingStdAllocator::value) {
      S_destroy_range(first, last);
    } else {
      S_destroy_range_a(impl_, first, last);
    }
  }
  // allocator
  // Custom-allocator path: each destruction goes through allocator_traits.
  static void S_destroy_range_a(Allocator& a, T* first, T* last) noexcept {
    for (; first != last; ++first) {
      std::allocator_traits<Allocator>::destroy(a, first);
    }
  }
  // optimized
  // std::allocator path: invoke destructors directly; a no-op for
  // trivially destructible types.
  static void S_destroy_range(T* first, T* last) noexcept {
    if (!std::is_trivially_destructible<T>::value) {
#define FOLLY_FBV_OP(p) (p)->~T()
      // EXPERIMENTAL DATA on fbvector<vector<int>> (where each vector<int>
      // has size 0).
      // The unrolled version seems to work faster for small to medium sized
      // fbvectors. It gets a 10% speedup on fbvectors of size 1024, 64, and
      // 16.
      // The simple loop version seems to work faster for large fbvectors. The
      // unrolled version is about 6% slower on fbvectors on size 16384.
      // The two methods seem tied for very large fbvectors. The unrolled
      // version is about 0.5% slower on size 262144.
      // for (; first != last; ++first) first->~T();
      // (no trailing `;` — the macro supplies its own, see its definition)
      FOLLY_FBV_UNROLL_PTR(first, last, FOLLY_FBV_OP)
#undef FOLLY_FBV_OP
    }
  }
  344. //---------------------------------------------------------------------------
  345. // uninitialized_fill_n
  346. // wrappers
  // Append sz default-constructed elements at the end; e_ advances only
  // after the fill succeeds, so a throw leaves the vector unchanged.
  void M_uninitialized_fill_n_e(size_type sz) {
    D_uninitialized_fill_n_a(impl_.e_, sz);
    impl_.e_ += sz;
  }
  // Append sz copies of value at the end.
  void M_uninitialized_fill_n_e(size_type sz, VT value) {
    D_uninitialized_fill_n_a(impl_.e_, sz, value);
    impl_.e_ += sz;
  }
  // dispatch: std::allocator fast path vs. allocator_traits path.
  void D_uninitialized_fill_n_a(T* dest, size_type sz) {
    if (usingStdAllocator::value) {
      S_uninitialized_fill_n(dest, sz);
    } else {
      S_uninitialized_fill_n_a(impl_, dest, sz);
    }
  }
  void D_uninitialized_fill_n_a(T* dest, size_type sz, VT value) {
    if (usingStdAllocator::value) {
      S_uninitialized_fill_n(dest, sz, value);
    } else {
      S_uninitialized_fill_n_a(impl_, dest, sz, value);
    }
  }
  370. // allocator
  371. template <typename... Args>
  372. static void S_uninitialized_fill_n_a(
  373. Allocator& a,
  374. T* dest,
  375. size_type sz,
  376. Args&&... args) {
  377. auto b = dest;
  378. auto e = dest + sz;
  379. try {
  380. for (; b != e; ++b) {
  381. std::allocator_traits<Allocator>::construct(
  382. a, b, std::forward<Args>(args)...);
  383. }
  384. } catch (...) {
  385. S_destroy_range_a(a, dest, b);
  386. throw;
  387. }
  388. }
  389. // optimized
  390. static void S_uninitialized_fill_n(T* dest, size_type n) {
  391. if (folly::IsZeroInitializable<T>::value) {
  392. if (LIKELY(n != 0)) {
  393. std::memset(dest, 0, sizeof(T) * n);
  394. }
  395. } else {
  396. auto b = dest;
  397. auto e = dest + n;
  398. try {
  399. for (; b != e; ++b) {
  400. S_construct(b);
  401. }
  402. } catch (...) {
  403. --b;
  404. for (; b >= dest; --b) {
  405. b->~T();
  406. }
  407. throw;
  408. }
  409. }
  410. }
  411. static void S_uninitialized_fill_n(T* dest, size_type n, const T& value) {
  412. auto b = dest;
  413. auto e = dest + n;
  414. try {
  415. for (; b != e; ++b) {
  416. S_construct(b, value);
  417. }
  418. } catch (...) {
  419. S_destroy_range(dest, b);
  420. throw;
  421. }
  422. }
  423. //---------------------------------------------------------------------------
  424. // uninitialized_copy
  425. // it is possible to add an optimization for the case where
  426. // It = move(T*) and IsRelocatable<T> and Is0Initiailizable<T>
  427. // wrappers
  428. template <typename It>
  429. void M_uninitialized_copy_e(It first, It last) {
  430. D_uninitialized_copy_a(impl_.e_, first, last);
  431. impl_.e_ += std::distance(first, last);
  432. }
  433. template <typename It>
  434. void M_uninitialized_move_e(It first, It last) {
  435. D_uninitialized_move_a(impl_.e_, first, last);
  436. impl_.e_ += std::distance(first, last);
  437. }
  438. // dispatch
  439. template <typename It>
  440. void D_uninitialized_copy_a(T* dest, It first, It last) {
  441. if (usingStdAllocator::value) {
  442. if (folly::is_trivially_copyable<T>::value) {
  443. S_uninitialized_copy_bits(dest, first, last);
  444. } else {
  445. S_uninitialized_copy(dest, first, last);
  446. }
  447. } else {
  448. S_uninitialized_copy_a(impl_, dest, first, last);
  449. }
  450. }
  451. template <typename It>
  452. void D_uninitialized_move_a(T* dest, It first, It last) {
  453. D_uninitialized_copy_a(
  454. dest, std::make_move_iterator(first), std::make_move_iterator(last));
  455. }
  456. // allocator
  457. template <typename It>
  458. static void S_uninitialized_copy_a(Allocator& a, T* dest, It first, It last) {
  459. auto b = dest;
  460. try {
  461. for (; first != last; ++first, ++b) {
  462. std::allocator_traits<Allocator>::construct(a, b, *first);
  463. }
  464. } catch (...) {
  465. S_destroy_range_a(a, dest, b);
  466. throw;
  467. }
  468. }
  469. // optimized
  470. template <typename It>
  471. static void S_uninitialized_copy(T* dest, It first, It last) {
  472. auto b = dest;
  473. try {
  474. for (; first != last; ++first, ++b) {
  475. S_construct(b, *first);
  476. }
  477. } catch (...) {
  478. S_destroy_range(dest, b);
  479. throw;
  480. }
  481. }
  // Trivially-copyable fast paths: bulk memcpy when the source is a raw
  // T* range (directly or behind a move_iterator); anything else falls
  // back to element-wise copy.
  static void
  S_uninitialized_copy_bits(T* dest, const T* first, const T* last) {
    if (last != first) {
      std::memcpy((void*)dest, (void*)first, (last - first) * sizeof(T));
    }
  }
  static void S_uninitialized_copy_bits(
      T* dest,
      std::move_iterator<T*> first,
      std::move_iterator<T*> last) {
    // Moving a trivially copyable type is a bitwise copy.
    T* bFirst = first.base();
    T* bLast = last.base();
    if (bLast != bFirst) {
      std::memcpy((void*)dest, (void*)bFirst, (bLast - bFirst) * sizeof(T));
    }
  }
  // Generic fallback: non-pointer iterators cannot be memcpy'd.
  template <typename It>
  static void S_uninitialized_copy_bits(T* dest, It first, It last) {
    S_uninitialized_copy(dest, first, last);
  }
  502. //---------------------------------------------------------------------------
  503. // copy_n
  504. // This function is "unsafe": it assumes that the iterator can be advanced at
  505. // least n times. However, as a private function, that unsafety is managed
  506. // wholly by fbvector itself.
  // Assign n elements from `first` into [dest, dest + n); returns the
  // advanced source iterator. Generic element-wise version; callers
  // guarantee the iterator can be advanced n times (see comment above).
  template <typename It>
  static It S_copy_n(T* dest, It first, size_type n) {
    auto e = dest + n;
    for (; dest != e; ++dest, ++first) {
      *dest = *first;
    }
    return first;
  }
  // Raw-pointer source: memcpy when T is trivially copyable.
  static const T* S_copy_n(T* dest, const T* first, size_type n) {
    if (is_trivially_copyable<T>::value) {
      std::memcpy((void*)dest, (void*)first, n * sizeof(T));
      return first + n;
    } else {
      return S_copy_n<const T*>(dest, first, n);
    }
  }
  // move_iterator over raw pointers: same memcpy shortcut (moving a
  // trivially copyable type is a bitwise copy).
  static std::move_iterator<T*>
  S_copy_n(T* dest, std::move_iterator<T*> mIt, size_type n) {
    if (is_trivially_copyable<T>::value) {
      T* first = mIt.base();
      std::memcpy((void*)dest, (void*)first, n * sizeof(T));
      return std::make_move_iterator(first + n);
    } else {
      return S_copy_n<std::move_iterator<T*>>(dest, mIt, n);
    }
  }
  533. //===========================================================================
  534. //---------------------------------------------------------------------------
  535. // relocation helpers
  536. private:
  537. // Relocation is divided into three parts:
  538. //
  539. // 1: relocate_move
  540. // Performs the actual movement of data from point a to point b.
  541. //
  542. // 2: relocate_done
  543. // Destroys the old data.
  544. //
  545. // 3: relocate_undo
  546. // Destoys the new data and restores the old data.
  547. //
  548. // The three steps are used because there may be an exception after part 1
  549. // has completed. If that is the case, then relocate_undo can nullify the
  550. // initial move. Otherwise, relocate_done performs the last bit of tidying
  551. // up.
  552. //
  553. // The relocation trio may use either memcpy, move, or copy. It is decided
  554. // by the following case statement:
  555. //
  556. // IsRelocatable && usingStdAllocator -> memcpy
  557. // has_nothrow_move && usingStdAllocator -> move
  558. // cannot copy -> move
  559. // default -> copy
  560. //
  561. // If the class is non-copyable then it must be movable. However, if the
  562. // move constructor is not noexcept, i.e. an error could be thrown, then
  563. // relocate_undo will be unable to restore the old data, for fear of a
  564. // second exception being thrown. This is a known and unavoidable
  565. // deficiency. In lieu of a strong exception guarantee, relocate_undo does
  566. // the next best thing: it provides a weak exception guarantee by
  567. // destorying the new data, but leaving the old data in an indeterminate
  568. // state. Note that that indeterminate state will be valid, since the
  569. // old data has not been destroyed; it has merely been the source of a
  570. // move, which is required to leave the source in a valid state.
  571. // wrappers
  // Relocate the whole vector into newB: move the data (part 1), then
  // tidy up the old range (part 2). See the long comment above for the
  // move/done/undo protocol and its exception guarantees.
  void M_relocate(T* newB) {
    relocate_move(newB, impl_.b_, impl_.e_);
    relocate_done(newB, impl_.b_, impl_.e_);
  }
  // dispatch type traits (see the case statement in the comment above)
  typedef bool_constant<
      folly::IsRelocatable<T>::value && usingStdAllocator::value>
      relocate_use_memcpy;
  typedef bool_constant<
      (std::is_nothrow_move_constructible<T>::value &&
       usingStdAllocator::value) ||
      !std::is_copy_constructible<T>::value>
      relocate_use_move;
  // move
  void relocate_move(T* dest, T* first, T* last) {
    relocate_move_or_memcpy(dest, first, last, relocate_use_memcpy());
  }
  // Relocatable + std::allocator: a bitwise copy IS a full relocation.
  void relocate_move_or_memcpy(T* dest, T* first, T* last, std::true_type) {
    if (first != nullptr) {
      std::memcpy((void*)dest, (void*)first, (last - first) * sizeof(T));
    }
  }
  void relocate_move_or_memcpy(T* dest, T* first, T* last, std::false_type) {
    relocate_move_or_copy(dest, first, last, relocate_use_move());
  }
  void relocate_move_or_copy(T* dest, T* first, T* last, std::true_type) {
    D_uninitialized_move_a(dest, first, last);
  }
  void relocate_move_or_copy(T* dest, T* first, T* last, std::false_type) {
    D_uninitialized_copy_a(dest, first, last);
  }
  // done
  void relocate_done(T* /*dest*/, T* first, T* last) noexcept {
    if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
      // used memcpy; data has been relocated, do not call destructor
    } else {
      D_destroy_range_a(first, last);
    }
  }
  // undo
  void relocate_undo(T* dest, T* first, T* last) noexcept {
    if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
      // used memcpy, old data is still valid, nothing to do
    } else if (
        std::is_nothrow_move_constructible<T>::value &&
        usingStdAllocator::value) {
      // noexcept move everything back, aka relocate_move
      relocate_move(first, dest, dest + (last - first));
    } else if (!std::is_copy_constructible<T>::value) {
      // weak guarantee
      D_destroy_range_a(dest, dest + (last - first));
    } else {
      // used copy, old data is still valid
      D_destroy_range_a(dest, dest + (last - first));
    }
  }
  628. //===========================================================================
  629. //---------------------------------------------------------------------------
  630. // construct/copy/destroy
  631. public:
  fbvector() = default;
  explicit fbvector(const Allocator& a) : impl_(a) {}
  // n default-constructed elements.
  explicit fbvector(size_type n, const Allocator& a = Allocator())
      : impl_(n, a) {
    M_uninitialized_fill_n_e(n);
  }
  // n copies of value.
  fbvector(size_type n, VT value, const Allocator& a = Allocator())
      : impl_(n, a) {
    M_uninitialized_fill_n_e(n, value);
  }
  // Range constructor; tag-dispatches on the iterator category (forward
  // iterators can be measured up front, input iterators cannot — see the
  // private overloads below).
  template <
      class It,
      class Category = typename std::iterator_traits<It>::iterator_category>
  fbvector(It first, It last, const Allocator& a = Allocator())
      : fbvector(first, last, a, Category()) {}
  // Copy constructor honors select_on_container_copy_construction.
  fbvector(const fbvector& other)
      : impl_(
            other.size(),
            A::select_on_container_copy_construction(other.impl_)) {
    M_uninitialized_copy_e(other.begin(), other.end());
  }
  // Move constructor steals the buffer wholesale (never throws).
  fbvector(fbvector&& other) noexcept : impl_(std::move(other.impl_)) {}
  fbvector(const fbvector& other, const Allocator& a)
      : fbvector(other.begin(), other.end(), a) {}
  // Allocator-extended move: the buffer can be stolen only if the
  // allocators compare equal; otherwise elements are moved one by one,
  // which may throw.
  /* may throw */ fbvector(fbvector&& other, const Allocator& a) : impl_(a) {
    if (impl_ == other.impl_) {
      impl_.swapData(other.impl_);
    } else {
      impl_.init(other.size());
      M_uninitialized_move_e(other.begin(), other.end());
    }
  }
  fbvector(std::initializer_list<T> il, const Allocator& a = Allocator())
      : fbvector(il.begin(), il.end(), a) {}
  ~fbvector() = default; // the cleanup occurs in impl_

  // Copy assignment. Self-assignment is a no-op. With a propagating
  // custom allocator (POCCA), the allocator is copied first — resetting
  // our storage beforehand if the incoming allocator could not free it —
  // then the elements are assigned.
  fbvector& operator=(const fbvector& other) {
    if (UNLIKELY(this == &other)) {
      return *this;
    }
    if (!usingStdAllocator::value &&
        A::propagate_on_container_copy_assignment::value) {
      if (impl_ != other.impl_) {
        // can't use other's different allocator to clean up self
        impl_.reset();
      }
      (Allocator&)impl_ = (Allocator&)other.impl_;
    }
    assign(other.begin(), other.end());
    return *this;
  }

  // Move assignment: dispatches on moveIsSwap — either a cheap buffer
  // swap or an element-wise move (see moveFrom overloads).
  fbvector& operator=(fbvector&& other) {
    if (UNLIKELY(this == &other)) {
      return *this;
    }
    moveFrom(std::move(other), moveIsSwap());
    return *this;
  }

  fbvector& operator=(std::initializer_list<T> il) {
    assign(il.begin(), il.end());
    return *this;
  }
  // Range assign; tag-dispatch on iterator category (see the private
  // overloads further down).
  template <
      class It,
      class Category = typename std::iterator_traits<It>::iterator_category>
  void assign(It first, It last) {
    assign(first, last, Category());
  }

  // Fill assign. Guards against `value` aliasing one of our own elements
  // before the storage holding it is discarded.
  void assign(size_type n, VT value) {
    if (n > capacity()) {
      // Not enough space. Do not reserve in place, since we will
      // discard the old values anyways.
      if (dataIsInternalAndNotVT(value)) {
        // `value` lives inside this vector — copy it out before reset.
        T copy(std::move(value));
        impl_.reset(n);
        M_uninitialized_fill_n_e(n, copy);
      } else {
        impl_.reset(n);
        M_uninitialized_fill_n_e(n, value);
      }
    } else if (n <= size()) {
      // Shrinking: overwrite the first n elements, destroy the tail.
      auto newE = impl_.b_ + n;
      std::fill(impl_.b_, newE, value);
      M_destroy_range_e(newE);
    } else {
      // Growing within capacity: overwrite existing, construct the rest.
      std::fill(impl_.b_, impl_.e_, value);
      M_uninitialized_fill_n_e(n - size(), value);
    }
  }

  void assign(std::initializer_list<T> il) {
    assign(il.begin(), il.end());
  }

  // Returns a copy of the allocator (Impl derives from Allocator).
  allocator_type get_allocator() const noexcept {
    return impl_;
  }
  726. private:
  727. // contract dispatch for iterator types fbvector(It first, It last)
  // contract dispatch for iterator types fbvector(It first, It last)
  // Forward iterators: the count is known, so size the buffer once with
  // std::distance and bulk-copy.
  template <class ForwardIterator>
  fbvector(
      ForwardIterator first,
      ForwardIterator last,
      const Allocator& a,
      std::forward_iterator_tag)
      : impl_(size_type(std::distance(first, last)), a) {
    M_uninitialized_copy_e(first, last);
  }

  // Input iterators: single pass only, so append element-by-element and
  // let emplace_back handle growth.
  template <class InputIterator>
  fbvector(
      InputIterator first,
      InputIterator last,
      const Allocator& a,
      std::input_iterator_tag)
      : impl_(a) {
    for (; first != last; ++first) {
      emplace_back(*first);
    }
  }
  748. // contract dispatch for allocator movement in operator=(fbvector&&)
  // contract dispatch for allocator movement in operator=(fbvector&&)
  // true_type: the allocator may be moved along with the storage, so a
  // wholesale swap of impl_ suffices.
  void moveFrom(fbvector&& other, std::true_type) {
    swap(impl_, other.impl_);
  }
  // false_type: keep our own allocator. Equal allocators can still trade
  // data pointers; unequal ones force an element-wise move into storage
  // obtained from our allocator.
  void moveFrom(fbvector&& other, std::false_type) {
    if (impl_ == other.impl_) {
      impl_.swapData(other.impl_);
    } else {
      impl_.reset(other.size());
      M_uninitialized_move_e(other.begin(), other.end());
    }
  }
  760. // contract dispatch for iterator types in assign(It first, It last)
  // contract dispatch for iterator types in assign(It first, It last)
  // Forward iterators: count known up front; pick the cheapest of
  // reallocate / shrink-in-place / overwrite-then-append.
  template <class ForwardIterator>
  void assign(
      ForwardIterator first,
      ForwardIterator last,
      std::forward_iterator_tag) {
    const auto newSize = size_type(std::distance(first, last));
    if (newSize > capacity()) {
      // Old elements are discarded anyway, so reset to fresh storage.
      impl_.reset(newSize);
      M_uninitialized_copy_e(first, last);
    } else if (newSize <= size()) {
      // Shrinking: copy over the prefix, destroy the surplus tail.
      auto newEnd = std::copy(first, last, impl_.b_);
      M_destroy_range_e(newEnd);
    } else {
      // Growing within capacity: assign over the live elements, then
      // uninitialized-copy the remainder of the source range.
      auto mid = S_copy_n(impl_.b_, first, size());
      M_uninitialized_copy_e<decltype(last)>(mid, last);
    }
  }
  // Input iterators: overwrite existing elements while both ranges last,
  // then either trim our leftover tail or append the source's remainder.
  template <class InputIterator>
  void
  assign(InputIterator first, InputIterator last, std::input_iterator_tag) {
    auto p = impl_.b_;
    for (; first != last && p != impl_.e_; ++first, ++p) {
      *p = *first;
    }
    if (p != impl_.e_) {
      // Source exhausted first: destroy our surplus suffix.
      M_destroy_range_e(p);
    } else {
      for (; first != last; ++first) {
        emplace_back(*first);
      }
    }
  }
  793. // contract dispatch for aliasing under VT optimization
  794. bool dataIsInternalAndNotVT(const T& t) {
  795. if (should_pass_by_value::value) {
  796. return false;
  797. }
  798. return dataIsInternal(t);
  799. }
  800. bool dataIsInternal(const T& t) {
  801. return UNLIKELY(
  802. impl_.b_ <= std::addressof(t) && std::addressof(t) < impl_.e_);
  803. }
  804. //===========================================================================
  805. //---------------------------------------------------------------------------
  806. // iterators
  807. public:
  // Iterators are raw pointers into the contiguous storage [b_, e_);
  // any reallocation invalidates all of them.
  iterator begin() noexcept {
    return impl_.b_;
  }
  const_iterator begin() const noexcept {
    return impl_.b_;
  }
  iterator end() noexcept {
    return impl_.e_;
  }
  const_iterator end() const noexcept {
    return impl_.e_;
  }
  reverse_iterator rbegin() noexcept {
    return reverse_iterator(end());
  }
  const_reverse_iterator rbegin() const noexcept {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() noexcept {
    return reverse_iterator(begin());
  }
  const_reverse_iterator rend() const noexcept {
    return const_reverse_iterator(begin());
  }
  const_iterator cbegin() const noexcept {
    return impl_.b_;
  }
  const_iterator cend() const noexcept {
    return impl_.e_;
  }
  const_reverse_iterator crbegin() const noexcept {
    return const_reverse_iterator(end());
  }
  const_reverse_iterator crend() const noexcept {
    return const_reverse_iterator(begin());
  }
  844. //===========================================================================
  845. //---------------------------------------------------------------------------
  846. // capacity
  847. public:
  size_type size() const noexcept {
    return size_type(impl_.e_ - impl_.b_);
  }
  // Theoretical limit only; no allocation this large can actually succeed.
  size_type max_size() const noexcept {
    // good luck gettin' there
    return ~size_type(0);
  }
  // Resize to n elements, destroying the tail or appending
  // value-initialized elements as needed.
  void resize(size_type n) {
    if (n <= size()) {
      M_destroy_range_e(impl_.b_ + n);
    } else {
      reserve(n);
      M_uninitialized_fill_n_e(n - size());
    }
  }
  // Resize to n elements, filling new slots with copies of t. If t
  // aliases our own buffer and growth forces a reallocation, copy it out
  // first so reserve() does not invalidate it.
  void resize(size_type n, VT t) {
    if (n <= size()) {
      M_destroy_range_e(impl_.b_ + n);
    } else if (dataIsInternalAndNotVT(t) && n > capacity()) {
      T copy(t);
      reserve(n);
      M_uninitialized_fill_n_e(n - size(), copy);
    } else {
      reserve(n);
      M_uninitialized_fill_n_e(n - size(), t);
    }
  }
  // z_ marks the end of the allocation, e_ the end of the live elements.
  size_type capacity() const noexcept {
    return size_type(impl_.z_ - impl_.b_);
  }
  bool empty() const noexcept {
    return impl_.b_ == impl_.e_;
  }
  // Grow capacity to at least n (never shrinks). Tries an in-place
  // jemalloc expansion first, then falls back to allocate + relocate +
  // free; the allocate/relocate path gives the strong guarantee.
  void reserve(size_type n) {
    if (n <= capacity()) {
      return;
    }
    if (impl_.b_ && reserve_in_place(n)) {
      return;
    }
    // Round the request up to an allocator-friendly size.
    auto newCap = folly::goodMallocSize(n * sizeof(T)) / sizeof(T);
    auto newB = M_allocate(newCap);
    try {
      M_relocate(newB);
    } catch (...) {
      M_deallocate(newB, newCap);
      throw;
    }
    if (impl_.b_) {
      M_deallocate(impl_.b_, size_type(impl_.z_ - impl_.b_));
    }
    // Note: e_ must be rebased before b_ is overwritten.
    impl_.z_ = newB + newCap;
    impl_.e_ = newB + (impl_.e_ - impl_.b_);
    impl_.b_ = newB;
  }
  903. void shrink_to_fit() noexcept {
  904. if (empty()) {
  905. impl_.reset();
  906. return;
  907. }
  908. auto const newCapacityBytes = folly::goodMallocSize(size() * sizeof(T));
  909. auto const newCap = newCapacityBytes / sizeof(T);
  910. auto const oldCap = capacity();
  911. if (newCap >= oldCap) {
  912. return;
  913. }
  914. void* p = impl_.b_;
  915. // xallocx() will shrink to precisely newCapacityBytes (which was generated
  916. // by goodMallocSize()) if it successfully shrinks in place.
  917. if ((usingJEMalloc() && usingStdAllocator::value) &&
  918. newCapacityBytes >= folly::jemallocMinInPlaceExpandable &&
  919. xallocx(p, newCapacityBytes, 0, 0) == newCapacityBytes) {
  920. impl_.z_ += newCap - oldCap;
  921. } else {
  922. T* newB; // intentionally uninitialized
  923. try {
  924. newB = M_allocate(newCap);
  925. try {
  926. M_relocate(newB);
  927. } catch (...) {
  928. M_deallocate(newB, newCap);
  929. return; // swallow the error
  930. }
  931. } catch (...) {
  932. return;
  933. }
  934. if (impl_.b_) {
  935. M_deallocate(impl_.b_, size_type(impl_.z_ - impl_.b_));
  936. }
  937. impl_.z_ = newB + newCap;
  938. impl_.e_ = newB + (impl_.e_ - impl_.b_);
  939. impl_.b_ = newB;
  940. }
  941. }
  942. private:
  // Try to grow the current allocation in place via jemalloc's xallocx.
  // Returns true (and bumps z_) on success; false means the caller must
  // reallocate.
  bool reserve_in_place(size_type n) {
    if (!usingStdAllocator::value || !usingJEMalloc()) {
      return false;
    }
    // jemalloc can never grow in place blocks smaller than 4096 bytes.
    if ((impl_.z_ - impl_.b_) * sizeof(T) <
        folly::jemallocMinInPlaceExpandable) {
      return false;
    }
    auto const newCapacityBytes = folly::goodMallocSize(n * sizeof(T));
    void* p = impl_.b_;
    if (xallocx(p, newCapacityBytes, 0, 0) == newCapacityBytes) {
      impl_.z_ = impl_.b_ + newCapacityBytes / sizeof(T);
      return true;
    }
    return false;
  }
  960. //===========================================================================
  961. //---------------------------------------------------------------------------
  962. // element access
  963. public:
  // operator[] is unchecked in release builds, like std::vector.
  reference operator[](size_type n) {
    assert(n < size());
    return impl_.b_[n];
  }
  const_reference operator[](size_type n) const {
    assert(n < size());
    return impl_.b_[n];
  }
  // Bounds-checked access; throws std::out_of_range when n >= size().
  const_reference at(size_type n) const {
    if (UNLIKELY(n >= size())) {
      throw_exception<std::out_of_range>(
          "fbvector: index is greater than size.");
    }
    return (*this)[n];
  }
  reference at(size_type n) {
    // Delegate to the const overload and cast the result back.
    auto const& cThis = *this;
    return const_cast<reference>(cThis.at(n));
  }
  // front()/back() on an empty vector are undefined behavior (asserted in
  // debug builds only).
  reference front() {
    assert(!empty());
    return *impl_.b_;
  }
  const_reference front() const {
    assert(!empty());
    return *impl_.b_;
  }
  reference back() {
    assert(!empty());
    return impl_.e_[-1];
  }
  const_reference back() const {
    assert(!empty());
    return impl_.e_[-1];
  }
  999. //===========================================================================
  1000. //---------------------------------------------------------------------------
  1001. // data access
  1002. public:
  // Raw pointer to the contiguous storage; null when nothing is allocated.
  T* data() noexcept {
    return impl_.b_;
  }
  const T* data() const noexcept {
    return impl_.b_;
  }
  1009. //===========================================================================
  1010. //---------------------------------------------------------------------------
  1011. // modifiers (common)
  1012. public:
  // Append, constructing in place. Fast path when spare capacity remains;
  // otherwise the out-of-line aux routine handles growth.
  template <class... Args>
  void emplace_back(Args&&... args) {
    if (impl_.e_ != impl_.z_) {
      M_construct(impl_.e_, std::forward<Args>(args)...);
      ++impl_.e_;
    } else {
      emplace_back_aux(std::forward<Args>(args)...);
    }
  }
  void push_back(const T& value) {
    if (impl_.e_ != impl_.z_) {
      M_construct(impl_.e_, value);
      ++impl_.e_;
    } else {
      emplace_back_aux(value);
    }
  }
  void push_back(T&& value) {
    if (impl_.e_ != impl_.z_) {
      M_construct(impl_.e_, std::move(value));
      ++impl_.e_;
    } else {
      emplace_back_aux(std::move(value));
    }
  }
  void pop_back() {
    assert(!empty());
    --impl_.e_;
    M_destroy(impl_.e_);
  }
  // Swap. If the allocator propagates on swap, exchange allocators and
  // buffers together; otherwise exchange only the data pointers.
  void swap(fbvector& other) noexcept {
    if (!usingStdAllocator::value && A::propagate_on_container_swap::value) {
      swap(impl_, other.impl_);
    } else {
      impl_.swapData(other.impl_);
    }
  }
  // Destroys all elements; capacity is retained.
  void clear() noexcept {
    M_destroy_range_e(impl_.b_);
  }
  1053. private:
  1054. // std::vector implements a similar function with a different growth
  1055. // strategy: empty() ? 1 : capacity() * 2.
  1056. //
  1057. // fbvector grows differently on two counts:
  1058. //
  1059. // (1) initial size
  1060. // Instead of growing to size 1 from empty, fbvector allocates at least
  1061. // 64 bytes. You may still use reserve to reserve a lesser amount of
  1062. // memory.
  1063. // (2) 1.5x
  1064. // For medium-sized vectors, the growth strategy is 1.5x. See the docs
  1065. // for details.
  1066. // This does not apply to very small or very large fbvectors. This is a
  1067. // heuristic.
  1068. // A nice addition to fbvector would be the capability of having a user-
  1069. // defined growth strategy, probably as part of the allocator.
  1070. //
  size_type computePushBackCapacity() const {
    if (capacity() == 0) {
      // First allocation: at least 64 bytes worth of elements.
      return std::max(64 / sizeof(T), size_type(1));
    }
    if (capacity() < folly::jemallocMinInPlaceExpandable / sizeof(T)) {
      // Small vectors: double (too small for in-place expansion anyway).
      return capacity() * 2;
    }
    if (capacity() > 4096 * 32 / sizeof(T)) {
      // Very large vectors: double again; see the heuristic notes above.
      return capacity() * 2;
    }
    // Medium vectors: grow by 1.5x (rounded up) to limit memory waste.
    return (capacity() * 3 + 1) / 2;
  }
  // Out-of-line slow path for emplace_back when the buffer is full;
  // defined after the class.
  template <class... Args>
  void emplace_back_aux(Args&&... args);
  1085. //===========================================================================
  1086. //---------------------------------------------------------------------------
  1087. // modifiers (erase)
  1088. public:
  iterator erase(const_iterator position) {
    return erase(position, position + 1);
  }
  // Erase [first, last). Relocatable types are compacted with raw
  // memcpy/memmove after destroying the erased range; otherwise the tail
  // is move-assigned down and the leftover suffix destroyed.
  iterator erase(const_iterator first, const_iterator last) {
    assert(isValid(first) && isValid(last));
    assert(first <= last);
    if (first != last) {
      if (last == end()) {
        // Erasing a suffix: just destroy it.
        M_destroy_range_e((iterator)first);
      } else {
        if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
          D_destroy_range_a((iterator)first, (iterator)last);
          if (last - first >= cend() - last) {
            // The gap is at least as large as the tail, so source and
            // destination cannot overlap: memcpy is safe.
            std::memcpy((void*)first, (void*)last, (cend() - last) * sizeof(T));
          } else {
            std::memmove((iterator)first, last, (cend() - last) * sizeof(T));
          }
          impl_.e_ -= (last - first);
        } else {
          // Shift the tail down by move-assignment, then destroy the
          // now-moved-from suffix.
          std::copy(
              std::make_move_iterator((iterator)last),
              std::make_move_iterator(end()),
              (iterator)first);
          auto newEnd = impl_.e_ - std::distance(first, last);
          M_destroy_range_e(newEnd);
        }
      }
    }
    return (iterator)first;
  }
  1119. //===========================================================================
  1120. //---------------------------------------------------------------------------
  1121. // modifiers (insert)
  1122. private: // we have the private section first because it defines some macros
  // True iff `it` points into [cbegin(), cend()]; cend() itself is a
  // valid insertion position.
  bool isValid(const_iterator it) {
    return cbegin() <= it && it <= cend();
  }
  // Capacity to allocate for an insertion of n elements: at least the
  // normal growth step, rounded up to an allocator-friendly size.
  size_type computeInsertCapacity(size_type n) {
    size_type nc = std::max(computePushBackCapacity(), size() + n);
    size_type ac = folly::goodMallocSize(nc * sizeof(T)) / sizeof(T);
    return ac;
  }
  1131. //---------------------------------------------------------------------------
  1132. //
  1133. // make_window takes an fbvector, and creates an uninitialized gap (a
  1134. // window) at the given position, of the given size. The fbvector must
  1135. // have enough capacity.
  1136. //
  1137. // Explanation by picture.
  1138. //
  1139. // 123456789______
  1140. // ^
  1141. // make_window here of size 3
  1142. //
  1143. // 1234___56789___
  1144. //
  1145. // If something goes wrong and the window must be destroyed, use
  1146. // undo_window to provide a weak exception guarantee. It destroys
  1147. // the right ledge.
  1148. //
  1149. // 1234___________
  1150. //
  1151. //---------------------------------------------------------------------------
  1152. //
  1153. // wrap_frame takes an inverse window and relocates an fbvector around it.
  1154. // The fbvector must have at least as many elements as the left ledge.
  1155. //
  1156. // Explanation by picture.
  1157. //
  1158. // START
  1159. // fbvector: inverse window:
  1160. // 123456789______ _____abcde_______
  1161. // [idx][ n ]
  1162. //
  1163. // RESULT
  1164. // _______________ 12345abcde6789___
  1165. //
  1166. //---------------------------------------------------------------------------
  1167. //
  1168. // insert_use_fresh_memory returns true iff the fbvector should use a fresh
  1169. // block of memory for the insertion. If the fbvector does not have enough
  1170. // spare capacity, then it must return true. Otherwise either true or false
  1171. // may be returned.
  1172. //
  1173. //---------------------------------------------------------------------------
  1174. //
  1175. // These three functions, make_window, wrap_frame, and
  1176. // insert_use_fresh_memory, can be combined into a uniform interface.
  1177. // Since that interface involves a lot of case-work, it is built into
  1178. // some macros: FOLLY_FBVECTOR_INSERT_(PRE|START|TRY|END)
  1179. // Macros are used in an attempt to let GCC perform better optimizations,
  1180. // especially control flow optimization.
  1181. //
  1182. //---------------------------------------------------------------------------
  1183. // window
  // Open an uninitialized gap of n slots at `position` (see the picture
  // above). The vector must already have enough capacity.
  void make_window(iterator position, size_type n) {
    // The result is guaranteed to be non-negative, so use an unsigned type:
    size_type tail = size_type(std::distance(position, impl_.e_));
    if (tail <= n) {
      // The whole tail lands past the current end, in uninitialized
      // space: a straight relocation suffices.
      relocate_move(position + n, position, impl_.e_);
      relocate_done(position + n, position, impl_.e_);
      impl_.e_ += n;
    } else {
      if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
        // Relocatable: shift the tail up with one overlapping memmove.
        std::memmove(position + n, position, tail * sizeof(T));
        impl_.e_ += n;
      } else {
        // Move-construct the last n elements into fresh space, then shift
        // the remaining tail up by backward move-assignment.
        D_uninitialized_move_a(impl_.e_, impl_.e_ - n, impl_.e_);
        try {
          std::copy_backward(
              std::make_move_iterator(position),
              std::make_move_iterator(impl_.e_ - n),
              impl_.e_);
        } catch (...) {
          D_destroy_range_a(impl_.e_ - n, impl_.e_ + n);
          impl_.e_ -= n;
          throw;
        }
        impl_.e_ += n;
        // Leave the window slots destroyed (uninitialized) for the caller
        // to construct into.
        D_destroy_range_a(position, position + n);
      }
    }
  }
  // Undo make_window after a failure: destroy the right ledge so only the
  // elements before `position` survive (weak exception guarantee).
  void undo_window(iterator position, size_type n) noexcept {
    D_destroy_range_a(position + n, impl_.e_);
    impl_.e_ = position;
  }
  1216. //---------------------------------------------------------------------------
  1217. // frame
  // Relocate this vector's elements around an inverse window at `ledge`:
  // the first idx elements go in front of the window, the rest behind it
  // (see the picture above). On failure the first relocation is undone.
  void wrap_frame(T* ledge, size_type idx, size_type n) {
    assert(size() >= idx);
    assert(n != 0);
    relocate_move(ledge, impl_.b_, impl_.b_ + idx);
    try {
      relocate_move(ledge + idx + n, impl_.b_ + idx, impl_.e_);
    } catch (...) {
      relocate_undo(ledge, impl_.b_, impl_.b_ + idx);
      throw;
    }
    // Both halves relocated successfully: commit (destroy the sources
    // where relocation is not trivial).
    relocate_done(ledge, impl_.b_, impl_.b_ + idx);
    relocate_done(ledge + idx + n, impl_.b_ + idx, impl_.e_);
  }
  1231. //---------------------------------------------------------------------------
  1232. // use fresh?
  1233. bool insert_use_fresh(bool at_end, size_type n) {
  1234. if (at_end) {
  1235. if (size() + n <= capacity()) {
  1236. return false;
  1237. }
  1238. if (reserve_in_place(size() + n)) {
  1239. return false;
  1240. }
  1241. return true;
  1242. }
  1243. if (size() + n > capacity()) {
  1244. return true;
  1245. }
  1246. return false;
  1247. }
  1248. //---------------------------------------------------------------------------
  1249. // interface
  // Shared implementation behind every insert/emplace overload. The four
  // functors customize, in order: aliasing detection, the fallback for an
  // aliasing source, construction of the new elements, and their cleanup
  // if the subsequent relocation throws.
  template <
      typename IsInternalFunc,
      typename InsertInternalFunc,
      typename ConstructFunc,
      typename DestroyFunc>
  iterator do_real_insert(
      const_iterator cpos,
      size_type n,
      IsInternalFunc&& isInternalFunc,
      InsertInternalFunc&& insertInternalFunc,
      ConstructFunc&& constructFunc,
      DestroyFunc&& destroyFunc) {
    if (n == 0) {
      return iterator(cpos);
    }
    bool at_end = cpos == cend();
    bool fresh = insert_use_fresh(at_end, n);
    if (!at_end) {
      if (!fresh && isInternalFunc()) {
        // check for internal data (technically not required by the standard)
        return insertInternalFunc();
      }
      assert(isValid(cpos));
    }
    T* position = const_cast<T*>(cpos);
    size_type idx = size_type(std::distance(impl_.b_, position));
    T* b;
    size_type newCap; /* intentionally uninitialized */
    if (fresh) {
      newCap = computeInsertCapacity(n);
      b = M_allocate(newCap);
    } else {
      if (!at_end) {
        // Open an uninitialized gap of n slots at `position`.
        make_window(position, n);
      } else {
        impl_.e_ += n;
      }
      b = impl_.b_;
    }
    T* start = b + idx;
    try {
      // construct the inserted elements
      constructFunc(start);
    } catch (...) {
      // Roll back to the pre-insert state before rethrowing.
      if (fresh) {
        M_deallocate(b, newCap);
      } else {
        if (!at_end) {
          undo_window(position, n);
        } else {
          impl_.e_ -= n;
        }
      }
      throw;
    }
    if (fresh) {
      try {
        // Relocate the old elements around the newly constructed ones.
        wrap_frame(b, idx, n);
      } catch (...) {
        // delete the inserted elements (exception has been thrown)
        destroyFunc(start);
        M_deallocate(b, newCap);
        throw;
      }
      if (impl_.b_) {
        M_deallocate(impl_.b_, capacity());
      }
      impl_.set(b, size() + n, newCap);
      return impl_.b_ + idx;
    } else {
      return position;
    }
  }
  1323. public:
  // Construct a single element in place at cpos.
  template <class... Args>
  iterator emplace(const_iterator cpos, Args&&... args) {
    return do_real_insert(
        cpos,
        1,
        [&] { return false; },
        [&] { return iterator{}; },
        [&](iterator start) {
          M_construct(start, std::forward<Args>(args)...);
        },
        [&](iterator start) { M_destroy(start); });
  }
  // Insert a copy of value; falls back to inserting a temporary when
  // value aliases our own storage.
  iterator insert(const_iterator cpos, const T& value) {
    return do_real_insert(
        cpos,
        1,
        [&] { return dataIsInternal(value); },
        [&] { return insert(cpos, T(value)); },
        [&](iterator start) { M_construct(start, value); },
        [&](iterator start) { M_destroy(start); });
  }
  iterator insert(const_iterator cpos, T&& value) {
    return do_real_insert(
        cpos,
        1,
        [&] { return dataIsInternal(value); },
        [&] { return insert(cpos, T(std::move(value))); },
        [&](iterator start) { M_construct(start, std::move(value)); },
        [&](iterator start) { M_destroy(start); });
  }
  // Insert n copies of value.
  iterator insert(const_iterator cpos, size_type n, VT value) {
    return do_real_insert(
        cpos,
        n,
        [&] { return dataIsInternalAndNotVT(value); },
        [&] { return insert(cpos, n, T(value)); },
        [&](iterator start) { D_uninitialized_fill_n_a(start, n, value); },
        [&](iterator start) { D_destroy_range_a(start, start + n); });
  }
  // Range insert; tag-dispatches on iterator category (dispatch targets
  // are private, below).
  template <
      class It,
      class Category = typename std::iterator_traits<It>::iterator_category>
  iterator insert(const_iterator cpos, It first, It last) {
    return insert(cpos, first, last, Category());
  }
  iterator insert(const_iterator cpos, std::initializer_list<T> il) {
    return insert(cpos, il.begin(), il.end());
  }
  1372. //---------------------------------------------------------------------------
  1373. // insert dispatch for iterator types
  1374. private:
  // Forward iterators: the count is known up front, so defer to
  // do_real_insert with a bulk uninitialized-copy.
  template <class FIt>
  iterator
  insert(const_iterator cpos, FIt first, FIt last, std::forward_iterator_tag) {
    size_type n = size_type(std::distance(first, last));
    return do_real_insert(
        cpos,
        n,
        [&] { return false; },
        [&] { return iterator{}; },
        [&](iterator start) { D_uninitialized_copy_a(start, first, last); },
        [&](iterator start) { D_destroy_range_a(start, start + n); });
  }
  // Input iterators: single pass, unknown count. Move the tail out into a
  // temporary vector, append the new elements at the end, then move the
  // tail back.
  template <class IIt>
  iterator
  insert(const_iterator cpos, IIt first, IIt last, std::input_iterator_tag) {
    T* position = const_cast<T*>(cpos);
    assert(isValid(position));
    size_type idx = std::distance(begin(), position);
    fbvector storage(
        std::make_move_iterator(position),
        std::make_move_iterator(end()),
        A::select_on_container_copy_construction(impl_));
    M_destroy_range_e(position);
    for (; first != last; ++first) {
      emplace_back(*first);
    }
    insert(
        cend(),
        std::make_move_iterator(storage.begin()),
        std::make_move_iterator(storage.end()));
    return impl_.b_ + idx;
  }
  1407. //===========================================================================
  1408. //---------------------------------------------------------------------------
  1409. // lexicographical functions
  1410. public:
  1411. bool operator==(const fbvector& other) const {
  1412. return size() == other.size() && std::equal(begin(), end(), other.begin());
  1413. }
  1414. bool operator!=(const fbvector& other) const {
  1415. return !(*this == other);
  1416. }
  1417. bool operator<(const fbvector& other) const {
  1418. return std::lexicographical_compare(
  1419. begin(), end(), other.begin(), other.end());
  1420. }
  1421. bool operator>(const fbvector& other) const {
  1422. return other < *this;
  1423. }
  1424. bool operator<=(const fbvector& other) const {
  1425. return !(*this > other);
  1426. }
  1427. bool operator>=(const fbvector& other) const {
  1428. return !(*this < other);
  1429. }
  1430. //===========================================================================
  1431. //---------------------------------------------------------------------------
  1432. // friends
  1433. private:
  // relinquish()/attach() (free functions defined below) manipulate impl_
  // directly to transfer raw buffer ownership in and out of the vector.
  template <class _T, class _A>
  friend _T* relinquish(fbvector<_T, _A>&);
  template <class _T, class _A>
  friend void attach(fbvector<_T, _A>&, _T* data, size_t sz, size_t cap);
  1438. }; // class fbvector
  1439. //=============================================================================
  1440. //-----------------------------------------------------------------------------
  1441. // outlined functions (gcc, you finicky compiler you)
// Out-of-line slow path of emplace_back: the buffer is full, so either
// expand in place (jemalloc) or allocate a new buffer and relocate.
template <typename T, typename Allocator>
template <class... Args>
void fbvector<T, Allocator>::emplace_back_aux(Args&&... args) {
  size_type byte_sz =
      folly::goodMallocSize(computePushBackCapacity() * sizeof(T));
  if (usingStdAllocator::value && usingJEMalloc() &&
      ((impl_.z_ - impl_.b_) * sizeof(T) >=
       folly::jemallocMinInPlaceExpandable)) {
    // Try to reserve in place.
    // Ask xallocx to allocate in place at least size()+1 and at most sz space.
    // xallocx will allocate as much as possible within that range, which
    // is the best possible outcome: if sz space is available, take it all,
    // otherwise take as much as possible. If nothing is available, then fail.
    // In this fashion, we never relocate if there is a possibility of
    // expanding in place, and we never reallocate by less than the desired
    // amount unless we cannot expand further. Hence we will not reallocate
    // sub-optimally twice in a row (modulo the blocking memory being freed).
    size_type lower = folly::goodMallocSize(sizeof(T) + size() * sizeof(T));
    size_type upper = byte_sz;
    size_type extra = upper - lower;
    void* p = impl_.b_;
    size_t actual;
    if ((actual = xallocx(p, lower, extra, 0)) >= lower) {
      // In-place expansion succeeded; construct directly at the end.
      impl_.z_ = impl_.b_ + actual / sizeof(T);
      M_construct(impl_.e_, std::forward<Args>(args)...);
      ++impl_.e_;
      return;
    }
  }
  // Reallocation failed. Perform a manual relocation.
  size_type sz = byte_sz / sizeof(T);
  auto newB = M_allocate(sz);
  auto newE = newB + size();
  try {
    if (folly::IsRelocatable<T>::value && usingStdAllocator::value) {
      // For linear memory access, relocate before construction.
      // By the test condition, relocate is noexcept.
      // Note that there is no cleanup to do if M_construct throws - that's
      // one of the beauties of relocation.
      // Benchmarks for this code have high variance, and seem to be close.
      relocate_move(newB, impl_.b_, impl_.e_);
      M_construct(newE, std::forward<Args>(args)...);
      ++newE;
    } else {
      // Non-relocatable: construct the new element first, and undo it if
      // relocation of the old elements subsequently throws.
      M_construct(newE, std::forward<Args>(args)...);
      ++newE;
      try {
        M_relocate(newB);
      } catch (...) {
        M_destroy(newE - 1);
        throw;
      }
    }
  } catch (...) {
    M_deallocate(newB, sz);
    throw;
  }
  if (impl_.b_) {
    M_deallocate(impl_.b_, size());
  }
  impl_.b_ = newB;
  impl_.e_ = newE;
  impl_.z_ = newB + sz;
}
  1506. //=============================================================================
  1507. //-----------------------------------------------------------------------------
  1508. // specialized functions
// ADL-enabled free swap, delegating to the member swap.
template <class T, class A>
void swap(fbvector<T, A>& lhs, fbvector<T, A>& rhs) noexcept {
  lhs.swap(rhs);
}
  1513. //=============================================================================
  1514. //-----------------------------------------------------------------------------
  1515. // other
namespace detail {
// Format support.
// Lets folly::format index into an fbvector like any other sequence.
template <class T, class A>
struct IndexableTraits<fbvector<T, A>>
    : public IndexableTraitsSeq<fbvector<T, A>> {};
} // namespace detail
  1522. template <class T, class A>
  1523. void compactResize(fbvector<T, A>* v, size_t sz) {
  1524. v->resize(sz);
  1525. v->shrink_to_fit();
  1526. }
  1527. // DANGER
  1528. //
  1529. // relinquish and attach are not a members function specifically so that it is
  1530. // awkward to call them. It is very easy to shoot yourself in the foot with
  1531. // these functions.
  1532. //
  1533. // If you call relinquish, then it is your responsibility to free the data
  1534. // and the storage, both of which may have been generated in a non-standard
  1535. // way through the fbvector's allocator.
  1536. //
  1537. // If you call attach, it is your responsibility to ensure that the fbvector
  1538. // is fresh (size and capacity both zero), and that the supplied data is
  1539. // capable of being manipulated by the allocator.
  1540. // It is acceptable to supply a stack pointer IF:
  1541. // (1) The vector's data does not outlive the stack pointer. This includes
  1542. // extension of the data's life through a move operation.
  1543. // (2) The pointer has enough capacity that the vector will never be
  1544. // relocated.
  1545. // (3) Insert is not called on the vector; these functions have leeway to
  1546. // relocate the vector even if there is enough capacity.
  1547. // (4) A stack pointer is compatible with the fbvector's allocator.
  1548. //
// Take ownership of the vector's buffer: the vector is left empty
// (b_ == e_ == z_ == nullptr) and the caller becomes responsible for
// freeing the returned memory appropriately. See the DANGER note above.
template <class T, class A>
T* relinquish(fbvector<T, A>& v) {
  T* ret = v.data();
  v.impl_.b_ = v.impl_.e_ = v.impl_.z_ = nullptr;
  return ret;
}
// Adopt an external buffer into a fresh vector. See the DANGER note above
// for the preconditions.
template <class T, class A>
void attach(fbvector<T, A>& v, T* data, size_t sz, size_t cap) {
  assert(v.data() == nullptr); // the vector must not already own memory
  v.impl_.b_ = data;
  v.impl_.e_ = data + sz;
  v.impl_.z_ = data + cap;
}
  1562. } // namespace folly