HazptrTest.cpp
  1. /*
  2. * Copyright 2018-present Facebook, Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #include <folly/synchronization/Hazptr.h>
  17. #include <folly/synchronization/example/HazptrLockFreeLIFO.h>
  18. #include <folly/synchronization/example/HazptrSWMRSet.h>
  19. #include <folly/synchronization/example/HazptrWideCAS.h>
  20. #include <folly/synchronization/test/Barrier.h>
  21. #include <folly/portability/GFlags.h>
  22. #include <folly/portability/GTest.h>
  23. #include <folly/test/DeterministicSchedule.h>
  24. #include <atomic>
  25. #include <thread>
  26. DEFINE_bool(bench, false, "run benchmark");
  27. DEFINE_int64(num_reps, 10, "Number of test reps");
  28. DEFINE_int32(num_threads, 6, "Number of threads");
  29. DEFINE_int64(num_ops, 1003, "Number of ops or pairs of ops per rep");
  30. using folly::default_hazptr_domain;
  31. using folly::hazptr_array;
  32. using folly::hazptr_cleanup;
  33. using folly::hazptr_domain;
  34. using folly::hazptr_holder;
  35. using folly::hazptr_local;
  36. using folly::hazptr_obj_base;
  37. using folly::hazptr_obj_base_linked;
  38. using folly::hazptr_retire;
  39. using folly::hazptr_root;
  40. using folly::hazptr_tc;
  41. using folly::HazptrLockFreeLIFO;
  42. using folly::HazptrSWMRSet;
  43. using folly::HazptrWideCAS;
  44. using folly::test::Barrier;
  45. using folly::test::DeterministicAtomic;
  46. using DSched = folly::test::DeterministicSchedule;
  47. // Structures
  48. /** Count */
  49. class Count {
  50. std::atomic<int> ctors_{0};
  51. std::atomic<int> dtors_{0};
  52. std::atomic<int> retires_{0};
  53. public:
  54. void clear() noexcept {
  55. ctors_.store(0);
  56. dtors_.store(0);
  57. retires_.store(0);
  58. }
  59. int ctors() const noexcept {
  60. return ctors_.load();
  61. }
  62. int dtors() const noexcept {
  63. return dtors_.load();
  64. }
  65. int retires() const noexcept {
  66. return retires_.load();
  67. }
  68. void inc_ctors() noexcept {
  69. ctors_.fetch_add(1);
  70. }
  71. void inc_dtors() noexcept {
  72. dtors_.fetch_add(1);
  73. }
  74. void inc_retires() noexcept {
  75. retires_.fetch_add(1);
  76. }
  77. }; // Count
// Global counter instance shared by all test node types; tests call
// c_.clear() at the start and assert on c_.ctors()/c_.dtors() afterwards.
static Count c_;
  79. /** Node */
/** Node: plain retirable singly-linked node, instrumented via the global
    counters in c_. */
template <template <typename> class Atom = std::atomic>
class Node : public hazptr_obj_base<Node<Atom>, Atom> {
  int val_;
  Atom<Node<Atom>*> next_;

 public:
  // The unused bool parameter mirrors NodeRC's (value, next, acquire)
  // constructor shape so List<T> can construct either node type uniformly.
  explicit Node(int v = 0, Node* n = nullptr, bool = false) noexcept
      : val_(v), next_(n) {
    c_.inc_ctors();
  }
  ~Node() {
    c_.inc_dtors();
  }
  int value() const noexcept {
    return val_;
  }
  Node<Atom>* next() const noexcept {
    return next_.load(std::memory_order_acquire);
  }
  // Address of the next-pointer cell, for hand-over-hand traversal.
  Atom<Node<Atom>*>* ptr_next() noexcept {
    return &next_;
  }
}; // Node
  102. /** NodeRC */
/** NodeRC: linked node based on hazptr_obj_base_linked. When constructed
    with acq == true it acquires either a (mutable) link count or an
    (immutable) ref count on itself, selected by the Mutable parameter. */
template <bool Mutable, template <typename> class Atom = std::atomic>
class NodeRC : public hazptr_obj_base_linked<NodeRC<Mutable, Atom>, Atom> {
  Atom<NodeRC<Mutable, Atom>*> next_;
  int val_;

 public:
  explicit NodeRC(int v = 0, NodeRC* n = nullptr, bool acq = false) noexcept
      : next_(n), val_(v) {
    this->set_deleter();
    c_.inc_ctors();
    if (acq) {
      if (Mutable) {
        this->acquire_link_safe();
      } else {
        this->acquire_ref_safe();
      }
    }
  }
  ~NodeRC() {
    c_.inc_dtors();
  }
  int value() const noexcept {
    return val_;
  }
  NodeRC<Mutable, Atom>* next() const noexcept {
    return next_.load(std::memory_order_acquire);
  }
  // Called by the hazptr framework during reclamation: pushes the
  // successor when the traversal's mutability mode matches this node's.
  template <typename S>
  void push_links(bool m, S& s) {
    if (Mutable == m) {
      auto p = next();
      if (p) {
        s.push(p);
      }
    }
  }
}; // NodeRC
  139. /** List */
/** List: singly-linked list of T (Node or NodeRC) supporting lookup by
    value with either hand-over-hand (two hazptrs) protection or a single
    protection of the head (protect_all). */
template <typename T, template <typename> class Atom = std::atomic>
struct List {
  Atom<T*> head_{nullptr};

 public:
  // Builds values size+9999, size+9998, ..., 10000 from head to tail.
  // All nodes except the head are constructed with acquire == true.
  explicit List(int size) {
    auto p = head_.load(std::memory_order_relaxed);
    for (int i = 0; i < size - 1; ++i) {
      p = new T(i + 10000, p, true);
    }
    p = new T(size + 9999, p);
    head_.store(p, std::memory_order_relaxed);
  }
  ~List() {
    auto curr = head_.load(std::memory_order_relaxed);
    while (curr) {
      auto next = curr->next();
      curr->retire();
      curr = next;
    }
  }
  // Hand-over-hand traversal: protect curr, re-validate that prev still
  // points to it, then advance while swapping the roles of the two
  // hazard pointers. Restarts from head on any detected race.
  bool hand_over_hand(
      int val,
      hazptr_holder<Atom>* hptr_prev,
      hazptr_holder<Atom>* hptr_curr) {
    while (true) {
      auto prev = &head_;
      auto curr = prev->load(std::memory_order_acquire);
      while (true) {
        if (!curr) {
          return false;
        }
        if (!hptr_curr->try_protect(curr, *prev)) {
          break; // protection raced with an update; restart from head
        }
        auto next = curr->next();
        if (prev->load(std::memory_order_acquire) != curr) {
          break; // link changed under us; restart from head
        }
        if (curr->value() == val) {
          return true;
        }
        prev = curr->ptr_next();
        curr = next;
        std::swap(hptr_curr, hptr_prev);
      }
    }
  }
  bool hand_over_hand(int val) {
    hazptr_local<2, Atom> hptr;
    return hand_over_hand(val, &hptr[0], &hptr[1]);
  }
  // Protect only the head; relies on links/refs (see NodeRC) to keep the
  // rest of the list alive during traversal.
  bool protect_all(int val, hazptr_holder<Atom>& hptr) {
    auto curr = hptr.get_protected(head_);
    while (curr) {
      auto next = curr->next();
      if (curr->value() == val) {
        return true;
      }
      curr = next;
    }
    return false;
  }
  bool protect_all(int val) {
    hazptr_local<1, Atom> hptr;
    return protect_all(val, hptr[0]);
  }
}; // List
  207. /** NodeAuto */
/** NodeAuto: linked node with two child links. Children are pushed only
    as immutable links during reclamation, enabling automatic retirement
    of reachable descendants (see auto_retire_test). */
template <template <typename> class Atom = std::atomic>
class NodeAuto : public hazptr_obj_base_linked<NodeAuto<Atom>, Atom> {
  Atom<NodeAuto<Atom>*> link_[2];

 public:
  explicit NodeAuto(NodeAuto* l1 = nullptr, NodeAuto* l2 = nullptr) noexcept {
    this->set_deleter();
    link_[0].store(l1, std::memory_order_relaxed);
    link_[1].store(l2, std::memory_order_relaxed);
    c_.inc_ctors();
  }
  ~NodeAuto() {
    c_.inc_dtors();
  }
  NodeAuto<Atom>* link(size_t i) {
    return link_[i].load(std::memory_order_acquire);
  }
  // Framework callback during reclamation; pushes non-null children.
  template <typename S>
  void push_links(bool m, S& s) {
    if (m == false) { // Immutable
      for (int i = 0; i < 2; ++i) {
        auto p = link(i);
        if (p) {
          s.push(p);
        }
      }
    }
  }
}; // NodeAuto
  236. // Test Functions
/** Create and dispose of objects through each supported mechanism:
    plain retire, linked retire, unlink, unlink_and_reclaim_unchecked,
    and ownership by a hazptr_root. All must end up destroyed. */
template <template <typename> class Atom = std::atomic>
void basic_objects_test() {
  c_.clear();
  int num = 0;
  {
    // Plain object: retire.
    ++num;
    auto obj = new Node<Atom>;
    obj->retire();
  }
  {
    // Linked object: retire directly.
    ++num;
    auto obj = new NodeRC<false, Atom>(0, nullptr);
    obj->retire();
  }
  {
    // Linked object: acquire a link, then unlink.
    ++num;
    auto obj = new NodeRC<false, Atom>(0, nullptr);
    obj->acquire_link_safe();
    obj->unlink();
  }
  {
    // Linked object: unlink and reclaim immediately, bypassing checks.
    ++num;
    auto obj = new NodeRC<false, Atom>(0, nullptr);
    obj->acquire_link_safe();
    obj->unlink_and_reclaim_unchecked();
  }
  {
    // Linked object owned by a root; the root's dtor unlinks it.
    ++num;
    auto obj = new NodeRC<false, Atom>(0, nullptr);
    obj->acquire_link_safe();
    hazptr_root<NodeRC<false, Atom>> root(obj);
  }
  ASSERT_EQ(c_.ctors(), num);
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), num);
}
  273. template <template <typename> class Atom = std::atomic>
  274. void copy_and_move_test() {
  275. struct Obj : hazptr_obj_base<Obj, Atom> {
  276. int a;
  277. };
  278. auto p1 = new Obj();
  279. auto p2 = new Obj(*p1);
  280. p1->retire();
  281. p2->retire();
  282. p1 = new Obj();
  283. p2 = new Obj(std::move(*p1));
  284. p1->retire();
  285. p2->retire();
  286. p1 = new Obj();
  287. p2 = new Obj();
  288. *p2 = *p1;
  289. p1->retire();
  290. p2->retire();
  291. p1 = new Obj();
  292. p2 = new Obj();
  293. *p2 = std::move(*p1);
  294. p1->retire();
  295. p2->retire();
  296. hazptr_cleanup<Atom>();
  297. }
  298. template <template <typename> class Atom = std::atomic>
  299. void basic_holders_test() {
  300. { hazptr_holder<Atom> h; }
  301. { hazptr_array<2, Atom> h; }
  302. { hazptr_local<2, Atom> h; }
  303. }
/** A holder's protection defers reclamation of a retired object until
    the holder is reset. */
template <template <typename> class Atom = std::atomic>
void basic_protection_test() {
  c_.clear();
  auto obj = new Node<Atom>;
  hazptr_holder<Atom> h;
  h.reset(obj);
  obj->retire();
  ASSERT_EQ(c_.ctors(), 1);
  // obj is protected: cleanup must not reclaim it yet.
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), 0);
  h.reset();
  // Unprotected: cleanup may now reclaim obj.
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), 1);
}
  318. template <template <typename> class Atom = std::atomic>
  319. void virtual_test() {
  320. struct Thing : public hazptr_obj_base<Thing, Atom> {
  321. virtual ~Thing() {}
  322. int a;
  323. };
  324. for (int i = 0; i < 100; i++) {
  325. auto bar = new Thing;
  326. bar->a = i;
  327. hazptr_holder<Atom> hptr;
  328. hptr.reset(bar);
  329. bar->retire();
  330. ASSERT_EQ(bar->a, i);
  331. }
  332. hazptr_cleanup<Atom>();
  333. }
/** Each Thing retires its successor from its own destructor, producing a
    long chain of retirements during reclamation in the given domain. */
template <template <typename> class Atom = std::atomic>
void destruction_test(hazptr_domain<Atom>& domain) {
  struct Thing : public hazptr_obj_base<Thing, Atom> {
    Thing* next;
    hazptr_domain<Atom>* domain;
    int val;
    Thing(int v, Thing* n, hazptr_domain<Atom>* d)
        : next(n), domain(d), val(v) {}
    ~Thing() {
      if (next) {
        // Retiring from a destructor exercises nested reclamation.
        next->retire(*domain);
      }
    }
  };
  Thing* last{nullptr};
  for (int i = 0; i < 2000; i++) {
    last = new Thing(i, last, &domain);
  }
  last->retire(domain);
  hazptr_cleanup<Atom>();
}
/** hazptr_holder move construction/assignment (including self-move)
    preserves protection of the held object. */
template <template <typename> class Atom = std::atomic>
void move_test() {
  for (int i = 0; i < 100; ++i) {
    auto x = new Node<Atom>(i);
    hazptr_holder<Atom> hptr0;
    // Protect object
    hptr0.reset(x);
    // Retire object
    x->retire();
    // Move constructor - still protected
    hazptr_holder<Atom> hptr1(std::move(hptr0));
    // Self move is no-op - still protected
    auto phptr1 = &hptr1;
    ASSERT_EQ(phptr1, &hptr1);
    hptr1 = std::move(*phptr1);
    // Empty constructor
    hazptr_holder<Atom> hptr2(nullptr);
    // Move assignment - still protected
    hptr2 = std::move(hptr1);
    // Access object
    ASSERT_EQ(x->value(), i);
    // Unprotect object - hptr2 is nonempty
    hptr2.reset();
  }
  hazptr_cleanup<Atom>();
}
/** hazptr_array elements protect objects; protection survives moving the
    whole array into another array. */
template <template <typename> class Atom = std::atomic>
void array_test() {
  for (int i = 0; i < 100; ++i) {
    auto x = new Node<Atom>(i);
    hazptr_array<3, Atom> hptr;
    // Protect object
    hptr[2].reset(x);
    // Empty array
    hazptr_array<3, Atom> h(nullptr);
    // Move assignment - protection transfers with the elements
    h = std::move(hptr);
    // Retire object
    x->retire();
    ASSERT_EQ(x->value(), i);
    // Unprotect object - h[2] is nonempty
    h[2].reset();
  }
  hazptr_cleanup<Atom>();
}
/** Destroy a hazptr_array while the thread cache is already full, forcing
    the array dtor to destroy each holder individually instead of
    returning them to the cache. */
template <template <typename> class Atom = std::atomic>
void array_dtor_full_tc_test() {
#if FOLLY_HAZPTR_THR_LOCAL
  const uint8_t M = hazptr_tc<Atom>::capacity();
#else
  const uint8_t M = 3;
#endif
  {
    // Fill the thread cache
    hazptr_array<M, Atom> w;
  }
  {
    // Empty array x
    hazptr_array<M, Atom> x(nullptr);
    {
      // y ctor gets elements from the thread cache filled by w dtor.
      hazptr_array<M, Atom> y;
      // z ctor gets elements from the default domain.
      hazptr_array<M, Atom> z;
      // Elements of y are moved to x.
      x = std::move(y);
      // z dtor fills the thread cache.
    }
    // x dtor finds the thread cache full. It has to call
    // ~hazptr_holder() for each of its elements, which were
    // previously taken from the thread cache by y ctor.
  }
}
  428. template <template <typename> class Atom = std::atomic>
  429. void local_test() {
  430. for (int i = 0; i < 100; ++i) {
  431. auto x = new Node<Atom>(i);
  432. hazptr_local<3, Atom> hptr;
  433. // Protect object
  434. hptr[2].reset(x);
  435. // Retire object
  436. x->retire();
  437. // Unprotect object - hptr2 is nonempty
  438. hptr[2].reset();
  439. }
  440. hazptr_cleanup<Atom>();
  441. }
/** Build a linked chain of NodeRC, protect the head with a hazptr, and
    verify that no node is reclaimed until both the ownership (root or
    retire) and the hazptr protection are released. */
template <bool Mutable, template <typename> class Atom = std::atomic>
void linked_test() {
  c_.clear();
  NodeRC<Mutable, Atom>* p = nullptr;
  int num = 193;
  for (int i = 0; i < num - 1; ++i) {
    p = new NodeRC<Mutable, Atom>(i, p, true);
  }
  p = new NodeRC<Mutable, Atom>(num - 1, p, Mutable);
  hazptr_holder<Atom> hptr;
  hptr.reset(p);
  if (!Mutable) {
    // Immutable case: interior nodes can be retired up front; refs keep
    // the chain traversable while the head is protected.
    for (auto q = p->next(); q; q = q->next()) {
      q->retire();
    }
  }
  // Values run num-1 down to 0 from head to tail.
  int v = num;
  for (auto q = p; q; q = q->next()) {
    ASSERT_GT(v, 0);
    --v;
    ASSERT_EQ(q->value(), v);
  }
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.ctors(), num);
  ASSERT_EQ(c_.dtors(), 0);
  if (Mutable) {
    // Root takes ownership; its dtor unlinks the whole chain.
    hazptr_root<NodeRC<Mutable, Atom>, Atom> root(p);
  } else {
    p->retire();
  }
  hazptr_cleanup<Atom>();
  // The head is still protected by hptr: nothing reclaimed yet.
  ASSERT_EQ(c_.dtors(), 0);
  hptr.reset();
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), num);
}
/** Multi-threaded variant of linked_test: reader threads traverse the
    chain while the main thread concurrently removes it (unlink for the
    mutable case, per-node retire for the immutable case). */
template <bool Mutable, template <typename> class Atom = std::atomic>
void mt_linked_test() {
  c_.clear();
  Atom<bool> ready(false);
  Atom<bool> done(false);
  Atom<int> setHazptrs(0);
  hazptr_root<NodeRC<Mutable, Atom>, Atom> head;
  int num = FLAGS_num_ops;
  int nthr = FLAGS_num_threads;
  ASSERT_GT(FLAGS_num_threads, 0);
  std::vector<std::thread> thr(nthr);
  for (int i = 0; i < nthr; ++i) {
    thr[i] = DSched::thread([&] {
      while (!ready.load()) {
        /* spin */
      }
      hazptr_holder<Atom> hptr;
      auto p = hptr.get_protected(head());
      ++setHazptrs;
      /* Concurrent with removal */
      int v = num;
      for (auto q = p; q; q = q->next()) {
        ASSERT_GT(v, 0);
        --v;
        ASSERT_EQ(q->value(), v);
      }
      ASSERT_EQ(v, 0);
      while (!done.load()) {
        /* spin */
      }
    });
  }
  NodeRC<Mutable, Atom>* p = nullptr;
  for (int i = 0; i < num - 1; ++i) {
    p = new NodeRC<Mutable, Atom>(i, p, true);
  }
  p = new NodeRC<Mutable, Atom>(num - 1, p, Mutable);
  ASSERT_EQ(c_.ctors(), num);
  head().store(p);
  ready.store(true);
  // Wait until every reader has protected the head.
  while (setHazptrs.load() < nthr) {
    /* spin */
  }
  /* this is concurrent with traversal by readers */
  head().store(nullptr);
  if (Mutable) {
    p->unlink();
  } else {
    for (auto q = p; q; q = q->next()) {
      q->retire();
    }
  }
  // Readers' hazptrs still protect the chain: nothing reclaimed yet.
  ASSERT_EQ(c_.dtors(), 0);
  done.store(true);
  for (auto& t : thr) {
    DSched::join(t);
  }
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), num);
}
/** Diamond-shaped link graph (root->a; a->b,c; b->d; c->d): releasing
    the root cascades link downgrades and retirements; reclamation honors
    per-node hazptr protection. The step-by-step traces below document
    the expected (acq, ref) link-count transitions. */
template <template <typename> class Atom = std::atomic>
void auto_retire_test() {
  c_.clear();
  auto d = new NodeAuto<Atom>;
  d->acquire_link_safe();
  auto c = new NodeAuto<Atom>(d);
  d->acquire_link_safe();
  auto b = new NodeAuto<Atom>(d);
  c->acquire_link_safe();
  b->acquire_link_safe();
  auto a = new NodeAuto<Atom>(b, c);
  hazptr_holder<Atom> h;
  {
    hazptr_root<NodeAuto<Atom>> root;
    a->acquire_link_safe();
    root().store(a);
    ASSERT_EQ(c_.ctors(), 4);
    /* So far the links and link counts are:
       root-->a  a-->b  a-->c  b-->d  c-->d
       a(1,0) b(1,0) c(1,0) d(2,0)
    */
    h.reset(c); /* h protects c */
    hazptr_cleanup<Atom>();
    ASSERT_EQ(c_.dtors(), 0);
    /* Nothing is retired or reclaimed yet */
  }
  /* root dtor calls a->unlink, which calls a->release_link, which
     changes a's link counts from (1,0) to (0,0), which triggers calls
     to c->downgrade_link, b->downgrade_link, and a->retire.
     c->downgrade_link changes c's link counts from (1,0) to (0,1),
     which triggers calls to d->downgrade_link and c->retire.
     d->downgrade_link changes d's link counts from (2,0) to (1,1).
     b->downgrade_link changes b's link counts from (1,0) to (0,1),
     which triggers calls to d->downgrade_link and b->retire.
     d->downgrade_link changes d's link counts from (1,1) to (0,2),
     which triggers a call to d->retire.
     So far (assuming retire-s did not trigger bulk_reclaim):
     a-->b  a-->c  b-->d  c-->d
     a(0,0) b(0,1) c(0,1) d(0,2)
     Retired: a b c d
     Protected: c
  */
  hazptr_cleanup<Atom>();
  /* hazptr_cleanup calls bulk_reclaim which finds a, b, and d
     unprotected, which triggers calls to a->release_ref,
     b->release_ref, and d->release_ref (not necessarily in that
     order).
     a->release_ref finds a's link counts to be (0,0), which triggers
     calls to c->release_ref, b->release_ref and delete a.
     The call to c->release_ref changes its link counts from (0,1) to
     (0,0).
     The first call to b->release_ref changes b's link counts to
     (0,0). The second call finds the link counts to be (0,0), which
     triggers a call to d->release_ref and delete b.
     The first call to d->release_ref changes its link counts to
     (0,1), and the second call changes them to (0,0);
     So far:
     c-->d
     a(deleted) b(deleted) c(0,0) d(0,0)
     Retired and protected: c
     bulk_reclamed-ed (i.e, found not protected): d
  */
  ASSERT_EQ(c_.dtors(), 2);
  h.reset(); /* c is now no longer protected */
  hazptr_cleanup<Atom>();
  /* hazptr_cleanup calls bulk_reclaim which finds c unprotected,
     which triggers a call to c->release_ref.
     c->release_ref finds c's link counts to be (0,0), which
     triggers calls to d->release_ref and delete c.
     d->release_ref finds d's link counts to be (0,0), which triggers
     a call to delete d.
     Finally:
     a(deleted) b(deleted) c(deleted) d(deleted)
  */
  ASSERT_EQ(c_.dtors(), 4);
}
/** hazptr_retire free function: default deleter, custom deleter, and
    retirement into a custom domain (reclaimed by the domain's dtor). */
template <template <typename> class Atom = std::atomic>
void free_function_retire_test() {
  auto foo = new int;
  hazptr_retire<Atom>(foo);
  auto foo2 = new int;
  hazptr_retire<Atom>(foo2, [](int* obj) { delete obj; });
  bool retired = false;
  {
    hazptr_domain<Atom> myDomain0;
    struct delret {
      bool* retired_;
      explicit delret(bool* retire) : retired_(retire) {}
      ~delret() {
        *retired_ = true;
      }
    };
    auto foo3 = new delret(&retired);
    myDomain0.retire(foo3);
  } // myDomain0's dtor must reclaim foo3
  ASSERT_TRUE(retired);
}
/** hazptr_cleanup reclaims objects retired by other (still-live)
    threads, and also works after hazptr_array / hazptr_local use. */
template <template <typename> class Atom = std::atomic>
void cleanup_test() {
  int threadOps = 1007;
  int mainOps = 19;
  c_.clear();
  Atom<int> threadsDone{0};
  Atom<bool> mainDone{false};
  std::vector<std::thread> threads(FLAGS_num_threads);
  for (int tid = 0; tid < FLAGS_num_threads; ++tid) {
    threads[tid] = DSched::thread([&, tid]() {
      for (int j = tid; j < threadOps; j += FLAGS_num_threads) {
        auto p = new Node<Atom>;
        p->retire();
      }
      threadsDone.fetch_add(1);
      while (!mainDone.load()) {
        /* spin */;
      }
    });
  }
  { // include the main thread in the test
    for (int i = 0; i < mainOps; ++i) {
      auto p = new Node<Atom>;
      p->retire();
    }
  }
  while (threadsDone.load() < FLAGS_num_threads) {
    /* spin */;
  }
  ASSERT_EQ(c_.ctors(), threadOps + mainOps);
  // Worker threads are still alive (spinning), yet their retired objects
  // must be reclaimable by a cleanup from this thread.
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), threadOps + mainOps);
  mainDone.store(true);
  for (auto& t : threads) {
    DSched::join(t);
  }
  { // Cleanup after using array
    c_.clear();
    { hazptr_array<2, Atom> h; }
    {
      hazptr_array<2, Atom> h;
      auto p0 = new Node<Atom>;
      auto p1 = new Node<Atom>;
      h[0].reset(p0);
      h[1].reset(p1);
      p0->retire();
      p1->retire();
    }
    ASSERT_EQ(c_.ctors(), 2);
    hazptr_cleanup<Atom>();
    ASSERT_EQ(c_.dtors(), 2);
  }
  { // Cleanup after using local
    c_.clear();
    { hazptr_local<2, Atom> h; }
    {
      hazptr_local<2, Atom> h;
      auto p0 = new Node<Atom>;
      auto p1 = new Node<Atom>;
      h[0].reset(p0);
      h[1].reset(p1);
      p0->retire();
      p1->retire();
    }
    ASSERT_EQ(c_.ctors(), 2);
    hazptr_cleanup<Atom>();
    ASSERT_EQ(c_.dtors(), 2);
  }
}
/** An object retired during another object's reclamation (here: y,
    retired when x's root member is destroyed) must be pushed to the
    domain, not to the retiring thread's dying private list. */
template <template <typename> class Atom = std::atomic>
void priv_dtor_test() {
  c_.clear();
  using NodeT = NodeRC<true, Atom>;
  auto y = new NodeT;
  y->acquire_link_safe();
  struct Foo : hazptr_obj_base<Foo, Atom> {
    hazptr_root<NodeT, Atom> r_;
  };
  auto x = new Foo;
  x->r_().store(y);
  /* Thread retires x. Dtor of TLS priv list pushes x to domain, which
     triggers bulk reclaim due to timed cleanup (when the test is run
     by itself). Reclamation of x unlinks and retires y. y should
     not be pushed into the thread's priv list. It should be pushed to
     domain instead. */
  auto thr = DSched::thread([&]() { x->retire(); });
  DSched::join(thr);
  ASSERT_EQ(c_.ctors(), 1);
  hazptr_cleanup<Atom>();
  ASSERT_EQ(c_.dtors(), 1);
}
  726. template <template <typename> class Atom = std::atomic>
  727. void lifo_test() {
  728. for (int i = 0; i < FLAGS_num_reps; ++i) {
  729. Atom<int> sum{0};
  730. HazptrLockFreeLIFO<int, Atom> s;
  731. std::vector<std::thread> threads(FLAGS_num_threads);
  732. for (int tid = 0; tid < FLAGS_num_threads; ++tid) {
  733. threads[tid] = DSched::thread([&, tid]() {
  734. int local = 0;
  735. for (int j = tid; j < FLAGS_num_ops; j += FLAGS_num_threads) {
  736. s.push(j);
  737. int v;
  738. ASSERT_TRUE(s.pop(v));
  739. local += v;
  740. }
  741. sum.fetch_add(local);
  742. });
  743. }
  744. for (auto& t : threads) {
  745. DSched::join(t);
  746. }
  747. hazptr_cleanup<Atom>();
  748. int expected = FLAGS_num_ops * (FLAGS_num_ops - 1) / 2;
  749. ASSERT_EQ(sum.load(), expected);
  750. }
  751. }
/** Single-writer multi-reader set: reader threads call contains()
    concurrently while the main thread adds then removes elements. */
template <template <typename> class Atom = std::atomic>
void swmr_test() {
  using T = uint64_t;
  for (int i = 0; i < FLAGS_num_reps; ++i) {
    HazptrSWMRSet<T, Atom> s;
    std::vector<std::thread> threads(FLAGS_num_threads);
    for (int tid = 0; tid < FLAGS_num_threads; ++tid) {
      threads[tid] = DSched::thread([&s, tid]() {
        for (int j = tid; j < FLAGS_num_ops; j += FLAGS_num_threads) {
          s.contains(j);
        }
      });
    }
    // Single writer: mutate the set while readers are running.
    for (int j = 0; j < 10; ++j) {
      s.add(j);
    }
    for (int j = 0; j < 10; ++j) {
      s.remove(j);
    }
    for (auto& t : threads) {
      DSched::join(t);
    }
    hazptr_cleanup<Atom>();
  }
}
  777. template <template <typename> class Atom = std::atomic>
  778. void wide_cas_test() {
  779. HazptrWideCAS<std::string, Atom> s;
  780. std::string u = "";
  781. std::string v = "11112222";
  782. auto ret = s.cas(u, v);
  783. ASSERT_TRUE(ret);
  784. u = "";
  785. v = "11112222";
  786. ret = s.cas(u, v);
  787. ASSERT_FALSE(ret);
  788. u = "11112222";
  789. v = "22223333";
  790. ret = s.cas(u, v);
  791. ASSERT_TRUE(ret);
  792. u = "22223333";
  793. v = "333344445555";
  794. ret = s.cas(u, v);
  795. ASSERT_TRUE(ret);
  796. hazptr_cleanup<Atom>();
  797. }
  798. // Tests
  799. TEST(HazptrTest, basic_objects) {
  800. basic_objects_test();
  801. }
  802. TEST(HazptrTest, dsched_basic_objects) {
  803. DSched sched(DSched::uniform(0));
  804. basic_objects_test<DeterministicAtomic>();
  805. }
  806. TEST(HazptrTest, copy_and_move) {
  807. copy_and_move_test();
  808. }
  809. TEST(HazptrTest, dsched_copy_and_move) {
  810. DSched sched(DSched::uniform(0));
  811. copy_and_move_test<DeterministicAtomic>();
  812. }
  813. TEST(HazptrTest, basic_holders) {
  814. basic_holders_test();
  815. }
  816. TEST(HazptrTest, dsched_basic_holders) {
  817. DSched sched(DSched::uniform(0));
  818. basic_holders_test<DeterministicAtomic>();
  819. }
  820. TEST(HazptrTest, basic_protection) {
  821. basic_protection_test();
  822. }
  823. TEST(HazptrTest, dsched_basic_protection) {
  824. DSched sched(DSched::uniform(0));
  825. basic_protection_test<DeterministicAtomic>();
  826. }
  827. TEST(HazptrTest, virtual) {
  828. virtual_test();
  829. }
  830. TEST(HazptrTest, dsched_virtual) {
  831. DSched sched(DSched::uniform(0));
  832. virtual_test<DeterministicAtomic>();
  833. }
  834. TEST(HazptrTest, destruction) {
  835. {
  836. hazptr_domain<> myDomain0;
  837. destruction_test(myDomain0);
  838. }
  839. destruction_test(default_hazptr_domain<std::atomic>());
  840. }
  841. TEST(HazptrTest, dsched_destruction) {
  842. DSched sched(DSched::uniform(0));
  843. {
  844. hazptr_domain<DeterministicAtomic> myDomain0;
  845. destruction_test<DeterministicAtomic>(myDomain0);
  846. }
  847. destruction_test<DeterministicAtomic>(
  848. default_hazptr_domain<DeterministicAtomic>());
  849. }
  850. TEST(HazptrTest, move) {
  851. move_test();
  852. }
  853. TEST(HazptrTest, dsched_move) {
  854. DSched sched(DSched::uniform(0));
  855. move_test<DeterministicAtomic>();
  856. }
  857. TEST(HazptrTest, array) {
  858. array_test();
  859. }
  860. TEST(HazptrTest, dsched_array) {
  861. DSched sched(DSched::uniform(0));
  862. array_test<DeterministicAtomic>();
  863. }
  864. TEST(HazptrTest, array_dtor_full_tc) {
  865. array_dtor_full_tc_test();
  866. }
  867. TEST(HazptrTest, dsched_array_dtor_full_tc) {
  868. DSched sched(DSched::uniform(0));
  869. array_dtor_full_tc_test<DeterministicAtomic>();
  870. }
  871. TEST(HazptrTest, local) {
  872. local_test();
  873. }
  874. TEST(HazptrTest, dsched_local) {
  875. DSched sched(DSched::uniform(0));
  876. local_test<DeterministicAtomic>();
  877. }
// linked_test's bool template parameter selects the mutable (true) vs
// immutable (false) variant, per the test names.
TEST(HazptrTest, linked_mutable) {
linked_test<true>();
}
TEST(HazptrTest, dsched_linked_mutable) {
DSched sched(DSched::uniform(0));
linked_test<true, DeterministicAtomic>();
}
TEST(HazptrTest, linked_immutable) {
linked_test<false>();
}
TEST(HazptrTest, dsched_linked_immutable) {
DSched sched(DSched::uniform(0));
linked_test<false, DeterministicAtomic>();
}
// Multi-threaded counterparts of the linked tests; the bool parameter again
// selects mutable (true) vs immutable (false) per the test names.
TEST(HazptrTest, mt_linked_mutable) {
mt_linked_test<true>();
}
TEST(HazptrTest, dsched_mt_linked_mutable) {
DSched sched(DSched::uniform(0));
mt_linked_test<true, DeterministicAtomic>();
}
TEST(HazptrTest, mt_linked_immutable) {
mt_linked_test<false>();
}
TEST(HazptrTest, dsched_mt_linked_immutable) {
DSched sched(DSched::uniform(0));
mt_linked_test<false, DeterministicAtomic>();
}
// Exercises auto_retire_test (defined earlier in this file).
TEST(HazptrTest, auto_retire) {
auto_retire_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_auto_retire) {
DSched sched(DSched::uniform(0));
auto_retire_test<DeterministicAtomic>();
}
// Exercises free_function_retire_test (defined earlier in this file).
TEST(HazptrTest, free_function_retire) {
free_function_retire_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_free_function_retire) {
DSched sched(DSched::uniform(0));
free_function_retire_test<DeterministicAtomic>();
}
// Exercises cleanup_test (defined earlier in this file).
TEST(HazptrTest, cleanup) {
cleanup_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_cleanup) {
DSched sched(DSched::uniform(0));
cleanup_test<DeterministicAtomic>();
}
// Exercises priv_dtor_test (defined earlier in this file).
TEST(HazptrTest, priv_dtor) {
priv_dtor_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_priv_dtor) {
DSched sched(DSched::uniform(0));
priv_dtor_test<DeterministicAtomic>();
}
// Exercises lifo_test (defined earlier in this file).
TEST(HazptrTest, lifo) {
lifo_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_lifo) {
DSched sched(DSched::uniform(0));
lifo_test<DeterministicAtomic>();
}
// Exercises swmr_test (single-writer multi-reader, per the name).
TEST(HazptrTest, swmr) {
swmr_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_swmr) {
DSched sched(DSched::uniform(0));
swmr_test<DeterministicAtomic>();
}
// Exercises wide_cas_test (defined earlier in this file).
TEST(HazptrTest, wide_cas) {
wide_cas_test();
}
// DeterministicAtomic variant under DSched's uniform(0) schedule.
TEST(HazptrTest, dsched_wide_cas) {
DSched sched(DSched::uniform(0));
wide_cas_test<DeterministicAtomic>();
}
// Verifies that retiring enough objects to reach the domain's rcount
// threshold triggers reclamation on its own, without an explicit
// hazptr_cleanup() call: at least one destructor must have run by the
// time all retiring threads have joined.
TEST(HazptrTest, reclamation_without_calling_cleanup) {
c_.clear(); // reset the shared counters (c_ is defined earlier in this file)
int nthr = 5;
int objs = folly::detail::hazptr_domain_rcount_threshold();
std::vector<std::thread> thr(nthr);
for (int tid = 0; tid < nthr; ++tid) {
thr[tid] = std::thread([&, tid] {
// Threads partition [0, objs) round-robin; together they retire exactly
// objs nodes, which meets the reclamation threshold.
for (int i = tid; i < objs; i += nthr) {
auto p = new Node<>;
p->retire();
}
});
}
for (auto& t : thr) {
t.join();
}
ASSERT_GT(c_.dtors(), 0); // some retired nodes were reclaimed
}
  973. // Benchmark drivers
  974. template <typename InitFunc, typename Func, typename EndFunc>
  975. uint64_t run_once(
  976. int nthreads,
  977. const InitFunc& init,
  978. const Func& fn,
  979. const EndFunc& endFn) {
  980. std::atomic<bool> start{false};
  981. Barrier b(nthreads + 1);
  982. init();
  983. std::vector<std::thread> threads(nthreads);
  984. for (int tid = 0; tid < nthreads; ++tid) {
  985. threads[tid] = std::thread([&, tid] {
  986. b.wait();
  987. fn(tid);
  988. });
  989. }
  990. b.wait();
  991. // begin time measurement
  992. auto tbegin = std::chrono::steady_clock::now();
  993. start.store(true);
  994. for (auto& t : threads) {
  995. t.join();
  996. }
  997. hazptr_cleanup();
  998. // end time measurement
  999. auto tend = std::chrono::steady_clock::now();
  1000. endFn();
  1001. return std::chrono::duration_cast<std::chrono::nanoseconds>(tend - tbegin)
  1002. .count();
  1003. }
  1004. template <typename RepFunc>
  1005. uint64_t bench(std::string name, int ops, const RepFunc& repFn) {
  1006. int reps = 10;
  1007. uint64_t min = UINTMAX_MAX;
  1008. uint64_t max = 0;
  1009. uint64_t sum = 0;
  1010. repFn(); // sometimes first run is outlier
  1011. for (int r = 0; r < reps; ++r) {
  1012. uint64_t dur = repFn();
  1013. sum += dur;
  1014. min = std::min(min, dur);
  1015. max = std::max(max, dur);
  1016. }
  1017. const std::string unit = " ns";
  1018. uint64_t avg = sum / reps;
  1019. uint64_t res = min;
  1020. std::cout << name;
  1021. std::cout << " " << std::setw(4) << max / ops << unit;
  1022. std::cout << " " << std::setw(4) << avg / ops << unit;
  1023. std::cout << " " << std::setw(4) << res / ops << unit;
  1024. std::cout << std::endl;
  1025. return res;
  1026. }
//
// Benchmarks
//
// Total number of measured operations per benchmark rep, shared across
// all threads of a run.
const int ops = 1000000;
  1032. inline uint64_t holder_bench(std::string name, int nthreads) {
  1033. auto repFn = [&] {
  1034. auto init = [] {};
  1035. auto fn = [&](int tid) {
  1036. for (int j = tid; j < 10 * ops; j += nthreads) {
  1037. hazptr_holder<> h;
  1038. }
  1039. };
  1040. auto endFn = [] {};
  1041. return run_once(nthreads, init, fn, endFn);
  1042. };
  1043. return bench(name, ops, repFn);
  1044. }
  1045. template <size_t M>
  1046. inline uint64_t array_bench(std::string name, int nthreads) {
  1047. auto repFn = [&] {
  1048. auto init = [] {};
  1049. auto fn = [&](int tid) {
  1050. for (int j = tid; j < 10 * ops; j += nthreads) {
  1051. hazptr_array<M> a;
  1052. }
  1053. };
  1054. auto endFn = [] {};
  1055. return run_once(nthreads, init, fn, endFn);
  1056. };
  1057. return bench(name, ops, repFn);
  1058. }
  1059. template <size_t M>
  1060. inline uint64_t local_bench(std::string name, int nthreads) {
  1061. auto repFn = [&] {
  1062. auto init = [] {};
  1063. auto fn = [&](int tid) {
  1064. for (int j = tid; j < 10 * ops; j += nthreads) {
  1065. hazptr_local<M> a;
  1066. }
  1067. };
  1068. auto endFn = [] {};
  1069. return run_once(nthreads, init, fn, endFn);
  1070. };
  1071. return bench(name, ops, repFn);
  1072. }
  1073. inline uint64_t obj_bench(std::string name, int nthreads) {
  1074. struct Foo : public hazptr_obj_base<Foo> {};
  1075. auto repFn = [&] {
  1076. auto init = [] {};
  1077. auto fn = [&](int tid) {
  1078. for (int j = tid; j < ops; j += nthreads) {
  1079. auto p = new Foo;
  1080. p->retire();
  1081. }
  1082. };
  1083. auto endFn = [] {};
  1084. return run_once(nthreads, init, fn, endFn);
  1085. };
  1086. return bench(name, ops, repFn);
  1087. }
  1088. uint64_t list_hoh_bench(
  1089. std::string name,
  1090. int nthreads,
  1091. int size,
  1092. bool provided = false) {
  1093. auto repFn = [&] {
  1094. List<Node<>> l(size);
  1095. auto init = [&] {};
  1096. auto fn = [&](int tid) {
  1097. if (provided) {
  1098. hazptr_local<2> hptr;
  1099. for (int j = tid; j < ops; j += nthreads) {
  1100. l.hand_over_hand(size, &hptr[0], &hptr[1]);
  1101. }
  1102. } else {
  1103. for (int j = tid; j < ops; j += nthreads) {
  1104. l.hand_over_hand(size);
  1105. }
  1106. }
  1107. };
  1108. auto endFn = [] {};
  1109. return run_once(nthreads, init, fn, endFn);
  1110. };
  1111. return bench(name, ops, repFn);
  1112. }
  1113. uint64_t list_protect_all_bench(
  1114. std::string name,
  1115. int nthreads,
  1116. int size,
  1117. bool provided = false) {
  1118. auto repFn = [&] {
  1119. List<NodeRC<true>> l(size);
  1120. auto init = [] {};
  1121. auto fn = [&](int tid) {
  1122. if (provided) {
  1123. hazptr_local<1> hptr;
  1124. for (int j = tid; j < ops; j += nthreads) {
  1125. l.protect_all(size, hptr[0]);
  1126. }
  1127. } else {
  1128. for (int j = tid; j < ops; j += nthreads) {
  1129. l.protect_all(size);
  1130. }
  1131. }
  1132. };
  1133. auto endFn = [] {};
  1134. return run_once(nthreads, init, fn, endFn);
  1135. };
  1136. return bench(name, ops, repFn);
  1137. }
  1138. uint64_t cleanup_bench(std::string name, int nthreads) {
  1139. auto repFn = [&] {
  1140. auto init = [] {};
  1141. auto fn = [&](int) {
  1142. hazptr_holder<std::atomic> hptr;
  1143. for (int i = 0; i < 1000; i++) {
  1144. hazptr_cleanup();
  1145. }
  1146. };
  1147. auto endFn = [] {};
  1148. return run_once(nthreads, init, fn, endFn);
  1149. };
  1150. return bench(name, ops, repFn);
  1151. }
// Thread counts and list sizes exercised by benches() below.
const int nthr[] = {1, 10};
const int sizes[] = {10, 20};
// Runs the full benchmark report: for each thread count in nthr, prints a
// header followed by one line per benchmark (the label is printed here;
// max/avg/min columns are printed by bench()).
void benches() {
for (int i : nthr) {
std::cout << "================================ " << std::setw(2) << i
<< " threads "
<< "================================" << std::endl;
std::cout << "10x construct/destruct hazptr_holder ";
holder_bench("", i);
std::cout << "10x construct/destruct hazptr_array<1> ";
array_bench<1>("", i);
std::cout << "10x construct/destruct hazptr_array<2> ";
array_bench<2>("", i);
std::cout << "10x construct/destruct hazptr_array<3> ";
array_bench<3>("", i);
std::cout << "10x construct/destruct hazptr_local<1> ";
local_bench<1>("", i);
std::cout << "10x construct/destruct hazptr_local<2> ";
local_bench<2>("", i);
std::cout << "10x construct/destruct hazptr_local<3> ";
local_bench<3>("", i);
std::cout << "allocate/retire/reclaim object ";
obj_bench("", i);
// List benchmarks, for each list size in sizes.
for (int j : sizes) {
std::cout << j << "-item list hand-over-hand - own hazptrs ";
list_hoh_bench("", i, j, true);
std::cout << j << "-item list hand-over-hand ";
list_hoh_bench("", i, j);
std::cout << j << "-item list protect all - own hazptr ";
list_protect_all_bench("", i, j, true);
std::cout << j << "-item list protect all ";
list_protect_all_bench("", i, j);
}
std::cout << "hazptr_cleanup ";
cleanup_bench("", i);
}
}
// Runs the full benchmark suite, but only when the --bench flag is set;
// by default this test is a no-op.
TEST(HazptrTest, bench) {
if (FLAGS_bench) {
benches();
}
}
  1194. /*
  1195. $ numactl -N 1 ./buck-out/gen/folly/synchronization/test/hazptr_test --bench
  1196. ================================ 1 threads ================================
  1197. 10x construct/destruct hazptr_holder 51 ns 51 ns 50 ns
  1198. 10x construct/destruct hazptr_array<1> 54 ns 52 ns 52 ns
  1199. 10x construct/destruct hazptr_array<2> 60 ns 59 ns 58 ns
  1200. 10x construct/destruct hazptr_array<3> 141 ns 88 ns 82 ns
  1201. 10x construct/destruct hazptr_local<1> 13 ns 12 ns 12 ns
  1202. 10x construct/destruct hazptr_local<2> 15 ns 15 ns 15 ns
  1203. 10x construct/destruct hazptr_local<3> 39 ns 39 ns 38 ns
  1204. allocate/retire/reclaim object 70 ns 68 ns 67 ns
  1205. 10-item list hand-over-hand - own hazptrs 22 ns 20 ns 18 ns
  1206. 10-item list hand-over-hand 28 ns 25 ns 22 ns
  1207. 10-item list protect all - own hazptr 12 ns 11 ns 11 ns
  1208. 10-item list protect all 22 ns 13 ns 12 ns
  1209. 20-item list hand-over-hand - own hazptrs 42 ns 40 ns 38 ns
  1210. 20-item list hand-over-hand 48 ns 43 ns 41 ns
  1211. 20-item list protect all - own hazptr 28 ns 28 ns 28 ns
  1212. 20-item list protect all 31 ns 29 ns 29 ns
  1213. hazptr_cleanup 2 ns 1 ns 1 ns
  1214. ================================ 10 threads ================================
  1215. 10x construct/destruct hazptr_holder 11 ns 8 ns 8 ns
  1216. 10x construct/destruct hazptr_array<1> 8 ns 7 ns 7 ns
  1217. 10x construct/destruct hazptr_array<2> 9 ns 9 ns 9 ns
  1218. 10x construct/destruct hazptr_array<3> 19 ns 17 ns 14 ns
  1219. 10x construct/destruct hazptr_local<1> 8 ns 8 ns 8 ns
  1220. 10x construct/destruct hazptr_local<2> 8 ns 8 ns 7 ns
  1221. 10x construct/destruct hazptr_local<3> 11 ns 11 ns 10 ns
  1222. allocate/retire/reclaim object 20 ns 17 ns 16 ns
  1223. 10-item list hand-over-hand - own hazptrs 3 ns 3 ns 3 ns
  1224. 10-item list hand-over-hand 3 ns 3 ns 3 ns
  1225. 10-item list protect all - own hazptr 2 ns 2 ns 2 ns
  1226. 10-item list protect all 2 ns 2 ns 2 ns
  1227. 20-item list hand-over-hand - own hazptrs 6 ns 6 ns 6 ns
  1228. 20-item list hand-over-hand 6 ns 6 ns 6 ns
  1229. 20-item list protect all - own hazptr 4 ns 4 ns 4 ns
  1230. 20-item list protect all 5 ns 4 ns 4 ns
  1231. hazptr_cleanup 119 ns 113 ns 97 ns
  1232. */