/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <cassert>
#include <cerrno>
#include <cstddef>
#include <cstdlib>
#include <exception>
#include <limits>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <utility>

#include <folly/ConstexprMath.h>
#include <folly/Likely.h>
#include <folly/Traits.h>
#include <folly/functional/Invoke.h>
#include <folly/lang/Align.h>
#include <folly/lang/Exception.h>
#include <folly/portability/Config.h>
#include <folly/portability/Malloc.h>

namespace folly {

/// allocateBytes and deallocateBytes work like a checkedMalloc/free pair,
/// but take advantage of sized deletion when available
inline void* allocateBytes(size_t n) {
  return ::operator new(n);
}

inline void deallocateBytes(void* p, size_t n) {
#if __cpp_sized_deallocation
  return ::operator delete(p, n);
#else
  (void)n;
  return ::operator delete(p);
#endif
}
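
// Example (illustrative only, not part of the API): the same byte count
// must be passed to deallocateBytes that was passed to allocateBytes, so
// that the sized form of operator delete can be used when available.
//
//   void* p = folly::allocateBytes(128);
//   // ... use the 128-byte buffer ...
//   folly::deallocateBytes(p, 128);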

#if _POSIX_C_SOURCE >= 200112L || _XOPEN_SOURCE >= 600 || \
    (defined(__ANDROID__) && (__ANDROID_API__ > 16)) ||   \
    (defined(__APPLE__) &&                                \
     (__MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_6 ||    \
      __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_3_0))

inline void* aligned_malloc(size_t size, size_t align) {
  // use posix_memalign, but mimic the behaviour of memalign
  void* ptr = nullptr;
  int rc = posix_memalign(&ptr, align, size);
  return rc == 0 ? (errno = 0, ptr) : (errno = rc, nullptr);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#elif defined(_WIN32)

inline void* aligned_malloc(size_t size, size_t align) {
  return _aligned_malloc(size, align);
}

inline void aligned_free(void* aligned_ptr) {
  _aligned_free(aligned_ptr);
}

#else

inline void* aligned_malloc(size_t size, size_t align) {
  return memalign(align, size);
}

inline void aligned_free(void* aligned_ptr) {
  free(aligned_ptr);
}

#endif
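
// Example (illustrative only): requesting 64-byte-aligned storage. The
// alignment should be a power of two no smaller than sizeof(void*), and
// the result must be released with aligned_free, not free.
//
//   void* p = folly::aligned_malloc(1024, 64);
//   if (p != nullptr) {
//     // ... use the buffer ...
//     folly::aligned_free(p);
//   }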

namespace detail {
template <typename Alloc, size_t kAlign, bool kAllocate>
void rawOverAlignedImpl(Alloc const& alloc, size_t n, void*& raw) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr bool kCanBypass = std::is_same<Alloc, std::allocator<T>>::value;

  // BaseType is a type that gives us as much alignment as we need if
  // we can get it naturally, otherwise it is aligned as max_align_t.
  // kBaseAlign is both the alignment and size of this type.
  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));
  using BaseType = std::aligned_storage_t<kBaseAlign, kBaseAlign>;
  using BaseAllocTraits =
      typename AllocTraits::template rebind_traits<BaseType>;
  using BaseAlloc = typename BaseAllocTraits::allocator_type;
  static_assert(
      sizeof(BaseType) == kBaseAlign && alignof(BaseType) == kBaseAlign, "");

#if __cpp_sized_deallocation
  if (kCanBypass && kAlign == kBaseAlign) {
    // until std::allocator uses sized deallocation, it is worth the
    // effort to bypass it when we are able
    if (kAllocate) {
      raw = ::operator new(n * sizeof(T));
    } else {
      ::operator delete(raw, n * sizeof(T));
    }
    return;
  }
#endif

  if (kCanBypass && kAlign > kBaseAlign) {
    // allocating as BaseType isn't sufficient to get alignment, but
    // since we can bypass Alloc we can use something like posix_memalign
    if (kAllocate) {
      raw = aligned_malloc(n * sizeof(T), kAlign);
    } else {
      aligned_free(raw);
    }
    return;
  }

  // we're not allowed to bypass Alloc, or we don't want to
  BaseAlloc a(alloc);

  // allocation size is counted in sizeof(BaseType)
  size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / sizeof(BaseType);
  if (kAlign <= kBaseAlign) {
    // rebinding Alloc to BaseType is sufficient to get us the alignment
    // we want, happy path
    if (kAllocate) {
      raw = static_cast<void*>(
          std::addressof(*BaseAllocTraits::allocate(a, quanta)));
    } else {
      BaseAllocTraits::deallocate(
          a,
          std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
              *static_cast<BaseType*>(raw)),
          quanta);
    }
    return;
  }

  // Overaligned and custom allocator, our only option is to
  // overallocate and store a delta to the actual allocation just
  // before the returned ptr.
  //
  // If we give ourselves kAlign extra bytes, then since
  // sizeof(BaseType) divides kAlign we can meet alignment while
  // getting a prefix of one BaseType. If we happen to get a
  // kAlign-aligned block, then we can return a pointer to underlying
  // + kAlign, otherwise there will be at least kBaseAlign bytes in
  // the unused prefix of the first kAlign-aligned block.
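  //
  // Worked example (illustrative): with kAlign = 64 and kBaseAlign = 16,
  // suppose the underlying allocation begins at address 0x1010. Then
  // byteDelta = 64 - (0x1010 % 64) = 48, the caller receives 0x1040
  // (which is 64-byte aligned), and byteDelta is stashed in the size_t
  // slot just below the returned pointer, inside the 48-byte prefix.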
  if (kAllocate) {
    char* base = reinterpret_cast<char*>(std::addressof(
        *BaseAllocTraits::allocate(a, quanta + kAlign / sizeof(BaseType))));
    size_t byteDelta =
        kAlign - (reinterpret_cast<uintptr_t>(base) & (kAlign - 1));
    raw = static_cast<void*>(base + byteDelta);
    static_cast<size_t*>(raw)[-1] = byteDelta;
  } else {
    size_t byteDelta = static_cast<size_t*>(raw)[-1];
    char* base = static_cast<char*>(raw) - byteDelta;
    BaseAllocTraits::deallocate(
        a,
        std::pointer_traits<typename BaseAllocTraits::pointer>::pointer_to(
            *reinterpret_cast<BaseType*>(base)),
        quanta + kAlign / sizeof(BaseType));
  }
}
} // namespace detail

// Works like std::allocator_traits<Alloc>::allocate, but handles
// over-aligned types. Feel free to manually specify any power of two as
// the Align template arg. Must be matched with deallocateOverAligned.
// allocationBytesForOverAligned will give you the number of bytes that
// this function actually requests.
template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
typename std::allocator_traits<Alloc>::pointer allocateOverAligned(
    Alloc const& alloc,
    size_t n) {
  void* raw = nullptr;
  detail::rawOverAlignedImpl<Alloc, kAlign, true>(alloc, n, raw);
  return std::pointer_traits<typename std::allocator_traits<Alloc>::pointer>::
      pointer_to(
          *static_cast<typename std::allocator_traits<Alloc>::value_type*>(
              raw));
}

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
void deallocateOverAligned(
    Alloc const& alloc,
    typename std::allocator_traits<Alloc>::pointer ptr,
    size_t n) {
  void* raw = static_cast<void*>(std::addressof(*ptr));
  detail::rawOverAlignedImpl<Alloc, kAlign, false>(alloc, n, raw);
}
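
// Example (illustrative only): allocating space for four floats at 64-byte
// alignment through std::allocator, then freeing it with the matching
// alignment argument.
//
//   std::allocator<float> alloc;
//   float* p =
//       folly::allocateOverAligned<std::allocator<float>, 64>(alloc, 4);
//   // ... p is at least 64-byte aligned ...
//   folly::deallocateOverAligned<std::allocator<float>, 64>(alloc, p, 4);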

template <
    typename Alloc,
    size_t kAlign = alignof(typename std::allocator_traits<Alloc>::value_type)>
size_t allocationBytesForOverAligned(size_t n) {
  static_assert((kAlign & (kAlign - 1)) == 0, "Align must be a power of 2");

  using AllocTraits = std::allocator_traits<Alloc>;
  using T = typename AllocTraits::value_type;

  constexpr size_t kBaseAlign = constexpr_min(kAlign, alignof(max_align_t));

  if (kAlign > kBaseAlign && std::is_same<Alloc, std::allocator<T>>::value) {
    return n * sizeof(T);
  } else {
    size_t quanta = (n * sizeof(T) + kBaseAlign - 1) / kBaseAlign;
    if (kAlign > kBaseAlign) {
      quanta += kAlign / kBaseAlign;
    }
    return quanta * kBaseAlign;
  }
}
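
// Worked example (illustrative): for a custom (non-std::allocator) Alloc
// with kAlign = 64, kBaseAlign = alignof(max_align_t) = 16 on a typical
// 64-bit platform, and n = 10 ints: quanta = (40 + 15) / 16 + 64 / 16 = 7,
// so 7 * 16 = 112 bytes are requested, including the alignment slack.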

/**
 * For exception safety and consistency with make_shared. Erase me when
 * we have std::make_unique().
 *
 * @author Louis Brandy (ldbrandy@fb.com)
 * @author Xu Ning (xning@fb.com)
 */

#if __cplusplus >= 201402L || __cpp_lib_make_unique >= 201304L || \
    (__ANDROID__ && __cplusplus >= 201300L) || _MSC_VER >= 1900

/* using override */ using std::make_unique;

#else

template <typename T, typename... Args>
typename std::enable_if<!std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(Args&&... args) {
  return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
}

// Allows 'make_unique<T[]>(10)'. (N3690 s20.9.1.4 p3-4)
template <typename T>
typename std::enable_if<std::is_array<T>::value, std::unique_ptr<T>>::type
make_unique(const size_t n) {
  return std::unique_ptr<T>(new typename std::remove_extent<T>::type[n]());
}

// Disallows 'make_unique<T[10]>()'. (N3690 s20.9.1.4 p5)
template <typename T, typename... Args>
typename std::enable_if<std::extent<T>::value != 0, std::unique_ptr<T>>::type
make_unique(Args&&...) = delete;

#endif

/**
 * static_function_deleter
 *
 * So you can write this:
 *
 *   using RSA_deleter = folly::static_function_deleter<RSA, &RSA_free>;
 *   auto rsa = std::unique_ptr<RSA, RSA_deleter>(RSA_new());
 *   RSA_generate_key_ex(rsa.get(), bits, exponent, nullptr);
 *   rsa = nullptr; // calls RSA_free(rsa.get())
 *
 * This would be sweet as well for BIO, but unfortunately BIO_free has
 * signature int(BIO*) while we require signature void(BIO*). So you would
 * need to make a wrapper for it:
 *
 *   inline void BIO_free_fb(BIO* bio) { CHECK_EQ(1, BIO_free(bio)); }
 *   using BIO_deleter = folly::static_function_deleter<BIO, &BIO_free_fb>;
 *   auto buf = std::unique_ptr<BIO, BIO_deleter>(BIO_new(BIO_s_mem()));
 *   buf = nullptr; // calls BIO_free(buf.get())
 */

template <typename T, void (*f)(T*)>
struct static_function_deleter {
  void operator()(T* t) const {
    f(t);
  }
};

/**
 * to_shared_ptr
 *
 * Convert a unique_ptr to a shared_ptr without specifying the template type
 * parameter, letting the compiler deduce it instead.
 *
 * So you can write this:
 *
 *   auto sptr = to_shared_ptr(getSomethingUnique<T>());
 *
 * Instead of this:
 *
 *   auto sptr = shared_ptr<T>(getSomethingUnique<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T, typename D>
std::shared_ptr<T> to_shared_ptr(std::unique_ptr<T, D>&& ptr) {
  return std::shared_ptr<T>(std::move(ptr));
}

/**
 * to_weak_ptr
 *
 * Make a weak_ptr from a shared_ptr without specifying the template type
 * parameter, letting the compiler deduce it instead.
 *
 * So you can write this:
 *
 *   auto wptr = to_weak_ptr(getSomethingShared<T>());
 *
 * Instead of this:
 *
 *   auto wptr = weak_ptr<T>(getSomethingShared<T>());
 *
 * Useful when `T` is long, such as:
 *
 *   using T = foobar::FooBarAsyncClient;
 */
template <typename T>
std::weak_ptr<T> to_weak_ptr(const std::shared_ptr<T>& ptr) {
  return std::weak_ptr<T>(ptr);
}

namespace detail {
template <typename T>
struct lift_void_to_char {
  using type = T;
};
template <>
struct lift_void_to_char<void> {
  using type = char;
};
} // namespace detail

/**
 * SysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps std::malloc and
 * std::free.
 */
template <typename T>
class SysAllocator {
 private:
  using Self = SysAllocator<T>;

 public:
  using value_type = T;

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const p = std::malloc(sizeof(lifted) * count);
    if (!p) {
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    std::free(p);
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};
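
// Example (illustrative only): SysAllocator can serve as a standard
// container's allocator, routing allocations through std::malloc and
// std::free rather than operator new and operator delete.
//
//   std::vector<int, folly::SysAllocator<int>> vec;
//   vec.assign({1, 2, 3});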

class DefaultAlign {
 private:
  using Self = DefaultAlign;
  std::size_t align_;

 public:
  explicit DefaultAlign(std::size_t align) noexcept : align_(align) {
    assert(!(align_ < sizeof(void*)) && bool("bad align: too small"));
    assert(!(align_ & (align_ - 1)) && bool("bad align: not power-of-two"));
  }
  std::size_t operator()() const noexcept {
    return align_;
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align_ == b.align_;
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align_ != b.align_;
  }
};

template <std::size_t Align>
class FixedAlign {
 private:
  static_assert(!(Align < sizeof(void*)), "bad align: too small");
  static_assert(!(Align & (Align - 1)), "bad align: not power-of-two");
  using Self = FixedAlign<Align>;

 public:
  constexpr std::size_t operator()() const noexcept {
    return Align;
  }

  friend bool operator==(Self const&, Self const&) noexcept {
    return true;
  }
  friend bool operator!=(Self const&, Self const&) noexcept {
    return false;
  }
};

/**
 * AlignedSysAllocator
 *
 * Resembles std::allocator, the default Allocator, but wraps aligned_malloc
 * and aligned_free.
 *
 * Accepts a policy parameter for providing the alignment, which must:
 *   * be invocable as std::size_t() noexcept, returning the alignment
 *   * be noexcept-copy-constructible
 *   * have noexcept operator==
 *   * have noexcept operator!=
 *   * not be final
 *
 * DefaultAlign and FixedAlign<std::size_t>, provided above, are valid
 * policies.
 */
template <typename T, typename Align = DefaultAlign>
class AlignedSysAllocator : private Align {
 private:
  using Self = AlignedSysAllocator<T, Align>;

  template <typename, typename>
  friend class AlignedSysAllocator;

  constexpr Align const& align() const {
    return *this;
  }

 public:
  static_assert(std::is_nothrow_copy_constructible<Align>::value, "");
  static_assert(is_nothrow_invocable_r<std::size_t, Align>::value, "");

  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  using Align::Align;

  // TODO: remove this ctor, which is required only by gcc49
  template <
      typename S = Align,
      std::enable_if_t<std::is_default_constructible<S>::value, int> = 0>
  constexpr AlignedSysAllocator() noexcept(noexcept(Align())) : Align() {}

  template <typename U>
  constexpr explicit AlignedSysAllocator(
      AlignedSysAllocator<U, Align> const& other) noexcept
      : Align(other.align()) {}

  T* allocate(size_t count) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    auto const p = aligned_malloc(sizeof(lifted) * count, align()());
    if (!p) {
      if (FOLLY_UNLIKELY(errno != ENOMEM)) {
        std::terminate();
      }
      throw_exception<std::bad_alloc>();
    }
    return static_cast<T*>(p);
  }
  void deallocate(T* p, size_t /* count */) {
    aligned_free(p);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return a.align() == b.align();
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return a.align() != b.align();
  }
};
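
// Example (illustrative only): both provided policies in use. FixedAlign
// bakes the alignment into the type, while DefaultAlign carries it as a
// runtime value.
//
//   using FixedAlloc =
//       folly::AlignedSysAllocator<float, folly::FixedAlign<64>>;
//   std::vector<float, FixedAlloc> a; // all allocations 64-byte aligned
//
//   using DynAlloc = folly::AlignedSysAllocator<float>; // DefaultAlign
//   std::vector<float, DynAlloc> b{DynAlloc{64}};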

/**
 * CxxAllocatorAdaptor
 *
 * A type conforming to C++ concept Allocator, delegating operations to an
 * unowned Inner which has this required interface:
 *
 *   void* allocate(std::size_t)
 *   void deallocate(void*, std::size_t)
 *
 * Note that Inner is *not* a C++ Allocator.
 */
template <typename T, class Inner>
class CxxAllocatorAdaptor {
 private:
  using Self = CxxAllocatorAdaptor<T, Inner>;

  template <typename U, typename UAlloc>
  friend class CxxAllocatorAdaptor;

  std::reference_wrapper<Inner> ref_;

 public:
  using value_type = T;

  using propagate_on_container_copy_assignment = std::true_type;
  using propagate_on_container_move_assignment = std::true_type;
  using propagate_on_container_swap = std::true_type;

  explicit CxxAllocatorAdaptor(Inner& ref) : ref_(ref) {}

  template <typename U>
  explicit CxxAllocatorAdaptor(CxxAllocatorAdaptor<U, Inner> const& other)
      : ref_(other.ref_) {}

  T* allocate(std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    return static_cast<T*>(ref_.get().allocate(sizeof(lifted) * n));
  }
  void deallocate(T* p, std::size_t n) {
    using lifted = typename detail::lift_void_to_char<T>::type;
    ref_.get().deallocate(p, sizeof(lifted) * n);
  }

  friend bool operator==(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) == std::addressof(b.ref_.get());
  }
  friend bool operator!=(Self const& a, Self const& b) noexcept {
    return std::addressof(a.ref_.get()) != std::addressof(b.ref_.get());
  }
};
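
// Example (illustrative only; Arena here is a hypothetical stand-in for
// any type with the required allocate/deallocate member functions):
//
//   Arena arena;
//   folly::CxxAllocatorAdaptor<int, Arena> alloc(arena);
//   std::vector<int, folly::CxxAllocatorAdaptor<int, Arena>> vec(alloc);
//
// The adaptor holds only a reference; arena must outlive vec.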

/*
 * allocator_delete
 *
 * A deleter which automatically works with a given allocator.
 *
 * Derives from the allocator to take advantage of the empty base
 * optimization when possible.
 */
template <typename Alloc>
class allocator_delete : private std::remove_reference<Alloc>::type {
 private:
  using allocator_type = typename std::remove_reference<Alloc>::type;
  using allocator_traits = std::allocator_traits<allocator_type>;
  using value_type = typename allocator_traits::value_type;
  using pointer = typename allocator_traits::pointer;

 public:
  allocator_delete() = default;
  allocator_delete(allocator_delete const&) = default;
  allocator_delete(allocator_delete&&) = default;
  allocator_delete& operator=(allocator_delete const&) = default;
  allocator_delete& operator=(allocator_delete&&) = default;

  explicit allocator_delete(const allocator_type& alloc)
      : allocator_type(alloc) {}

  explicit allocator_delete(allocator_type&& alloc)
      : allocator_type(std::move(alloc)) {}

  template <typename U>
  allocator_delete(const allocator_delete<U>& other)
      : allocator_type(other.get_allocator()) {}

  allocator_type const& get_allocator() const {
    return *this;
  }

  void operator()(pointer p) const {
    auto alloc = get_allocator();
    allocator_traits::destroy(alloc, p);
    allocator_traits::deallocate(alloc, p, 1);
  }
};

/**
 * allocate_unique, like std::allocate_shared but for std::unique_ptr
 */
template <typename T, typename Alloc, typename... Args>
std::unique_ptr<T, allocator_delete<Alloc>> allocate_unique(
    Alloc const& alloc,
    Args&&... args) {
  using traits = std::allocator_traits<Alloc>;
  struct DeferCondDeallocate {
    bool& cond;
    Alloc& copy;
    T* p;
    ~DeferCondDeallocate() {
      if (FOLLY_UNLIKELY(!cond)) {
        traits::deallocate(copy, p, 1);
      }
    }
  };
  auto copy = alloc;
  auto const p = traits::allocate(copy, 1);
  {
    bool constructed = false;
    DeferCondDeallocate handler{constructed, copy, p};
    traits::construct(copy, p, static_cast<Args&&>(args)...);
    constructed = true;
  }
  return {p, allocator_delete<Alloc>(std::move(copy))};
}
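
// Example (illustrative only; Widget and its constructor argument are
// hypothetical): the returned unique_ptr destroys and deallocates through
// a copy of the given allocator.
//
//   folly::SysAllocator<Widget> alloc;
//   auto w = folly::allocate_unique<Widget>(alloc, 42);
//   // decltype(w) is std::unique_ptr<Widget,
//   //     folly::allocator_delete<folly::SysAllocator<Widget>>>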

struct SysBufferDeleter {
  void operator()(void* ptr) {
    std::free(ptr);
  }
};
using SysBufferUniquePtr = std::unique_ptr<void, SysBufferDeleter>;

inline SysBufferUniquePtr allocate_sys_buffer(std::size_t size) {
  auto p = std::malloc(size);
  if (!p) {
    throw_exception<std::bad_alloc>();
  }
  return {p, {}};
}
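
// Example (illustrative only): a malloc'd buffer whose lifetime is managed
// by unique_ptr, with std::free invoked automatically at scope exit.
//
//   auto buf = folly::allocate_sys_buffer(4096);
//   std::memset(buf.get(), 0, 4096);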

/**
 * AllocatorHasTrivialDeallocate
 *
 * Unambiguously inherits std::integral_constant<bool, V> for some bool V.
 *
 * Describes whether a C++ allocator has a trivial, i.e. no-op, deallocate().
 *
 * May also be used to describe types which may be used with
 * CxxAllocatorAdaptor.
 */
template <typename Alloc>
struct AllocatorHasTrivialDeallocate : std::false_type {};

template <typename T, class Alloc>
struct AllocatorHasTrivialDeallocate<CxxAllocatorAdaptor<T, Alloc>>
    : AllocatorHasTrivialDeallocate<Alloc> {};

namespace detail {
// note that construct and destroy here are methods, not short names for
// the constructor and destructor
FOLLY_CREATE_MEMBER_INVOKE_TRAITS(AllocatorConstruct_, construct);
FOLLY_CREATE_MEMBER_INVOKE_TRAITS(AllocatorDestroy_, destroy);

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_
    : AllocatorConstruct_::template is_invocable<Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesConstruct_<
    void_t<typename Alloc::folly_has_default_object_construct>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_construct> {};

template <typename Void, typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_
    : AllocatorDestroy_::template is_invocable<Alloc, Args...> {};

template <typename Alloc, typename... Args>
struct AllocatorCustomizesDestroy_<
    void_t<typename Alloc::folly_has_default_object_destroy>,
    Alloc,
    Args...> : Negation<typename Alloc::folly_has_default_object_destroy> {};
} // namespace detail

/**
 * AllocatorHasDefaultObjectConstruct
 *
 * AllocatorHasDefaultObjectConstruct<A, T, Args...> unambiguously
 * inherits std::integral_constant<bool, V>, where V will be true iff
 * the effect of std::allocator_traits<A>::construct(a, p, args...) is
 * the same as new (static_cast<void*>(p)) T(args...). If true then
 * any optimizations applicable to object construction (relying on
 * std::is_trivially_copyable<T>, for example) can be applied to objects
 * in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_construct. It is helpful to do this if you
 * define a custom allocator type that defines a construct method, but
 * that method doesn't do anything except call placement new.
 */
template <typename Alloc, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct
    : Negation<
          detail::AllocatorCustomizesConstruct_<void, Alloc, T*, Args...>> {};

template <typename Value, typename T, typename... Args>
struct AllocatorHasDefaultObjectConstruct<std::allocator<Value>, T, Args...>
    : std::true_type {};

/**
 * AllocatorHasDefaultObjectDestroy
 *
 * AllocatorHasDefaultObjectDestroy<A, T> unambiguously inherits
 * std::integral_constant<bool, V>, where V will be true iff the effect
 * of std::allocator_traits<A>::destroy(a, p) is the same as p->~T().
 * If true then optimizations applicable to object destruction (relying
 * on std::is_trivially_destructible<T>, for example) can be applied to
 * objects in an allocator-aware container using an allocator of type A.
 *
 * Allocator types can override V by declaring a type alias for
 * folly_has_default_object_destroy. It is helpful to do this if you
 * define a custom allocator type that defines a destroy method, but that
 * method doesn't do anything except call the object's destructor.
 */
template <typename Alloc, typename T>
struct AllocatorHasDefaultObjectDestroy
    : Negation<detail::AllocatorCustomizesDestroy_<void, Alloc, T*>> {};

template <typename Value, typename T>
struct AllocatorHasDefaultObjectDestroy<std::allocator<Value>, T>
    : std::true_type {};
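
// Example (illustrative only): a hypothetical allocator whose construct()
// merely forwards to placement new can opt back in to the default-object
// optimizations by declaring the aliases described above.
//
//   template <typename T>
//   struct CountingAlloc : std::allocator<T> {
//     using folly_has_default_object_construct = std::true_type;
//     using folly_has_default_object_destroy = std::true_type;
//     template <typename U, typename... A>
//     void construct(U* p, A&&... a) {
//       ::new (static_cast<void*>(p)) U(static_cast<A&&>(a)...);
//     }
//   };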

} // namespace folly