proxsuite 0.6.7
The Advanced Proximal Optimization Toolbox
vec.hpp
Go to the documentation of this file.
1#ifndef VEG_VECTOR_HPP_QWFSH3ROS
2#define VEG_VECTOR_HPP_QWFSH3ROS
3
10
12
13namespace proxsuite {
14namespace linalg {
15namespace veg {
16namespace _detail {
17namespace _vector {
18namespace adl {
19struct AdlBase
20{};
21} // namespace adl
22} // namespace _vector
23} // namespace _detail
24
25namespace _detail {
26template<typename T>
27VEG_INLINE constexpr auto
28min2(T a, T b) noexcept -> T
29{
30 return (static_cast<T const&>(a) < static_cast<T const&>(b)) ? VEG_FWD(a)
31 : VEG_FWD(b);
32}
33template<typename T>
34VEG_INLINE constexpr auto
35max2(T a, T b) noexcept -> T
36{
37 return (static_cast<T const&>(a) < static_cast<T const&>(b)) ? VEG_FWD(b)
38 : VEG_FWD(a);
39}
40
41namespace _collections {
42VEG_INLINE constexpr auto
43vector_grow_compute(usize current_cap) noexcept -> usize
44{
45 return current_cap + current_cap;
46}
47// new_cap must be larger than current_cap
48VEG_INLINE constexpr auto
49vector_grow_choose(usize current_cap, usize new_cap) noexcept -> usize
50{
51 return _detail::max2(_collections::vector_grow_compute(current_cap),
52 new_cap);
53}
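// Editor's illustration, not part of vec.hpp: vector_grow_choose() returns
// max(growth(current_cap), requested_cap), so a reservation never shrinks the
// request while capacity still grows geometrically, which is what keeps
// repeated push() amortized O(1). A standalone sketch of the same policy
// (the factor-2 growth is an assumption made for this example):

#include <cstddef>

constexpr std::size_t example_grow_compute(std::size_t current_cap) noexcept
{
  return current_cap + current_cap; // geometric growth, factor 2
}

constexpr std::size_t example_grow_choose(std::size_t current_cap,
                                          std::size_t new_cap) noexcept
{
  return example_grow_compute(current_cap) > new_cap
           ? example_grow_compute(current_cap)
           : new_cap;
}

// e.g. example_grow_choose(4, 5) == 8 and example_grow_choose(4, 100) == 100.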
54template<typename T>
55auto
56relocate(void* out, void const* in, usize nbytes) noexcept -> void*;
57
59{
60 static constexpr void* (*value)(void*, void const*, usize) = &mem::memmove;
61};
65
66template<typename T>
68{
69 static constexpr void* (*value)(void*,
70 void const*,
71 usize) = _collections::relocate<T>;
72};
73} // namespace _collections
74} // namespace _detail
75
76namespace collections {
77template<typename T>
80} // namespace collections
81
82namespace vector {
83template<typename T>
84struct RawVector
85{
86 T* data;
87 T* end;
88 T* end_alloc;
89
90 VEG_INLINE constexpr auto len() const noexcept -> usize
91 {
92 return static_cast<usize>(end - data);
93 }
94 VEG_INLINE constexpr auto cap() const noexcept -> usize
95 {
96 return static_cast<usize>(end_alloc - data);
97 }
98};
99} // namespace vector
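// Editor's illustration, not part of vec.hpp: RawVector above is the classic
// three-pointer representation. Length and capacity are derived from pointer
// differences, so the empty state is simply three null pointers. A minimal
// standalone equivalent for a concrete element type:

#include <cstddef>

struct RawIntVector
{
  int* data = nullptr;      // start of the allocation
  int* end = nullptr;       // one past the last constructed element
  int* end_alloc = nullptr; // one past the end of the allocation

  std::size_t len() const noexcept { return std::size_t(end - data); }
  std::size_t cap() const noexcept { return std::size_t(end_alloc - data); }
};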
100
101namespace _detail {
102namespace _collections {
103template<bool IsNoExcept>
104struct CloneImpl;
105
106template<bool NoThrow, typename T, typename A, typename C>
107struct CloneFn
108{
109 RefMut<A> alloc;
110 RefMut<C> cloner;
111 T const* in;
112 VEG_CPP14(constexpr)
113 auto operator()() VEG_NOEXCEPT_IF(NoThrow) -> T
114 {
115 return mem::Cloner<C>::clone(RefMut<C>(cloner), ref(*in), RefMut<A>(alloc));
116 }
117};
118
119template<>
120struct CloneImpl<true>
121{
122 template<typename T, typename A, typename C>
123 static VEG_CPP14(constexpr) void fn( //
124 RefMut<A> alloc,
125 RefMut<C> cloner,
126 T* out,
127 T* out_end,
128 T const* in) VEG_NOEXCEPT
129 {
130 for (; out < out_end; ++out, ++in) {
131 mem::construct_with(out, CloneFn<true, T, A, C>{ alloc, cloner, in });
132 }
133 }
134};
135
136template<>
137struct CloneImpl<false>
138{
139 template<typename T, typename A, typename C>
140 static void fn( //
141 RefMut<A> alloc,
142 RefMut<C> cloner,
143 T* out,
144 T* out_end,
145 T const* in) VEG_NOEXCEPT_IF(false)
146 {
147
148 Defer<Cleanup<T, A, C>> _{ { alloc, cloner, out, out_end } };
149 for (; _.fn.ptr < _.fn.ptr_end; ++_.fn.ptr, ++in) {
150 mem::construct_with(_.fn.ptr,
151 CloneFn<false, T, A, C>{
152 _.fn.alloc,
153 _.fn.cloner,
154 in,
155 });
156 }
157 }
158};
159
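// Editor's illustration, not part of vec.hpp: CloneImpl<false> above keeps a
// Defer<Cleanup<...>> guard so that, if cloning element k throws, the k
// elements already constructed are destroyed before the exception escapes.
// The same guarantee, written with plain try/catch instead of veg's Defer:

#include <cstddef>
#include <new>

template<typename T>
void uninit_copy_strong(T* out, T const* in, std::size_t n)
{
  std::size_t built = 0;
  try {
    for (; built < n; ++built) {
      ::new (static_cast<void*>(out + built)) T(in[built]);
    }
  } catch (...) {
    while (built > 0) {
      out[--built].~T(); // roll back the partially constructed prefix
    }
    throw;
  }
}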
160template<typename T, typename A, typename C>
161VEG_CPP14(constexpr)
162void slice_clone(RefMut<A> alloc,
163 RefMut<C> cloner,
164 T* out,
165 T* out_end,
166 T const* in)
167 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_clone<C, T, A>))
168{
169 CloneImpl<VEG_CONCEPT(alloc::nothrow_clone<C, T, A>)>::fn(
170 alloc, cloner, out, out_end, in);
171}
172
173template<typename T, typename A, typename C>
174VEG_CPP14(constexpr)
175void slice_clone_from(RefMut<A> alloc,
176 RefMut<C> cloner,
177 T* out,
178 T* out_end,
179 T const* in) VEG_NOEXCEPT_IF(true)
180{
181 while (true) {
182 if (out == out_end) {
183 break;
184 }
185 mem::Cloner<C>::clone_from(
186 RefMut<C>(cloner),
187 mut(*out),
188 ref(*in),
189 RefMut<A>(alloc));
190 ++out;
191 ++in;
192 }
193}
194
195template<typename T>
196auto
197relocate(void* out, void const* in, usize nbytes) noexcept -> void*
198{
199 T* out_T = static_cast<T*>(out);
200 T* in_T = const_cast<T*>(static_cast<T const*>(in));
201 usize n = nbytes / sizeof(T);
202
203 for (usize i = 0; i < n; ++i) {
204 mem::construct_at(out_T + i, static_cast<T&&>(in_T[i]));
205 in_T[i].~T();
206 }
207
208 return out;
209}
210
211template<typename T>
212auto
213relocate_backward(void* out, void const* in, usize nbytes) noexcept -> void*
214{
215 T* out_T = static_cast<T*>(out);
216 T* in_T = const_cast<T*>(static_cast<T const*>(in));
217 usize n = nbytes / sizeof(T);
218
219 for (usize i = 0; i < n; ++i) {
220 mem::construct_at(out_T + (n - i - 1), static_cast<T&&>(in_T[n - i - 1]));
221 in_T[n - i - 1].~T();
222 }
223
224 return out;
225}
226
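// Editor's illustration, not part of vec.hpp: relocate() walks forward and
// relocate_backward() walks backward so that each variant is safe for the
// direction in which its source and destination ranges overlap.
// push_mid_with shifts the tail one slot to the right with
// relocate_backward; a forward loop would overwrite elements before they had
// been moved. Standalone sketch:

#include <cstddef>
#include <new>
#include <utility>

template<typename T>
void shift_right_by_one(T* first, std::size_t n)
{
  // moves [first, first + n) into [first + 1, first + n + 1); the slot at
  // first + n is assumed to be uninitialized storage with room for one T
  for (std::size_t i = n; i > 0; --i) {
    ::new (static_cast<void*>(first + i)) T(std::move(first[i - 1]));
    first[i - 1].~T();
  }
}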
227template<typename A>
228struct AllocCleanup
229{
230 RefMut<A> alloc;
231 void* data;
232 mem::Layout layout;
233
234 VEG_INLINE VEG_CPP14(constexpr) void operator()()
235 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_dealloc<A>))
236 {
237 if (data != nullptr) {
238 mem::Alloc<A>::dealloc(
239 RefMut<A>(alloc), static_cast<void*>(data), mem::Layout(layout));
240 }
241 }
242};
243
244template<typename T, typename A, typename C>
245auto
246alloc_and_copy(RefMut<A> alloc, RefMut<C> cloner, T const* data, usize len)
247 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
248 VEG_CONCEPT(alloc::nothrow_clone<C, T, A>)) -> mem::AllocBlock
249{
250
251 mem::AllocBlock block = mem::Alloc<A>::alloc(
252 RefMut<A>(alloc), mem::Layout{ sizeof(T) * usize(len), alignof(T) });
253
254 // if copying fails, this takes care of deallocating
255 Defer<AllocCleanup<A>> _{ {
256 alloc,
257 block.data,
258 mem::Layout{ block.byte_cap, alignof(T) },
259 } };
260
261 // copy construct elements
262 _collections::slice_clone(_.fn.alloc,
263 cloner,
264 static_cast<T*>(block.data),
265 static_cast<T*>(block.data) + len,
266 data);
267
268 _.fn.data = nullptr;
269 return block;
270}
271
272template<typename T, typename A, typename C>
273auto
274realloc_and_append( //
275 RefMut<A> alloc,
276 RefMut<C> cloner,
277 mem::AllocBlock out,
278 usize out_len,
279 T const* in,
280 usize in_len)
281 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
282 VEG_CONCEPT(alloc::nothrow_clone<C, T, A>)) -> mem::AllocBlock
283{
284
285 if (in_len == 0) {
286 return out;
287 }
288
289 if (out.byte_cap >= (in_len + out_len) * sizeof(T)) {
290 mem::AllocBlock block = mem::Alloc<A>::grow(
291 RefMut<A>(alloc),
292 static_cast<void*>(out.data),
293 mem::Layout{ out.byte_cap, alignof(T) },
294 out_len * sizeof(T),
295 mem::RelocFn{ collections::relocate_pointer<T>::value });
296
297 // if copying fails, this takes care of deallocating
298 Defer<AllocCleanup<A>> _{ {
299 alloc,
300 block.data,
301 mem::Layout{ block.byte_cap, alignof(T) },
302 } };
303 // if copying fails, this takes care of destroying
304 Defer<Cleanup<T, A, C>> destroy{ {
305 _.fn.alloc,
306 cloner,
307 static_cast<T*>(block.data),
308 static_cast<T*>(block.data) + out_len,
309 } };
310
311 // copy construct elements
312 _collections::slice_clone(
313 destroy.fn.alloc,
314 destroy.fn.cloner,
315 static_cast<T*>(block.data) + out_len,
316 static_cast<T*>(block.data) + in_len,
317 in);
318 // disable destruction
319 destroy.fn.ptr = nullptr;
320 destroy.fn.ptr_end = nullptr;
321 // disable deallocation
322 _.fn.data = nullptr;
323 out = block;
324 } else {
325 // copy construct elements
326 _collections::slice_clone(
327 alloc,
328 cloner,
329 static_cast<T*>(out.data) + out_len,
330 static_cast<T*>(out.data) + in_len,
331 in);
332 }
333 return out;
334}
335
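// Editor's illustration, not part of vec.hpp: the mem::RelocFn handed to the
// allocator's grow() above is just a function pointer. Types that can be
// relocated by copying bytes use a memmove-style routine, everything else
// falls back to an element-wise move-construct + destroy loop like
// relocate<T>. A hypothetical selector (std::is_trivially_copyable stands in
// for veg's trivial-relocation trait):

#include <cstddef>
#include <cstring>
#include <new>
#include <type_traits>
#include <utility>

using example_reloc_fn = void* (*)(void*, void const*, std::size_t);

inline void* example_relocate_bytes(void* out, void const* in, std::size_t nbytes)
{
  return std::memmove(out, in, nbytes);
}

template<typename T>
void* example_relocate_elems(void* out, void const* in, std::size_t nbytes)
{
  T* dst = static_cast<T*>(out);
  T* src = const_cast<T*>(static_cast<T const*>(in));
  std::size_t n = nbytes / sizeof(T);
  for (std::size_t i = 0; i < n; ++i) {
    ::new (static_cast<void*>(dst + i)) T(std::move(src[i]));
    src[i].~T();
  }
  return out;
}

template<typename T>
example_reloc_fn example_select_reloc() noexcept
{
  return std::is_trivially_copyable<T>::value ? &example_relocate_bytes
                                              : &example_relocate_elems<T>;
}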
336template<bool TrivialAssign>
337struct CloneFromImpl;
338
339template<>
340struct CloneFromImpl<false>
341{
342 template<typename T, typename A, typename C>
343 static void fn(RefMut<A> lhs_alloc,
344 RefMut<C> cloner,
345 vector::RawVector<T>& lhs_raw,
346 Ref<A> rhs_alloc,
347 vector::RawVector<T> const rhs_raw)
348 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) && //
349 VEG_CONCEPT(alloc::nothrow_alloc<A>) && //
350 VEG_CONCEPT(alloc::nothrow_clone<C, T, A>) && //
351 VEG_CONCEPT(alloc::nothrow_clone_from<C, T, A>))
352 {
353
354 vector::RawVector<T> lhs_copy = lhs_raw;
355 usize rhs_len = (rhs_raw.end - rhs_raw.data);
356
357 if (!(lhs_alloc == rhs_alloc)) {
358 T* data = lhs_copy.data;
359 T* data_end = lhs_copy.end;
360
361 // clean up old alloc
362 _collections::backward_destroy(lhs_alloc, cloner, data, data_end);
363
364 // assign before deallocation in case it fails
365 lhs_raw = {};
366 lhs_copy = {};
367
368 // don't need to deallocate on backward_destroy failure, since lhs can
369 // still access and reuse the allocation
370 mem::Alloc<A>::dealloc(
371 RefMut<A>(lhs_alloc),
372 static_cast<void*>(data),
373 mem::Layout{ (lhs_copy.end_alloc - lhs_copy.data) * sizeof(T),
374 alignof(T) });
375 }
376
378
379 if (lhs_raw.data == nullptr) {
380 usize len = rhs_raw.end - rhs_raw.data;
381
382 mem::AllocBlock blk =
383 _collections::alloc_and_copy(lhs_alloc, cloner, rhs_raw.data, len);
384 T* data = static_cast<T*>(blk.data);
385 lhs_raw = {
386 data,
387 data + len,
388 data + blk.byte_cap / sizeof(T),
389 };
390 return;
391 }
392
393 usize assign_len = _detail::min2(lhs_copy.len(), rhs_raw.len());
394 // copy assign until the shared len
395 _collections::slice_clone_from( //
396 lhs_alloc,
397 cloner,
398 lhs_copy.data,
399 lhs_copy.data + assign_len,
400 rhs_raw.data);
401
402 // destroy from the shared len until end of lhs
403 lhs_raw.end = lhs_raw.data + assign_len;
404 _collections::backward_destroy( //
405 lhs_alloc,
406 cloner,
407 lhs_copy.data + assign_len,
408 lhs_copy.end);
409
410 // pass allocation to realloc_and_append
411
412 lhs_raw = {};
413 // realloc and copy construct new elements until end of rhs
414 mem::AllocBlock block = _collections::realloc_and_append(
415 lhs_alloc,
416 cloner,
417 mem::AllocBlock{
418 lhs_copy.data,
419 (lhs_copy.end_alloc - lhs_copy.data) * sizeof(T),
420 }, // out
421 assign_len, // out_len
422 rhs_raw.data + assign_len, // in
423 rhs_len - assign_len); // in_len
424
425 lhs_raw = vector::RawVector<T>{
426 static_cast<T*>(block.data),
427 static_cast<T*>(block.data) + rhs_len,
428 static_cast<T*>(block.data) + block.byte_cap / sizeof(T),
429 };
430 }
431};
432template<>
433struct CloneFromImpl<true>
434{
435 template<typename T, typename A, typename C>
436 static void fn(RefMut<A> lhs_alloc,
437 RefMut<C> cloner,
438 vector::RawVector<T>& lhs_raw,
439 Ref<A> rhs_alloc,
440 vector::RawVector<T> const rhs_raw)
441 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) &&
442 VEG_CONCEPT(alloc::nothrow_dealloc<A>) &&
443 VEG_CONCEPT(alloc::nothrow_alloc<A>))
444 {
445
446 vector::RawVector<T> lhs_copy = lhs_raw;
447
448 bool need_to_realloc = (!(lhs_alloc.get() == rhs_alloc.get()) ||
449 (lhs_copy.cap() < rhs_raw.len()));
450 if (need_to_realloc) {
451 T* data = lhs_copy.data;
452 usize cap = lhs_copy.cap();
453
454 // assign before deallocation in case it fails
455 lhs_raw = {};
456 mem::Alloc<A>::dealloc(
457 RefMut<A>(lhs_alloc),
458 static_cast<void*>(data),
459 mem::Layout{ cap * sizeof(T), alignof(T) });
460 lhs_copy = {};
461 }
462
464
465 // allocate and copy all elements
466 if (need_to_realloc) {
467 mem::AllocBlock block = _collections::alloc_and_copy( //
468 lhs_alloc,
469 cloner,
470 rhs_raw.data,
471 rhs_raw.len());
472 lhs_raw.data = static_cast<T*>(block.data);
473 lhs_raw.end_alloc = lhs_raw.data + block.byte_cap / sizeof(T);
474 } else {
475 _collections::slice_clone( //
476 lhs_alloc,
477 cloner,
478 lhs_copy.data,
479 lhs_copy.data + rhs_raw.len(),
480 rhs_raw.data);
481 }
482 lhs_raw.end = lhs_raw.data + rhs_raw.len();
483 }
484};
485
486template<typename T, typename A, typename C>
487VEG_INLINE void
488clone_from(RefMut<A> lhs_alloc,
489 RefMut<C> cloner,
490 vector::RawVector<T>& lhs_raw,
491 Ref<A> rhs_alloc,
492 vector::RawVector<T> const rhs_raw)
493 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) && //
494 VEG_CONCEPT(alloc::nothrow_alloc<A>) && //
495 VEG_CONCEPT(alloc::nothrow_clone<C, T, A>) && //
496 VEG_CONCEPT(alloc::nothrow_clone_from<C, T, A>))
497{
498 _collections::CloneFromImpl<
499 mem::Cloner<C>::template trivial_clone<T>::value>::fn(lhs_alloc,
500 cloner,
501 lhs_raw,
502 rhs_alloc,
503 rhs_raw);
504}
505} // namespace _collections
506} // namespace _detail
507
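// Editor's illustration, not part of vec.hpp: the non-trivial CloneFromImpl
// above follows the classic three-phase container assignment (assign over the
// shared prefix, destroy the surplus, construct the tail), plus reallocation
// and allocator-mismatch handling that this sketch leaves out. Assumes the
// destination buffer already has capacity for src_len elements:

#include <algorithm>
#include <cstddef>
#include <new>

template<typename T>
void assign_in_place(T* dst, std::size_t& dst_len, T const* src, std::size_t src_len)
{
  std::size_t shared = std::min(dst_len, src_len);
  for (std::size_t i = 0; i < shared; ++i) {
    dst[i] = src[i]; // 1) copy-assign over elements that already exist
  }
  for (std::size_t i = dst_len; i > shared; --i) {
    dst[i - 1].~T(); // 2) destroy the surplus, back to front
  }
  for (std::size_t i = shared; i < src_len; ++i) {
    ::new (static_cast<void*>(dst + i)) T(src[i]); // 3) copy-construct the tail
  }
  dst_len = src_len;
}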
508namespace _detail {
509namespace _vector {
510
511template<typename T>
512struct RawVectorMoveRaii /* NOLINT */
513{
514 vector::RawVector<T> _ = {};
515
516 RawVectorMoveRaii() = default;
517 RawVectorMoveRaii(FromRawParts /*tag*/,
518 vector::RawVector<T> inner) VEG_NOEXCEPT : _{ inner } {};
519 VEG_INLINE VEG_CPP14(constexpr)
520 RawVectorMoveRaii(RawVectorMoveRaii&& rhs) VEG_NOEXCEPT : _{ rhs._ }
521 {
522 rhs._ = {};
523 }
524 VEG_INLINE VEG_CPP14(constexpr)
525 RawVectorMoveRaii(RawVectorMoveRaii const& /*rhs*/) VEG_NOEXCEPT : _{}
526 {
527 }
528};
529
530template<typename T, typename A>
531struct VecAlloc
532 :
533 // alloc manager needs to be constructed first
534 Tuple<A, RawVectorMoveRaii<T>>
535{
536 using Tuple<A, RawVectorMoveRaii<T>>::Tuple;
537
538public:
539 VecAlloc(VecAlloc const&) = default;
540 VecAlloc(VecAlloc&&) = default;
541 auto operator=(VecAlloc const&) -> VecAlloc& = default;
542 auto operator=(VecAlloc&&) -> VecAlloc& = default;
543
544 VEG_INLINE ~VecAlloc() VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_dealloc<A>))
545 {
546 vector::RawVector<T> raw = (*this)[1_c]._;
547 if ((raw.data != nullptr) && (raw.end_alloc != 0)) {
548
549 // FIXME: if asan is enabled, before sanitizing make sure that:
550 // - begin is 8 byte aligned
551 // - either:
552 // - end is 8 byte aligned
553 // - A is the SystemAlloc
554#if VEG_HAS_ASAN
555 _detail::__sanitizer_annotate_contiguous_container( //
556 raw.data,
557 raw.end_alloc,
558 raw.data,
559 raw.end_alloc);
560#endif
561
562 mem::Alloc<A>::dealloc(
563 mut((*this)[0_c]),
564 static_cast<void*>(raw.data),
565 mem::Layout{ usize(raw.end_alloc - raw.data) * sizeof(T), alignof(T) });
566 }
567 }
568};
569} // namespace _vector
570} // namespace _detail
571
572#if VEG_HAS_ASAN
573#define __VEG_ASAN_ANNOTATE() /* NOLINT */ \
574 if (ptr() != nullptr) { \
575 _detail::__sanitizer_annotate_contiguous_container( \
576 ptr(), ptr() + capacity(), ptr() + len(), ptr() + capacity()); \
577 } \
578 auto&& _veglib_asan = defer([&]() noexcept { \
579 if (ptr() != nullptr) { \
580 _detail::__sanitizer_annotate_contiguous_container( \
581 ptr(), ptr() + capacity(), ptr() + capacity(), ptr() + len()); \
582 } \
583 }); \
584 (void)_veglib_asan
585#else
586#define __VEG_ASAN_ANNOTATE() /* NOLINT */ (void)0;
587#endif
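// Editor's note, illustration only: with AddressSanitizer,
// __sanitizer_annotate_contiguous_container(beg, end, old_mid, new_mid) marks
// [beg, new_mid) as addressable and [new_mid, end) as poisoned. The macro
// above unpoisons the whole buffer for the duration of a mutating member
// function and re-poisons the unused capacity on scope exit, so stray reads
// into spare capacity are caught. A standalone helper built on the same
// public sanitizer interface might look like this:

#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#include <sanitizer/common_interface_defs.h>
#include <cstddef>

inline void example_asan_set_len(char* data,
                                 std::size_t cap,
                                 std::size_t old_len,
                                 std::size_t new_len)
{
  __sanitizer_annotate_contiguous_container(
    data, data + cap, data + old_len, data + new_len);
}
#endif
#endif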
588
589namespace _detail {
590namespace _collections {
591template<typename T,
592 typename A = mem::SystemAlloc,
593 mem::DtorAvailable Dtor = mem::DtorAvailableFor<T>::value,
594 mem::CopyAvailable Copy = mem::CopyAvailableFor<T>::value>
595struct VecImpl
596{
597private:
598 _vector::VecAlloc<T, A> _ = {};
599
600public:
601 VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto alloc_ref() const
602 VEG_NOEXCEPT->Ref<A>
603 {
604 return ref(_[0_c]);
605 }
606 VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto raw_ref() const
607 VEG_NOEXCEPT->Ref<vector::RawVector<T>>
608 {
609 return ref(_[1_c]._);
610 }
611 VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto alloc_mut(Unsafe /*tag*/)
612 VEG_NOEXCEPT->RefMut<A>
613 {
614 return mut(_[0_c]);
615 }
616 VEG_NODISCARD VEG_INLINE VEG_CPP14(constexpr) auto raw_mut(Unsafe /*tag*/)
617 VEG_NOEXCEPT->RefMut<vector::RawVector<T>>
618 {
619 return mut(_[1_c]._);
620 }
621
622private:
623 VEG_INLINE void _reserve_grow_exact_impl(Unsafe /*tag*/, usize new_cap)
624 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
625 {
626 __VEG_ASAN_ANNOTATE();
627
628 vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
629 auto len = usize(this->len());
630
631 mem::AllocBlock new_block = mem::Alloc<A>::grow(
632 this->alloc_mut(unsafe),
633 static_cast<void*>(raw.data),
634 mem::Layout{
635 usize(byte_capacity()),
636 alignof(T),
637 },
638 new_cap * sizeof(T),
639 mem::RelocFn{ collections::relocate_pointer<T>::value });
640
641 T* data = static_cast<T*>(new_block.data);
642 raw = {
643 data,
644 data + len,
645 data + new_block.byte_cap / sizeof(T),
646 };
647 }
648 VEG_INLINE void _reserve_grow_exact(Unsafe /*tag*/, isize new_cap)
649 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
650 {
651 isize old_len = len();
652 this->_reserve_grow_exact_impl(unsafe, usize(new_cap));
653 meta::unreachable_if(capacity() < new_cap);
654 meta::unreachable_if(len() != old_len);
655 }
656 VEG_INLINE void _reserve_one_more(Unsafe /*tag*/)
657 {
658 this->_reserve_grow_exact(
659 unsafe,
660 isize(_detail::_collections::vector_grow_choose(usize(capacity()), usize(len()) + 1)));
661 }
662
663 static_assert(VEG_CONCEPT(nothrow_move_assignable<A>), ".");
664 static_assert(VEG_CONCEPT(nothrow_movable<A>), ".");
665
666public:
667 VEG_INLINE ~VecImpl()
668 VEG_NOEXCEPT_IF(Dtor == mem::DtorAvailable::yes_nothrow &&
669 VEG_CONCEPT(alloc::nothrow_dealloc<A>))
670 {
671 static_assert(Dtor == mem::DtorAvailableFor<T>::value, ".");
672 vector::RawVector<T> raw = this->raw_ref().get();
673 if (raw.data != nullptr) {
674 this->clear();
675 }
676 }
677
678 VEG_INLINE VecImpl() = default;
679
680 VEG_INLINE
681 VecImpl(Unsafe /*unsafe*/,
682 FromRawParts /*tag*/,
683 vector::RawVector<T> rawvec,
684 A alloc) VEG_NOEXCEPT
685 : _{
686 tuplify,
687 VEG_FWD(alloc),
688 _vector::RawVectorMoveRaii<T>{ from_raw_parts, rawvec },
689 }
690 {
691 }
692
694 VEG_INLINE auto operator=(VecImpl&& rhs) -> VecImpl&
695
696 {
697 auto cleanup = static_cast<VecImpl&&>(*this);
698 }
699
700 // can't fail
701 this->alloc_mut(unsafe).get() =
702 static_cast<A&&>(rhs.alloc_mut(unsafe).get());
703 this->raw_mut(unsafe).get() = rhs.raw_ref().get();
704 rhs.raw_mut(unsafe).get() = {};
705
706 return *this;
707 };
708
709 explicit VecImpl(VecImpl const& rhs)
710 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copyable<A>) &&
711 VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
712 Copy == mem::CopyAvailable::yes_nothrow)
713 : _{ rhs._ }
714 {
715 static_assert(Copy == mem::CopyAvailableFor<T>::value, ".");
716 __VEG_ASAN_ANNOTATE();
717 vector::RawVector<T> rhs_raw = rhs.raw_ref().get();
718 mem::AllocBlock blk =
719 _detail::_collections::alloc_and_copy(this->alloc_mut(unsafe),
720 mut(mem::DefaultCloner{}),
721 rhs_raw.data,
722 usize(rhs.len()));
723
724 T* data = static_cast<T*>(blk.data);
725 this->raw_mut(unsafe).get() = vector::RawVector<T>{
726 data,
727 data + usize(rhs.len()),
728 data + blk.byte_cap / sizeof(T),
729 };
730 }
731
732 auto operator=(VecImpl const& rhs)
733 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_copy_assignable<A>) &&
734 VEG_CONCEPT(alloc::nothrow_alloc<A>) &&
735 Copy == mem::CopyAvailable::yes_nothrow) -> VecImpl&
736 {
737 static_assert(Copy == mem::CopyAvailableFor<T>::value, ".");
738 if (this != mem::addressof(rhs)) {
739 __VEG_ASAN_ANNOTATE();
740
741 _detail::_collections::clone_from(this->alloc_mut(unsafe),
742 mut(mem::DefaultCloner{}),
743 this->raw_mut(unsafe).get(),
744 rhs.alloc_ref(),
745 rhs.raw_ref().get());
746 }
747 return *this;
748 }
749
750 VEG_INLINE void reserve_exact(isize new_cap)
751 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
752 {
753 if (new_cap > capacity()) {
754 this->_reserve_grow_exact(unsafe, new_cap);
755 }
756 }
757 VEG_INLINE void reserve(isize new_cap)
758 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>))
759 {
760 auto cap = capacity();
761 if (new_cap > cap) {
762 this->reserve_exact(isize(
763 _detail::_collections::vector_grow_choose(usize(cap), usize(new_cap))));
764 }
765 }
766
767 VEG_INLINE void pop_several_unchecked(Unsafe unsafe, isize n)
768 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_destructible<T>))
769 {
770 VEG_DEBUG_ASSERT_ALL_OF(0 <= n, n <= len());
771 __VEG_ASAN_ANNOTATE();
772
773 vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
774
775 T* end = raw.end;
776 raw.end -= n;
777 _detail::_collections::backward_destroy(
778 this->alloc_mut(unsafe), mut(mem::DefaultCloner{}), end - n, end);
779 }
780
787
788 VEG_INLINE auto pop_unchecked(Unsafe /*unsafe*/)
789 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
790 {
791 VEG_DEBUG_ASSERT(1 <= len());
792 T* last = raw_ref().get().end - 1;
793 T t = static_cast<T&&>(*last);
794 --raw_mut(unsafe).get().end;
795 mem::destroy_at(last);
796 return t;
797 }
798 VEG_INLINE auto pop_mid_unchecked(Unsafe /*unsafe*/, isize i)
799 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
800 {
801 VEG_DEBUG_ASSERT(0 <= i);
802 VEG_DEBUG_ASSERT(i < len());
803 T* elem = raw_ref().get().data + i;
804 T t = static_cast<T&&>(*elem);
805
806 // this block does not throw
807 {
808 mem::destroy_at(elem);
809 _detail::_collections::relocate<T>( //
810 elem,
811 elem + 1,
812 sizeof(T) * usize(len() - i - 1));
813 }
814 --raw_mut(unsafe).get().end;
815
816 return t;
817 }
818
819 VEG_INLINE auto pop() VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
820 {
821 VEG_ASSERT(1 <= len());
822 return pop_unchecked(unsafe);
823 }
824 VEG_INLINE auto pop_mid(isize i)
825 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>)) -> T
826 {
827 VEG_ASSERT(0 <= i);
828 VEG_ASSERT(i < len());
829 return pop_mid_unchecked(unsafe, i);
830 }
831
832 VEG_INLINE void clear() VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_destructible<T>))
833 {
834 pop_several_unchecked(unsafe, len());
835 }
836
837 VEG_TEMPLATE(typename U = T,
838 requires(VEG_CONCEPT(constructible<U>)),
839 void resize,
840 (n, isize))
841 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
842 VEG_CONCEPT(nothrow_constructible<U>))
843 {
844
845 vector::RawVector<T>& raw = raw_mut(unsafe).get();
846
847 if (n > len()) {
848 reserve(n);
849 {
850 __VEG_ASAN_ANNOTATE();
851 ::new (static_cast<void*>(ptr_mut() + len())) T[usize(n - len())]{};
852 raw.end = raw.data + n;
853 }
854 } else {
855 pop_several_unchecked(unsafe, len() - n);
856 }
857 }
858
859 VEG_TEMPLATE(typename U = T,
860 requires(VEG_CONCEPT(constructible<U>)),
861 void resize_for_overwrite,
862 (n, isize))
863 VEG_NOEXCEPT_IF(VEG_CONCEPT(alloc::nothrow_grow<A>) &&
864 VEG_CONCEPT(nothrow_constructible<U>))
865 {
866
867 vector::RawVector<T>& raw = raw_mut(unsafe).get();
868
869 if (n > len()) {
870 reserve(n);
871 {
872 __VEG_ASAN_ANNOTATE();
873 ::new (static_cast<void*>(ptr_mut() + len())) T[usize(n - len())];
874 raw.end = raw.data + n;
875 }
876 } else {
877 pop_several_unchecked(unsafe, len() - n);
878 }
879 }
880
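// Editor's illustration, not part of vec.hpp: resize() and
// resize_for_overwrite() above differ only in how the new tail is
// initialized; the "{}" in resize() value-initializes (zeroing scalars),
// while resize_for_overwrite() default-initializes and leaves scalars
// indeterminate. The same distinction, element by element:

#include <cstddef>
#include <new>

template<typename T>
void append_value_initialized(T* tail, std::size_t count)
{
  for (std::size_t i = 0; i < count; ++i) {
    ::new (static_cast<void*>(tail + i)) T(); // value-init: scalars become zero
  }
}

template<typename T>
void append_default_initialized(T* tail, std::size_t count)
{
  for (std::size_t i = 0; i < count; ++i) {
    ::new (static_cast<void*>(tail + i)) T; // default-init: scalars left indeterminate
  }
}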
881 VEG_TEMPLATE(typename Fn,
882 requires(VEG_CONCEPT(fn_once<Fn, T>)),
883 VEG_INLINE void push_mid_with,
884 (fn, Fn),
885 (i, isize))
886 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
887 VEG_CONCEPT(alloc::nothrow_alloc<A>))
888 {
889 static_assert(VEG_CONCEPT(nothrow_fn_once<Fn, T>), ".");
890
891 VEG_ASSERT_ALL_OF(0 <= i, i <= len());
892
893 reserve(len() + 1);
894 {
895 __VEG_ASAN_ANNOTATE();
896 vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
897 T* elem = raw.data + i;
898 _detail::_collections::relocate_backward<T>( //
899 elem + 1,
900 elem,
901 sizeof(T) * usize(raw.end - elem));
902 mem::construct_with(elem, VEG_FWD(fn));
903 ++raw.end;
904 }
905 }
906 VEG_INLINE void push_mid(T value, isize i)
907 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>) &&
908 VEG_CONCEPT(alloc::nothrow_alloc<A>))
909 {
910 this->push_mid_with(_detail::MoveFn<T>{ VEG_FWD(value) }, i);
911 }
912
913 VEG_TEMPLATE(typename Fn,
914 requires(VEG_CONCEPT(fn_once<Fn, T>)),
915 VEG_INLINE void push_with_unchecked,
916 (/*tag*/, Unsafe),
917 (fn, Fn))
918 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
919 VEG_CONCEPT(alloc::nothrow_alloc<A>))
920 {
921 __VEG_ASAN_ANNOTATE();
922
923 vector::RawVector<T>& raw = this->raw_mut(unsafe).get();
924 mem::construct_with(raw.end, VEG_FWD(fn));
925 ++raw.end;
926 }
927
928 VEG_TEMPLATE(typename Fn,
929 requires(VEG_CONCEPT(fn_once<Fn, T>)),
930 VEG_INLINE void push_with,
931 (fn, Fn))
932 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_fn_once<Fn, T>) &&
933 VEG_CONCEPT(alloc::nothrow_alloc<A>))
934 {
935 vector::RawVector<T> raw = this->raw_ref().get();
936 if (HEDLEY_UNLIKELY(raw.end == raw.end_alloc)) {
937 this->_reserve_one_more(unsafe);
938 }
939 this->push_with_unchecked(unsafe, VEG_FWD(fn));
940 }
941 VEG_INLINE void push(T value)
942 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>) &&
943 VEG_CONCEPT(alloc::nothrow_alloc<A>))
944 {
945 this->push_with(_detail::MoveFn<T>{ VEG_FWD(value) });
946 }
947 VEG_INLINE void push_unchecked(Unsafe /*tag*/, T value)
948 VEG_NOEXCEPT_IF(VEG_CONCEPT(nothrow_movable<T>))
949 {
950 this->push_with_unchecked(unsafe, _detail::MoveFn<T>{ VEG_FWD(value) });
951 }
952
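// Editor's illustration, not part of vec.hpp: push() above is push_with() fed
// with _detail::MoveFn, a small callable that returns the moved value, and
// push_with() constructs the new element directly from the callable's result.
// That is how in-place construction is expressed here without a variadic
// emplace(). Minimal sketch of the idea (names are hypothetical):

#include <new>
#include <utility>

template<typename T, typename Fn>
void construct_from_factory(T* slot, Fn fn)
{
  // `slot` must point to uninitialized storage for one T
  ::new (static_cast<void*>(slot)) T(fn());
}

template<typename T>
struct MoveIntoSlot
{
  T value;
  T operator()() { return static_cast<T&&>(value); }
};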
953 VEG_NODISCARD VEG_INLINE auto as_ref() const VEG_NOEXCEPT -> Slice<T>
954 {
955 return { unsafe, from_raw_parts, ptr(), len() };
956 }
957 VEG_NODISCARD VEG_INLINE auto as_mut() VEG_NOEXCEPT -> SliceMut<T>
958 {
959 return { unsafe, from_raw_parts, ptr_mut(), len() };
960 }
961
962 VEG_NODISCARD VEG_INLINE auto ptr() const VEG_NOEXCEPT -> T const*
963 {
964 return this->raw_ref().get().data;
965 }
966 VEG_NODISCARD VEG_INLINE auto ptr_mut() VEG_NOEXCEPT -> T*
967 {
968 return const_cast<T*>(this->ptr());
969 }
970 VEG_NODISCARD VEG_INLINE auto len() const VEG_NOEXCEPT -> isize
971 {
972 auto& raw = this->raw_ref().get();
973 return isize(raw.end - raw.data);
974 }
975 VEG_NODISCARD VEG_INLINE auto capacity() const VEG_NOEXCEPT -> isize
976 {
977 auto& raw = this->raw_ref().get();
978 return isize(raw.end_alloc - raw.data);
979 }
980 VEG_NODISCARD VEG_INLINE auto byte_capacity() const VEG_NOEXCEPT -> isize
981 {
982 auto& raw = this->raw_ref().get();
983 return meta::is_consteval()
984 ? (raw.end_alloc - raw.data) * isize(sizeof(T))
985 : (reinterpret_cast<char const*>(raw.end_alloc) -
986 reinterpret_cast<char const*>(raw.data));
987 }
988 VEG_NODISCARD VEG_INLINE auto operator[](isize i) const VEG_NOEXCEPT -> T const&
989 {
990 VEG_ASSERT(usize(i) < usize(len()));
991 return this->ptr()[i];
992 }
993 VEG_NODISCARD VEG_INLINE auto operator[](isize i) VEG_NOEXCEPT -> T&
994 {
995 return const_cast<T&>(static_cast<VecImpl const*>(this)->operator[](i));
996 }
997};
998} // namespace _collections
999} // namespace _detail
1000
1001template <
1002 typename T,
1003 typename A = mem::SystemAlloc,
1004 mem::DtorAvailable Dtor = mem::DtorAvailableFor<T>::value,
1005 mem::CopyAvailable Copy = mem::CopyAvailableFor<T>::value>
1006struct Vec
1007 : private meta::if_t< //
1008 Copy == mem::CopyAvailable::no,
1009 _detail::NoCopy,
1010 _detail::Empty>,
1011 public _detail::_collections::VecImpl<T, A, Dtor, Copy>
1012{
1013
1015 Vec() = default;
1017};
1018
1019template<typename T, typename A>
1023template<typename T, typename A>
1027} // namespace veg
1028} // namespace linalg
1029} // namespace proxsuite
1030
1031#undef __VEG_ASAN_ANNOTATE
1032
1034#endif /* end of include guard VEG_VECTOR_HPP_QWFSH3ROS */
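Example usage of the public Vec API defined above (editor's illustration; the
include path is an assumption based on the namespace layout):

#include <proxsuite/linalg/veg/vec.hpp>

void vec_example()
{
  namespace veg = proxsuite::linalg::veg;

  veg::Vec<int> v;
  v.reserve(8);           // grow capacity ahead of time
  for (int i = 0; i < 8; ++i) {
    v.push(i);            // append, reallocating when capacity is exhausted
  }
  int last = v.pop();     // remove and return the last element
  int third = v[2];       // element access, debug-asserted in bounds
  (void)last;
  (void)third;
}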