#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::vec::Vec;

use crate::base::allocator::Allocator;
use crate::base::constraint::{SameNumberOfRows, ShapeConstraint};
use crate::base::default_allocator::DefaultAllocator;
use crate::base::dimension::{Dim, DimName, Dyn, U1};
use crate::base::storage::{IsContiguous, Owned, RawStorage, RawStorageMut, ReshapableStorage};
use crate::base::{Scalar, Vector};

#[cfg(feature = "serde-serialize-no-std")]
use serde::{
    de::{Deserialize, Deserializer, Error},
    ser::{Serialize, Serializer},
};

use crate::Storage;
use std::mem::MaybeUninit;

/// A Vec-based matrix data storage. It may be dynamically resized.
#[repr(C)]
#[derive(Eq, Debug, Clone, PartialEq)]
pub struct VecStorage<T, R: Dim, C: Dim> {
    data: Vec<T>,
    nrows: R,
    ncols: C,
}

impl<T> Default for VecStorage<T, Dyn, Dyn> {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            nrows: Dyn::from_usize(0),
            ncols: Dyn::from_usize(0),
        }
    }
}

impl<T, R: DimName> Default for VecStorage<T, R, Dyn> {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            nrows: R::name(),
            ncols: Dyn::from_usize(0),
        }
    }
}

impl<T, C: DimName> Default for VecStorage<T, Dyn, C> {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            nrows: Dyn::from_usize(0),
            ncols: C::name(),
        }
    }
}

impl<T: Default, R: DimName, C: DimName> Default for VecStorage<T, R, C> {
    fn default() -> Self {
        let nrows = R::name();
        let ncols = C::name();
        let mut data = Vec::new();
        data.resize_with(nrows.value() * ncols.value(), Default::default);
        Self { data, nrows, ncols }
    }
}
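
// The test below is not part of the original file; it is a minimal, crate-internal
// sanity-check sketch showing that the fully dynamic `Default` implementation above
// produces an empty storage. The module name is made up for illustration.
#[cfg(test)]
mod vec_storage_default_sketch {
    use super::*;

    #[test]
    fn dynamic_default_is_empty() {
        // Both dimensions are dynamic, so the default storage holds no elements.
        let storage: VecStorage<f32, Dyn, Dyn> = Default::default();
        assert!(storage.is_empty());
        assert_eq!(storage.shape().0.value(), 0);
        assert_eq!(storage.shape().1.value(), 0);
    }
}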

#[cfg(feature = "serde-serialize-no-std")]
impl<T, R: Dim, C: Dim> Serialize for VecStorage<T, R, C>
where
    T: Serialize,
    R: Serialize,
    C: Serialize,
{
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: Serializer,
    {
        (&self.data, &self.nrows, &self.ncols).serialize(serializer)
    }
}

#[cfg(feature = "serde-serialize-no-std")]
impl<'a, T, R: Dim, C: Dim> Deserialize<'a> for VecStorage<T, R, C>
where
    T: Deserialize<'a>,
    R: Deserialize<'a>,
    C: Deserialize<'a>,
{
    fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error>
    where
        Des: Deserializer<'a>,
    {
        let (data, nrows, ncols): (Vec<T>, R, C) = Deserialize::deserialize(deserializer)?;

        if nrows.value() * ncols.value() != data.len() {
            return Err(Des::Error::custom(format!(
                "Expected {} components, found {}",
                nrows.value() * ncols.value(),
                data.len()
            )));
        }

        Ok(Self { data, nrows, ncols })
    }
}

#[deprecated(note = "renamed to `VecStorage`")]
pub type MatrixVec<T, R, C> = VecStorage<T, R, C>;

impl<T, R: Dim, C: Dim> VecStorage<T, R, C> {
    /// Creates a new dynamic matrix data storage from the given vector and shape.
    #[inline]
    pub fn new(nrows: R, ncols: C, data: Vec<T>) -> Self {
        assert!(
            nrows.value() * ncols.value() == data.len(),
            "Data storage buffer dimension mismatch."
        );
        Self { data, nrows, ncols }
    }

    /// The underlying data storage.
    #[inline]
    #[must_use]
    pub fn as_vec(&self) -> &Vec<T> {
        &self.data
    }

    /// The underlying mutable data storage.
    ///
    /// # Safety
    /// This is unsafe because this may cause undefined behavior if the size of the vector is
    /// changed by the user.
    #[inline]
    pub unsafe fn as_vec_mut(&mut self) -> &mut Vec<T> {
        &mut self.data
    }

    /// Resizes the underlying mutable data storage and unwraps it.
    ///
    /// # Safety
    /// - If `sz` is larger than the current size, the extra elements of the returned vector are
    ///   left uninitialized.
    /// - If `sz` is smaller than the current size, the excess elements are truncated but **not**
    ///   dropped; dropping them is the responsibility of the caller.
    #[inline]
    pub unsafe fn resize(mut self, sz: usize) -> Vec<MaybeUninit<T>> {
        let len = self.len();

        let new_data = if sz < len {
            // Use `set_len` instead of `truncate` so the removed elements are *not* dropped:
            // dropping them is the caller's responsibility.
            self.data.set_len(sz);
            self.data.shrink_to_fit();

            // Safety: `MaybeUninit<T>` has the same layout as `T`, and the pointer, length and
            // capacity come from a live `Vec`.
            Vec::from_raw_parts(
                self.data.as_mut_ptr() as *mut MaybeUninit<T>,
                self.data.len(),
                self.data.capacity(),
            )
        } else {
            self.data.reserve_exact(sz - len);

            // Safety: `MaybeUninit<T>` has the same layout as `T`, and the pointer, length and
            // capacity come from a live `Vec`.
            let mut new_data = Vec::from_raw_parts(
                self.data.as_mut_ptr() as *mut MaybeUninit<T>,
                self.data.len(),
                self.data.capacity(),
            );

            // Safety: the elements past `len` are `MaybeUninit`, so they may stay uninitialized.
            new_data.set_len(sz);
            new_data
        };

        // The buffer is now owned by `new_data`; forget `self` to avoid a double free.
        std::mem::forget(self);
        new_data
    }

    /// The number of elements on the underlying vector.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Returns true if the underlying vector contains no elements.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// A slice containing all the components stored in this storage in column-major order.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        &self.data[..]
    }

    /// A mutable slice containing all the components stored in this storage in column-major order.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}

impl<T, R: Dim, C: Dim> From<VecStorage<T, R, C>> for Vec<T> {
    fn from(vec: VecStorage<T, R, C>) -> Self {
        vec.data
    }
}
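
// The test below is not part of the original file; it is a minimal, crate-internal
// sanity-check sketch of `VecStorage::new`, the column-major `RawStorage` accessors,
// and the `From<VecStorage<..>> for Vec<T>` conversion defined above. The module
// name is made up for illustration.
#[cfg(test)]
mod vec_storage_construction_sketch {
    use super::*;

    #[test]
    fn new_stores_data_in_column_major_order() {
        // A 2x3 matrix stored column by column: the columns are [1, 2], [3, 4] and [5, 6].
        let storage = VecStorage::new(Dyn(2), Dyn(3), vec![1, 2, 3, 4, 5, 6]);
        assert_eq!(storage.len(), 6);
        assert_eq!((storage.shape().0.value(), storage.shape().1.value()), (2, 3));

        // The row stride is always 1 and the column stride equals the number of rows.
        let (rstride, cstride) = storage.strides();
        assert_eq!((rstride.value(), cstride.value()), (1, 2));

        // The storage can be converted back into its underlying `Vec` without copying.
        let data: Vec<i32> = storage.into();
        assert_eq!(data, vec![1, 2, 3, 4, 5, 6]);
    }
}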

unsafe impl<T, C: Dim> RawStorage<T, Dyn, C> for VecStorage<T, Dyn, C> {
    type RStride = U1;
    type CStride = Dyn;

    #[inline]
    fn ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (Dyn, C) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    unsafe fn as_slice_unchecked(&self) -> &[T] {
        &self.data
    }
}

unsafe impl<T: Scalar, C: Dim> Storage<T, Dyn, C> for VecStorage<T, Dyn, C>
where
    DefaultAllocator: Allocator<Dyn, C, Buffer<T> = Self>,
{
    #[inline]
    fn into_owned(self) -> Owned<T, Dyn, C>
    where
        DefaultAllocator: Allocator<Dyn, C>,
    {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<T, Dyn, C>
    where
        DefaultAllocator: Allocator<Dyn, C>,
    {
        self.clone()
    }

    #[inline]
    fn forget_elements(mut self) {
        // Setting the length to zero leaks the elements: when `self.data` is dropped below,
        // the buffer is freed but the elements themselves are never dropped.
        unsafe { self.data.set_len(0) };
    }
}

unsafe impl<T, R: DimName> RawStorage<T, R, Dyn> for VecStorage<T, R, Dyn> {
    type RStride = U1;
    type CStride = R;

    #[inline]
    fn ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (R, Dyn) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    unsafe fn as_slice_unchecked(&self) -> &[T] {
        &self.data
    }
}

unsafe impl<T: Scalar, R: DimName> Storage<T, R, Dyn> for VecStorage<T, R, Dyn>
where
    DefaultAllocator: Allocator<R, Dyn, Buffer<T> = Self>,
{
    #[inline]
    fn into_owned(self) -> Owned<T, R, Dyn>
    where
        DefaultAllocator: Allocator<R, Dyn>,
    {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<T, R, Dyn>
    where
        DefaultAllocator: Allocator<R, Dyn>,
    {
        self.clone()
    }

    #[inline]
    fn forget_elements(mut self) {
        // Same trick as above: zero the length so dropping `self.data` frees the buffer
        // without dropping the elements.
        unsafe { self.data.set_len(0) };
    }
}

unsafe impl<T, C: Dim> RawStorageMut<T, Dyn, C> for VecStorage<T, Dyn, C> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut T {
        self.data.as_mut_ptr()
    }

    #[inline]
    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}

unsafe impl<T, R: Dim, C: Dim> IsContiguous for VecStorage<T, R, C> {}

impl<T, C1, C2> ReshapableStorage<T, Dyn, C1, Dyn, C2> for VecStorage<T, Dyn, C1>
where
    T: Scalar,
    C1: Dim,
    C2: Dim,
{
    type Output = VecStorage<T, Dyn, C2>;

    fn reshape_generic(self, nrows: Dyn, ncols: C2) -> Self::Output {
        assert_eq!(nrows.value() * ncols.value(), self.data.len());
        VecStorage {
            data: self.data,
            nrows,
            ncols,
        }
    }
}

impl<T, C1, R2> ReshapableStorage<T, Dyn, C1, R2, Dyn> for VecStorage<T, Dyn, C1>
where
    T: Scalar,
    C1: Dim,
    R2: DimName,
{
    type Output = VecStorage<T, R2, Dyn>;

    fn reshape_generic(self, nrows: R2, ncols: Dyn) -> Self::Output {
        assert_eq!(nrows.value() * ncols.value(), self.data.len());
        VecStorage {
            data: self.data,
            nrows,
            ncols,
        }
    }
}

unsafe impl<T, R: DimName> RawStorageMut<T, R, Dyn> for VecStorage<T, R, Dyn> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut T {
        self.data.as_mut_ptr()
    }

    #[inline]
    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}

impl<T, R1, C2> ReshapableStorage<T, R1, Dyn, Dyn, C2> for VecStorage<T, R1, Dyn>
where
    T: Scalar,
    R1: DimName,
    C2: Dim,
{
    type Output = VecStorage<T, Dyn, C2>;

    fn reshape_generic(self, nrows: Dyn, ncols: C2) -> Self::Output {
        assert_eq!(nrows.value() * ncols.value(), self.data.len());
        VecStorage {
            data: self.data,
            nrows,
            ncols,
        }
    }
}

impl<T, R1, R2> ReshapableStorage<T, R1, Dyn, R2, Dyn> for VecStorage<T, R1, Dyn>
where
    T: Scalar,
    R1: DimName,
    R2: DimName,
{
    type Output = VecStorage<T, R2, Dyn>;

    fn reshape_generic(self, nrows: R2, ncols: Dyn) -> Self::Output {
        assert_eq!(nrows.value() * ncols.value(), self.data.len());
        VecStorage {
            data: self.data,
            nrows,
            ncols,
        }
    }
}
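
// The test below is not part of the original file; it is a minimal, crate-internal
// sanity-check sketch showing that reshaping a fully dynamic storage only
// reinterprets the existing column-major buffer. The module name is made up for
// illustration.
#[cfg(test)]
mod vec_storage_reshape_sketch {
    use super::*;

    #[test]
    fn reshape_reuses_the_same_buffer() {
        // Reinterpret a 2x3 storage as 3x2: the element count stays the same and the
        // underlying `Vec` is moved, not copied.
        let storage = VecStorage::new(Dyn(2), Dyn(3), vec![1, 2, 3, 4, 5, 6]);
        let reshaped: VecStorage<i32, Dyn, Dyn> = storage.reshape_generic(Dyn(3), Dyn(2));
        assert_eq!((reshaped.shape().0.value(), reshaped.shape().1.value()), (3, 2));
        assert_eq!(reshaped.as_slice(), &[1, 2, 3, 4, 5, 6]);
    }
}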

impl<T, R: Dim> Extend<T> for VecStorage<T, R, Dyn> {
    /// Extends the number of columns of the `VecStorage` with elements from the given iterator.
    ///
    /// # Panics
    /// Panics if the number of elements yielded by the iterator is not a multiple of the number
    /// of rows of this `VecStorage`.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        self.data.extend(iter);
        self.ncols = Dyn(self.data.len() / self.nrows.value());
        assert!(
            self.data.len() % self.nrows.value() == 0,
            "The number of elements produced by the given iterator was not a multiple of the number of rows."
        );
    }
}
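
// The test below is not part of the original file; it is a minimal, crate-internal
// sanity-check sketch of the column-wise `Extend` implementation above. The module
// name is made up for illustration.
#[cfg(test)]
mod vec_storage_extend_sketch {
    use super::*;

    #[test]
    fn extending_appends_whole_columns() {
        // Start with a single 2-element column and push one more column's worth of
        // elements; the number of columns is recomputed from the new buffer length.
        let mut storage = VecStorage::new(Dyn(2), Dyn(1), vec![1, 2]);
        storage.extend(vec![3, 4]);
        assert_eq!(storage.shape().1.value(), 2);
        assert_eq!(storage.as_slice(), &[1, 2, 3, 4]);
    }
}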

impl<'a, T: 'a + Copy, R: Dim> Extend<&'a T> for VecStorage<T, R, Dyn> {
    /// Extends the number of columns of the `VecStorage` with copies of the elements yielded by
    /// the given iterator.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().copied())
    }
}

impl<T, R, RV, SV> Extend<Vector<T, RV, SV>> for VecStorage<T, R, Dyn>
where
    T: Scalar,
    R: Dim,
    RV: Dim,
    SV: RawStorage<T, RV>,
    ShapeConstraint: SameNumberOfRows<R, RV>,
{
    /// Extends the number of columns of the `VecStorage` with vectors from the given iterator.
    ///
    /// # Panics
    /// Panics if the number of rows of any `Vector` yielded by the iterator is not equal to the
    /// number of rows of this `VecStorage`.
    fn extend<I: IntoIterator<Item = Vector<T, RV, SV>>>(&mut self, iter: I) {
        let nrows = self.nrows.value();
        let iter = iter.into_iter();
        let (lower, _upper) = iter.size_hint();
        self.data.reserve(nrows * lower);
        for vector in iter {
            assert_eq!(nrows, vector.shape().0);
            self.data.extend(vector.iter().cloned());
        }
        self.ncols = Dyn(self.data.len() / nrows);
    }
}

impl<T> Extend<T> for VecStorage<T, Dyn, U1> {
    /// Extends the number of rows of the `VecStorage` with elements from the given iterator.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        self.data.extend(iter);
        self.nrows = Dyn(self.data.len());
    }
}