From 74b7865a0e5a05f7f1647c93196402bdcb8a0197 Mon Sep 17 00:00:00 2001
From: Alex Severin
Date: Fri, 10 May 2024 20:03:05 +0300
Subject: [PATCH] capacity for dataset

---
 src/dataset/impl_dataset.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/dataset/impl_dataset.rs b/src/dataset/impl_dataset.rs
index dc225d29a..b8148793f 100644
--- a/src/dataset/impl_dataset.rs
+++ b/src/dataset/impl_dataset.rs
@@ -603,13 +603,13 @@ where
     )> {
         let targets = self.as_targets();
         let fold_size = targets.len() / k;
-        let mut res = Vec::new();

         // Generates all k folds of records and targets
         let mut records_chunks: Vec<_> =
             self.records.axis_chunks_iter(Axis(0), fold_size).collect();
         let mut targets_chunks: Vec<_> = targets.axis_chunks_iter(Axis(0), fold_size).collect();
+        let mut res = Vec::with_capacity(k);

         // For each iteration, take the first chunk for both records and targets as the validation set and
         // concatenate all the other chunks to create the training set. In the end swap the first chunk with the
         // one in the next index so that it is ready for the next iteration
@@ -742,7 +742,7 @@ where
         let targets = self.ntargets();
         let tshape = self.targets.raw_dim();

-        let mut objs: Vec = Vec::new();
+        let mut objs: Vec = Vec::with_capacity(k);

         {
             let records_sl = self.records.as_slice_mut().unwrap();
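
For context, a minimal standalone sketch of the pattern this patch applies: when the final number of elements is known up front (here, the k folds), `Vec::with_capacity(k)` reserves space once, whereas `Vec::new()` starts at capacity 0 and reallocates as it grows. The function and variable names below are illustrative placeholders, not code from the patched crate.

```rust
// Illustrative only: pre-sizing a Vec whose final length (k) is known,
// mirroring the change from Vec::new() to Vec::with_capacity(k) above.
fn collect_folds(k: usize) -> Vec<(usize, usize)> {
    // Reserves room for all k entries once, so the pushes below never
    // trigger a reallocation; Vec::new() would grow geometrically instead.
    let mut res = Vec::with_capacity(k);
    for i in 0..k {
        // Stand-in for building one (train, validation) pair per fold.
        res.push((i, k - i));
    }
    res
}

fn main() {
    let folds = collect_folds(5);
    assert_eq!(folds.len(), 5);
    assert!(folds.capacity() >= 5);
    println!("{folds:?}");
}
```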