Rename to validRatio
kartikdutt18 committed May 31, 2020
1 parent 040decb commit bb66ce8
Showing 2 changed files with 9 additions and 9 deletions.
8 changes: 4 additions & 4 deletions dataloader/dataloader.hpp
@@ -63,14 +63,14 @@ class DataLoader
*
* @param dataset Path or name of dataset.
* @param shuffle whether or not to shuffle the data.
- * @param testRatio Ratio of dataset to be used for validation set.
+ * @param validRatio Ratio of dataset to be used for validation set.
* @param useScaler Use feature scaler for pre-processing the dataset.
* @param augmentation Adds augmentation to training data only.
* @param augmentationProbability Probability of applying augmentation on dataset.
*/
DataLoader(const std::string& dataset,
const bool shuffle,
- const double testRatio = 0.25,
+ const double validRatio = 0.25,
const bool useScaler = true,
const std::vector<std::string> augmentation =
std::vector<std::string>(),
@@ -85,7 +85,7 @@ class DataLoader
* Note: This option sets augmentation to NULL, sets the ratio to 1, and
* the scaler is used only to transform the test data.
* @param shuffle Boolean to determine whether or not to shuffle the data.
- * @param testRatio Ratio of dataset to be used for validation set.
+ * @param validRatio Ratio of dataset to be used for validation set.
* @param useScaler Fits the scaler on training data and transforms dataset.
* @param dropHeader Drops the first row from CSV.
* @param startInputFeatures First Index which will be fed into the model as input.
@@ -106,7 +106,7 @@ class DataLoader
void LoadCSV(const std::string& datasetPath,
const bool loadTrainData = true,
const bool shuffle = true,
- const double testRatio = 0.25,
+ const double validRatio = 0.25,
const bool useScaler = false,
const bool dropHeader = false,
const int startInputFeatures = -1,
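For reference, a minimal usage sketch of the renamed parameter at the call
site (not part of this commit), assuming the default template arguments of
DataLoader and a dataset key registered in datasetMap; the "mnist" key and
the include path are illustrative:

    #include <dataloader/dataloader.hpp>

    int main()
    {
      // "mnist" is a hypothetical datasetMap key; 20% of the training CSV
      // is held out as the validation set via the renamed validRatio
      // argument (formerly testRatio).
      DataLoader<> loader("mnist", /* shuffle */ true, /* validRatio */ 0.2);
      return 0;
    }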
10 changes: 5 additions & 5 deletions dataloader/dataloader_impl.hpp
@@ -36,7 +36,7 @@ template<
DatasetX, DatasetY, ScalerType
>::DataLoader(const std::string& dataset,
const bool shuffle,
- const double testRatio,
+ const double validRatio,
const bool useScaler,
const std::vector<std::string> augmentation,
const double augmentationProbability)
@@ -49,14 +49,14 @@ template<

if (datasetMap[dataset].loadCSV)
{
- LoadCSV(datasetMap[dataset].trainPath, true, shuffle, testRatio,
+ LoadCSV(datasetMap[dataset].trainPath, true, shuffle, validRatio,
useScaler, datasetMap[dataset].dropHeader,
datasetMap[dataset].startTrainingInputFeatures,
datasetMap[dataset].endTrainingInputFeatures,
datasetMap[dataset].endTrainingPredictionFeatures,
datasetMap[dataset].endTrainingPredictionFeatures);

- LoadCSV(datasetMap[dataset].testPath, false, false, testRatio, useScaler,
+ LoadCSV(datasetMap[dataset].testPath, false, false, validRatio, useScaler,
datasetMap[dataset].dropHeader,
datasetMap[dataset].startTestingInputFeatures,
datasetMap[dataset].endTestingInputFeatures);
@@ -85,7 +85,7 @@ template<
>::LoadCSV(const std::string& datasetPath,
const bool loadTrainData,
const bool shuffle,
- const double testRatio,
+ const double validRatio,
const bool useScaler,
const bool dropHeader,
const int startInputFeatures,
@@ -104,7 +104,7 @@ template<
if (loadTrainData)
{
arma::mat trainDataset, validDataset;
- data::Split(dataset, trainDataset, validDataset, testRatio, shuffle);
+ data::Split(dataset, trainDataset, validDataset, validRatio, shuffle);

trainFeatures = trainDataset.rows(WrapIndex(startInputFeatures,
trainDataset.n_rows), WrapIndex(endInputFeatures,
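The renamed argument is forwarded straight to mlpack's data::Split, exactly
as in the call above. A short sketch of that hold-out step in isolation
(the SplitExample wrapper and the 0.25 ratio are illustrative):

    #include <mlpack/core.hpp>

    void SplitExample(const arma::mat& dataset)
    {
      arma::mat trainDataset, validDataset;
      // With validRatio = 0.25, roughly 25% of the points (columns) go to
      // validDataset and the remaining 75% stay in trainDataset; shuffle
      // controls whether the points are permuted before splitting.
      mlpack::data::Split(dataset, trainDataset, validDataset,
          /* validRatio */ 0.25, /* shuffle */ true);
    }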
