diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..952bbd3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +*.so +*.so.* diff --git a/.travis.yml b/.travis.yml index 6980984..f59f9b5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,23 @@ sudo: required -dist: xenial language: go go: - "1.14" go_import_path: mlpack.org/v1/mlpack -before_install: - - go get -u gonum.org/v1/gonum/... - - make deps setup_armadillo download build sudo_install clean +matrix : + include : + - os : linux + dist : xenial + before_install: + - go get -u gonum.org/v1/gonum/... + - make deps setup_armadillo download build sudo_install clean + - os : osx + dist : xcode9.4 + before_install: + - go get -u gonum.org/v1/gonum/... + - HOMEBREW_NO_AUTO_UPDATE=1 brew install cmake curl git unzip openblas armadillo boost + - make download build sudo_install clean script: - go test -v . ./tests diff --git a/Dockerfile b/Dockerfile index 938372e..ea074df 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,8 +15,7 @@ RUN apt-get update -qq && \ apt-get install -y --no-install-recommends sudo \ unzip build-essential cmake git pkg-config \ curl ca-certificates libcurl4-openssl-dev libssl-dev \ - libboost-math-dev libboost-program-options-dev \ - libboost-test-dev libboost-serialization-dev libarmadillo-dev && \ + libboost-math-dev libboost-test-dev libboost-serialization-dev libarmadillo-dev && \ apt-get clean && rm -rf /usr/share/man/?? && rm -rf /usr/share/man/??_* && \ rm -rf /var/lib/apt/lists/* && rm -rf /usr/share/locale/* && \ rm -rf /var/cache/debconf/*-old && rm -rf /usr/share/doc/* diff --git a/LICENSE.txt b/LICENSE.txt index 03dc24e..02bf885 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,48 +1,26 @@ -BSD 3-Clause License +Copyright (c) 2007-2020 mlpack contributors. All rights reserved. -mlpack is provided without any warranty of fitness for any purpose. You -can redistribute the library and/or modify it under the terms of the 3-clause -BSD license. 
The text of the 3-clause BSD license is contained below. - -mlpack contains some reproductions of the source code of Armadillo, which is -licensed under the Mozilla Public License v2.0 (MPL2). This code is found in -src/mlpack/core/arma_extend/ and more details on the licensing are available -there. - -mlpack also contains some reproductions of the source code of Boost, which is -licensed under the Boost Software License, version 1.0. This code is found in -src/mlpack/core/boost_backport/ and more details on the licensing are available -there. - -mlpack may contain some usage of the source code of stb, which is licensed -under the MIT License and the Public Domain (www.unlicense.org). This code -is used in src/mlpack/core/data/load_image.hpp. - ----- -Copyright (c) 2007-2020, mlpack contributors (see COPYRIGHT.txt) -All rights reserved. - -Redistribution and use of mlpack in source and binary forms, with or without +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. + list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation and/or -other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors -may be used to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Makefile b/Makefile index 95a78bd..da7348d 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ .PHONY: test deps download build clean docker # mlpack version to use. -MLPACK_VERSION?=3.3.2 +MLPACK_VERSION?=3.4.0 # armadillo version to use. 
ARMA_VERSION?=8.400.0 @@ -14,11 +14,9 @@ GOVERSION?=1.13.1 TMP_DIR?=/tmp/ # Package list for each well-known Linux distribution -RPMS = cmake curl git unzip boost-devel boost-test boost-program-options \ - boost-math armadillo-devel +RPMS = cmake curl git unzip boost-devel boost-test boost-math armadillo-devel DEBS = unzip build-essential cmake curl git pkg-config libboost-math-dev \ - libboost-program-options-dev libboost-test-dev libboost-serialization-dev \ - libarmadillo-dev + libboost-test-dev libboost-serialization-dev libarmadillo-dev # Detect Linux distribution UNAME_S := $(shell uname -s) @@ -57,59 +55,50 @@ deps_debian: # Download and install Armadillo. setup_armadillo: - rm -rf $(TMP_DIR)armadillo - mkdir $(TMP_DIR)armadillo - cd $(TMP_DIR)armadillo + rm -rf $(TMP_DIR)armadillo && mkdir $(TMP_DIR)armadillo && cd $(TMP_DIR)armadillo && \ curl https://ftp.fau.de/macports/distfiles/armadillo/armadillo-$(ARMA_VERSION).tar.xz \ - | tar -xvJ && cd armadillo* && \ - cmake . && make && sudo make install && cd .. && rm -rf armadillo* + | tar -xvJ && cd armadillo* && \ + cmake . && make && sudo make install && cd .. && rm -rf armadillo* # Download mlpack source. download: - rm -rf $(TMP_DIR)mlpack - mkdir $(TMP_DIR)mlpack - cd $(TMP_DIR)mlpack - curl -Lo mlpack.zip https://www.mlpack.org/files/mlpack-$(MLPACK_VERSION).tar.gz - tar -xvzpf mlpack.zip - rm mlpack.zip - cd - + rm -rf $(TMP_DIR)mlpack && mkdir $(TMP_DIR)mlpack && cd $(TMP_DIR)mlpack && \ + curl -Lo mlpack.zip https://www.mlpack.org/files/mlpack-$(MLPACK_VERSION).tar.gz && \ + tar -xvzpf mlpack.zip && rm mlpack.zip && cd - # Build mlpack(go shared libraries). build: - cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION) - mkdir build - cd build + cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION) && mkdir build && cd build && \ cmake -D BUILD_TESTS=OFF \ -D BUILD_JULIA_BINDINGS=OFF \ -D BUILD_PYTHON_BINDINGS=OFF \ -D BUILD_CLI_EXECUTABLES=OFF \ -D BUILD_GO_BINDINGS=OFF \ - -D BUILD_GO_SHLIB=ON .. 
- $(MAKE) -j $(shell nproc --all) - $(MAKE) preinstall - cd - + -D BUILD_GO_SHLIB=ON .. && \ + $(MAKE) -j $(shell nproc --all) && $(MAKE) preinstall && cd - # Cleanup temporary build files. clean: - go clean --cache - rm -rf $(TMP_DIR)armadillo - rm -rf $(TMP_DIR)mlpack + go clean --cache && rm -rf $(TMP_DIR)armadillo && rm -rf $(TMP_DIR)mlpack # Do everything. install: - @make deps -ifneq ($(UNAME_S),Darwin) + @make deps +ifneq ($(UNAME_S),Darwin) @make setup_armadillo -endif +endif @make download build sudo_install clean test # Install system wide. sudo_install: - cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION)/build - sudo $(MAKE) install - sudo ldconfig - cd - + cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION)/build && sudo $(MAKE) install && cd - +ifneq ($(UNAME_S),Darwin) + cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION)/build && sudo ldconfig && cd - +else + cd $(TMP_DIR)mlpack/mlpack-$(MLPACK_VERSION)/build && \ + sudo update_dyld_shared_cache && cd - +endif # Runs tests. test: go test -v . ./tests diff --git a/adaboost.go b/adaboost.go index 8785c62..99ababd 100644 --- a/adaboost.go +++ b/adaboost.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type AdaboostOptionalParam struct { InputModel *adaBoostModel @@ -38,23 +34,6 @@ func AdaboostOptions() *AdaboostOptionalParam { } } -type adaBoostModel struct { - mem unsafe.Pointer -} - -func (m *adaBoostModel) allocAdaBoostModel(identifier string) { - m.mem = C.mlpackGetAdaBoostModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *adaBoostModel) getAdaBoostModel(identifier string) { - m.allocAdaBoostModel(identifier) -} - -func setAdaBoostModel(identifier string, ptr *adaBoostModel) { - C.mlpackSetAdaBoostModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program implements the AdaBoost (or Adaptive Boosting) algorithm. The variant of AdaBoost implemented here is AdaBoost.MH. 
It uses a weak learner, @@ -83,7 +62,7 @@ func setAdaBoostModel(identifier string, ptr *adaBoostModel) { Note: the following parameter is deprecated and will be removed in mlpack 4.0.0: "Output". Use "Predictions" instead of "Output". - + For example, to run AdaBoost on an input dataset data with labels labelsand perceptrons as the weak learner type, storing the trained model in model, one could use the following command: @@ -107,7 +86,6 @@ func setAdaBoostModel(identifier string, ptr *adaBoostModel) { _, _, predictions, _ := mlpack.Adaboost(param) - Input parameters: - InputModel (adaBoostModel): Input AdaBoost model. diff --git a/approx_kfn.go b/approx_kfn.go index a9eda2a..afa65f1 100644 --- a/approx_kfn.go +++ b/approx_kfn.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type ApproxKfnOptionalParam struct { Algorithm string @@ -42,23 +38,6 @@ func ApproxKfnOptions() *ApproxKfnOptionalParam { } } -type approxkfnModel struct { - mem unsafe.Pointer -} - -func (m *approxkfnModel) allocApproxKFNModel(identifier string) { - m.mem = C.mlpackGetApproxKFNModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *approxkfnModel) getApproxKFNModel(identifier string) { - m.allocApproxKFNModel(identifier) -} - -func setApproxKFNModel(identifier string, ptr *approxkfnModel) { - C.mlpackSetApproxKFNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program implements two strategies for furthest neighbor search. These strategies are: @@ -93,7 +72,7 @@ func setApproxKFNModel(identifier string, ptr *approxkfnModel) { Results for each query point can be stored with the "Neighbors" and "Distances" output parameters. Each row of these output matrices holds the k distances or neighbor indices for each query point. 
- + For example, to find the 5 approximate furthest neighbors with reference_set as the reference set and query_set as the query set using DrusillaSelect, storing the furthest neighbor indices to neighbors and the furthest neighbor @@ -131,7 +110,6 @@ func setApproxKFNModel(identifier string, ptr *approxkfnModel) { _, neighbors, _ := mlpack.ApproxKfn(param) - Input parameters: - Algorithm (string): Algorithm to use: 'ds' or 'qdafn'. Default value diff --git a/arma_util.go b/arma_util.go index 4c36832..84dacfd 100644 --- a/arma_util.go +++ b/arma_util.go @@ -2,10 +2,10 @@ package mlpack /* #cgo CFLAGS: -I. -I/capi -g -Wall -Wno-unused-variable -#cgo LDFLAGS: -L. -lgo_util +#cgo LDFLAGS: -L. -lmlpack_go_util #include #include -#include +#include #include */ import "C" diff --git a/bayesian_linear_regression.go b/bayesian_linear_regression.go new file mode 100644 index 0000000..7a576cc --- /dev/null +++ b/bayesian_linear_regression.go @@ -0,0 +1,192 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_bayesian_linear_regression +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type BayesianLinearRegressionOptionalParam struct { + Center bool + Input *mat.Dense + InputModel *bayesianLinearRegression + Responses *mat.Dense + Scale bool + Test *mat.Dense + Verbose bool +} + +func BayesianLinearRegressionOptions() *BayesianLinearRegressionOptionalParam { + return &BayesianLinearRegressionOptionalParam{ + Center: false, + Input: nil, + InputModel: nil, + Responses: nil, + Scale: false, + Test: nil, + Verbose: false, + } +} + +/* + An implementation of the bayesian linear regression. + This model is a probabilistic view and implementation of the linear + regression. The final solution is obtained by computing a posterior + distribution from gaussian likelihood and a zero mean gaussian isotropic + prior distribution on the solution. + Optimization is AUTOMATIC and does not require cross validation. 
The + optimization is performed by maximization of the evidence function. Parameters + are tuned during the maximization of the marginal likelihood. This procedure + includes the Ockham's razor that penalizes over complex solutions. + + This program is able to train a Bayesian linear regression model or load a + model from file, output regression predictions for a test set, and save the + trained model to a file. + + To train a BayesianLinearRegression model, the "Input" and + "Responses"parameters must be given. The "Center"and "Scale" parameters + control the centering and the normalizing options. A trained model can be + saved with the "OutputModel". If no training is desired at all, a model can be + passed via the "InputModel" parameter. + + The program can also provide predictions for test data using either the + trained model or the given input model. Test points can be specified with the + "Test" parameter. Predicted responses to the test points can be saved with + the "Predictions" output parameter. The corresponding standard deviation can + be save by precising the "Stds" parameter. + + For example, the following command trains a model on the data data and + responses responseswith center set to true and scale set to false (so, + Bayesian linear regression is being solved, and then the model is saved to + blr_model: + + // Initialize optional parameters for BayesianLinearRegression(). + param := mlpack.BayesianLinearRegressionOptions() + param.Input = data + param.Responses = responses + param.Center = 1 + param.Scale = 0 + + blr_model, _, _ := mlpack.BayesianLinearRegression(param) + + The following command uses the blr_model to provide predicted responses for + the data test and save those responses to test_predictions: + + // Initialize optional parameters for BayesianLinearRegression(). 
+ param := mlpack.BayesianLinearRegressionOptions() + param.InputModel = &blr_model + param.Test = test + + _, test_predictions, _ := mlpack.BayesianLinearRegression(param) + + Because the estimator computes a predictive distribution instead of simple + point estimate, the "Stds" parameter allows to save the prediction + uncertainties: + + // Initialize optional parameters for BayesianLinearRegression(). + param := mlpack.BayesianLinearRegressionOptions() + param.InputModel = &blr_model + param.Test = test + + _, test_predictions, stds := mlpack.BayesianLinearRegression(param) + + Input parameters: + + - Center (bool): Center the data and fit the intercept if enabled. + - Input (mat.Dense): Matrix of covariates (X). + - InputModel (bayesianLinearRegression): Trained + BayesianLinearRegression model to use. + - Responses (mat.Dense): Matrix of responses/observations (y). + - Scale (bool): Scale each feature by their standard deviations if + enabled. + - Test (mat.Dense): Matrix containing points to regress on (test + points). + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - outputModel (bayesianLinearRegression): Output + BayesianLinearRegression model. + - predictions (mat.Dense): If --test_file is specified, this file is + where the predicted responses will be saved. + - stds (mat.Dense): If specified, this is where the standard deviations + of the predictive distribution will be saved. + + */ +func BayesianLinearRegression(param *BayesianLinearRegressionOptionalParam) (bayesianLinearRegression, *mat.Dense, *mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("BayesianLinearRegression") + + // Detect if the parameter was passed; set if so. + if param.Center != false { + setParamBool("center", param.Center) + setPassed("center") + } + + // Detect if the parameter was passed; set if so. 
+ if param.Input != nil { + gonumToArmaMat("input", param.Input) + setPassed("input") + } + + // Detect if the parameter was passed; set if so. + if param.InputModel != nil { + setBayesianLinearRegression("input_model", param.InputModel) + setPassed("input_model") + } + + // Detect if the parameter was passed; set if so. + if param.Responses != nil { + gonumToArmaRow("responses", param.Responses) + setPassed("responses") + } + + // Detect if the parameter was passed; set if so. + if param.Scale != false { + setParamBool("scale", param.Scale) + setPassed("scale") + } + + // Detect if the parameter was passed; set if so. + if param.Test != nil { + gonumToArmaMat("test", param.Test) + setPassed("test") + } + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output_model") + setPassed("predictions") + setPassed("stds") + + // Call the mlpack program. + C.mlpackBayesianLinearRegression() + + // Initialize result variable and get output. + var outputModel bayesianLinearRegression + outputModel.getBayesianLinearRegression("output_model") + var predictionsPtr mlpackArma + predictions := predictionsPtr.armaToGonumMat("predictions") + var stdsPtr mlpackArma + stds := stdsPtr.armaToGonumMat("stds") + + // Clear settings. + clearSettings() + + // Return output(s). + return outputModel, predictions, stds +} diff --git a/capi/adaboost.h b/capi/adaboost.h index b9baff9..7d0be37 100644 --- a/capi/adaboost.h +++ b/capi/adaboost.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/adaboost.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_adaboost_H +#define GO_adaboost_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackAdaboost(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a AdaBoostModel parameter. extern void mlpackSetAdaBoostModelPtr(const char* identifier, void* value); -extern void *mlpackGetAdaBoostModelPtr(const char* identifier); +// Get the pointer to a AdaBoostModel parameter. +extern void* mlpackGetAdaBoostModelPtr(const char* identifier); -extern void mlpackAdaboost(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/approx_kfn.h b/capi/approx_kfn.h index c0647b4..99e97a3 100644 --- a/capi/approx_kfn.h +++ b/capi/approx_kfn.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/approx_kfn.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_approx_kfn_H +#define GO_approx_kfn_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackApproxKfn(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a ApproxKFNModel parameter. extern void mlpackSetApproxKFNModelPtr(const char* identifier, void* value); -extern void *mlpackGetApproxKFNModelPtr(const char* identifier); +// Get the pointer to a ApproxKFNModel parameter. +extern void* mlpackGetApproxKFNModelPtr(const char* identifier); -extern void mlpackApproxKfn(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/arma_util.h b/capi/arma_util.h index 0729787..be1505c 100644 --- a/capi/arma_util.h +++ b/capi/arma_util.h @@ -146,7 +146,7 @@ int mlpackNumElemCol(const char* identifier); int mlpackNumElemUcol(const char* identifier); /** - * Call CLI::SetParam>(). 
+ * Call IO::SetParam>(). */ void mlpackToArmaMatWithInfo(const char* identifier, const bool* dimensions, diff --git a/capi/bayesian_linear_regression.h b/capi/bayesian_linear_regression.h new file mode 100644 index 0000000..4005773 --- /dev/null +++ b/capi/bayesian_linear_regression.h @@ -0,0 +1,34 @@ +/** + * @file capi/bayesian_linear_regression.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_bayesian_linear_regression_H +#define GO_bayesian_linear_regression_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackBayesianLinearRegression(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a BayesianLinearRegression parameter. +extern void mlpackSetBayesianLinearRegressionPtr(const char* identifier, void* value); + +// Get the pointer to a BayesianLinearRegression parameter. +extern void* mlpackGetBayesianLinearRegressionPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/cf.h b/capi/cf.h index 446ae6d..5d8ca6a 100644 --- a/capi/cf.h +++ b/capi/cf.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/cf.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_cf_H +#define GO_cf_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackCf(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a CFModel parameter. extern void mlpackSetCFModelPtr(const char* identifier, void* value); -extern void *mlpackGetCFModelPtr(const char* identifier); +// Get the pointer to a CFModel parameter. 
+extern void* mlpackGetCFModelPtr(const char* identifier); -extern void mlpackCf(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/dbscan.h b/capi/dbscan.h index 13f686a..0cfce23 100644 --- a/capi/dbscan.h +++ b/capi/dbscan.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/dbscan.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_dbscan_H +#define GO_dbscan_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackDbscan(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/decision_stump.h b/capi/decision_stump.h index 53cd7c6..b044a76 100644 --- a/capi/decision_stump.h +++ b/capi/decision_stump.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/decision_stump.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_decision_stump_H +#define GO_decision_stump_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackDecisionStump(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a DSModel parameter. extern void mlpackSetDSModelPtr(const char* identifier, void* value); -extern void *mlpackGetDSModelPtr(const char* identifier); +// Get the pointer to a DSModel parameter. 
+extern void* mlpackGetDSModelPtr(const char* identifier); -extern void mlpackDecisionStump(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/decision_tree.h b/capi/decision_tree.h index edcae6f..494df66 100644 --- a/capi/decision_tree.h +++ b/capi/decision_tree.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/decision_tree.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_decision_tree_H +#define GO_decision_tree_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackDecisionTree(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a DecisionTreeModel parameter. extern void mlpackSetDecisionTreeModelPtr(const char* identifier, void* value); -extern void *mlpackGetDecisionTreeModelPtr(const char* identifier); +// Get the pointer to a DecisionTreeModel parameter. +extern void* mlpackGetDecisionTreeModelPtr(const char* identifier); -extern void mlpackDecisionTree(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/det.h b/capi/det.h index f4d3962..a352be4 100644 --- a/capi/det.h +++ b/capi/det.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/det.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_det_H +#define GO_det_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackDet(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a DTree<> parameter. extern void mlpackSetDTreePtr(const char* identifier, void* value); -extern void *mlpackGetDTreePtr(const char* identifier); +// Get the pointer to a DTree<> parameter. 
+extern void* mlpackGetDTreePtr(const char* identifier); -extern void mlpackDet(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/emst.h b/capi/emst.h index 0c3728d..a833ae4 100644 --- a/capi/emst.h +++ b/capi/emst.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/emst.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_emst_H +#define GO_emst_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackEmst(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/fastmks.h b/capi/fastmks.h index 801adcc..4895f27 100644 --- a/capi/fastmks.h +++ b/capi/fastmks.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/fastmks.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_fastmks_H +#define GO_fastmks_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackFastmks(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a FastMKSModel parameter. extern void mlpackSetFastMKSModelPtr(const char* identifier, void* value); -extern void *mlpackGetFastMKSModelPtr(const char* identifier); +// Get the pointer to a FastMKSModel parameter. 
+extern void* mlpackGetFastMKSModelPtr(const char* identifier); -extern void mlpackFastmks(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/gmm_generate.h b/capi/gmm_generate.h new file mode 100644 index 0000000..e624c6d --- /dev/null +++ b/capi/gmm_generate.h @@ -0,0 +1,34 @@ +/** + * @file capi/gmm_generate.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_gmm_generate_H +#define GO_gmm_generate_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackGmmGenerate(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a GMM parameter. +extern void mlpackSetGMMPtr(const char* identifier, void* value); + +// Get the pointer to a GMM parameter. +extern void* mlpackGetGMMPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/gmm_probability.h b/capi/gmm_probability.h new file mode 100644 index 0000000..979698c --- /dev/null +++ b/capi/gmm_probability.h @@ -0,0 +1,34 @@ +/** + * @file capi/gmm_probability.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_gmm_probability_H +#define GO_gmm_probability_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackGmmProbability(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a GMM parameter. +extern void mlpackSetGMMPtr(const char* identifier, void* value); + +// Get the pointer to a GMM parameter. 
+extern void* mlpackGetGMMPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/gmm_train.h b/capi/gmm_train.h index 715ae99..b152483 100644 --- a/capi/gmm_train.h +++ b/capi/gmm_train.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/gmm_train.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_gmm_train_H +#define GO_gmm_train_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackGmmTrain(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a GMM parameter. extern void mlpackSetGMMPtr(const char* identifier, void* value); -extern void *mlpackGetGMMPtr(const char* identifier); +// Get the pointer to a GMM parameter. +extern void* mlpackGetGMMPtr(const char* identifier); -extern void mlpackGmmTrain(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/hmm_generate.h b/capi/hmm_generate.h new file mode 100644 index 0000000..1b47d98 --- /dev/null +++ b/capi/hmm_generate.h @@ -0,0 +1,34 @@ +/** + * @file capi/hmm_generate.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_hmm_generate_H +#define GO_hmm_generate_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackHmmGenerate(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a HMMModel parameter. +extern void mlpackSetHMMModelPtr(const char* identifier, void* value); + +// Get the pointer to a HMMModel parameter. 
+extern void* mlpackGetHMMModelPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/hmm_loglik.h b/capi/hmm_loglik.h new file mode 100644 index 0000000..329b542 --- /dev/null +++ b/capi/hmm_loglik.h @@ -0,0 +1,34 @@ +/** + * @file capi/hmm_loglik.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_hmm_loglik_H +#define GO_hmm_loglik_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackHmmLoglik(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a HMMModel parameter. +extern void mlpackSetHMMModelPtr(const char* identifier, void* value); + +// Get the pointer to a HMMModel parameter. +extern void* mlpackGetHMMModelPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/hmm_train.h b/capi/hmm_train.h new file mode 100644 index 0000000..30f1341 --- /dev/null +++ b/capi/hmm_train.h @@ -0,0 +1,34 @@ +/** + * @file capi/hmm_train.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_hmm_train_H +#define GO_hmm_train_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackHmmTrain(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a HMMModel parameter. +extern void mlpackSetHMMModelPtr(const char* identifier, void* value); + +// Get the pointer to a HMMModel parameter. 
+extern void* mlpackGetHMMModelPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/hmm_viterbi.h b/capi/hmm_viterbi.h new file mode 100644 index 0000000..3f39c95 --- /dev/null +++ b/capi/hmm_viterbi.h @@ -0,0 +1,34 @@ +/** + * @file capi/hmm_viterbi.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_hmm_viterbi_H +#define GO_hmm_viterbi_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackHmmViterbi(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a HMMModel parameter. +extern void mlpackSetHMMModelPtr(const char* identifier, void* value); + +// Get the pointer to a HMMModel parameter. +extern void* mlpackGetHMMModelPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/hoeffding_tree.h b/capi/hoeffding_tree.h index 0df3475..c20114e 100644 --- a/capi/hoeffding_tree.h +++ b/capi/hoeffding_tree.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/hoeffding_tree.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_hoeffding_tree_H +#define GO_hoeffding_tree_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackHoeffdingTree(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a HoeffdingTreeModel parameter. extern void mlpackSetHoeffdingTreeModelPtr(const char* identifier, void* value); -extern void *mlpackGetHoeffdingTreeModelPtr(const char* identifier); +// Get the pointer to a HoeffdingTreeModel parameter. 
+extern void* mlpackGetHoeffdingTreeModelPtr(const char* identifier); -extern void mlpackHoeffdingTree(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/image_converter.h b/capi/image_converter.h index 30a2861..da0cb19 100644 --- a/capi/image_converter.h +++ b/capi/image_converter.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/image_converter.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_image_converter_H +#define GO_image_converter_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackImageConverter(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/cli_util.h b/capi/io_util.h similarity index 93% rename from capi/cli_util.h rename to capi/io_util.h index bd2085f..900f095 100644 --- a/capi/cli_util.h +++ b/capi/io_util.h @@ -1,5 +1,5 @@ /** - * @file bindings/go/mlpack/capi/cli_util.h + * @file bindings/go/mlpack/capi/io_util.h * @author Yasmine Dumouchel * @author Yashwant Singh * @@ -10,8 +10,8 @@ * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ -#ifndef MLPACK_BINDINGS_GO_MLPACK_CLI_UTIL_H -#define MLPACK_BINDINGS_GO_MLPACK_CLI_UTIL_H +#ifndef MLPACK_BINDINGS_GO_MLPACK_IO_UTIL_H +#define MLPACK_BINDINGS_GO_MLPACK_IO_UTIL_H #include #include @@ -66,13 +66,13 @@ void mlpackSetParamVectorStr(const char* identifier, const size_t element); /** - * Call CLI::SetParam>() to set the length. + * Call IO::SetParam>() to set the length. */ void mlpackSetParamVectorStrLen(const char* identifier, const size_t length); /** - * Check if CLI has a specified parameter. + * Check if IO has a specified parameter. 
*/ bool mlpackHasParam(const char* identifier); diff --git a/capi/kde.h b/capi/kde.h new file mode 100644 index 0000000..ecffca5 --- /dev/null +++ b/capi/kde.h @@ -0,0 +1,34 @@ +/** + * @file capi/kde.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_kde_H +#define GO_kde_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackKde(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a KDEModel parameter. +extern void mlpackSetKDEModelPtr(const char* identifier, void* value); + +// Get the pointer to a KDEModel parameter. +extern void* mlpackGetKDEModelPtr(const char* identifier); + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/kernel_pca.h b/capi/kernel_pca.h index cefc6c1..b930c1a 100644 --- a/capi/kernel_pca.h +++ b/capi/kernel_pca.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/kernel_pca.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_kernel_pca_H +#define GO_kernel_pca_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackKernelPca(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/kfn.h b/capi/kfn.h index 004b3c8..6214fcf 100644 --- a/capi/kfn.h +++ b/capi/kfn.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/kfn.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_kfn_H +#define GO_kfn_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackKfn(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a KFNModel parameter. extern void mlpackSetKFNModelPtr(const char* identifier, void* value); -extern void *mlpackGetKFNModelPtr(const char* identifier); +// Get the pointer to a KFNModel parameter. +extern void* mlpackGetKFNModelPtr(const char* identifier); -extern void mlpackKfn(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/kmeans.h b/capi/kmeans.h index d30228a..18b48fc 100644 --- a/capi/kmeans.h +++ b/capi/kmeans.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/kmeans.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_kmeans_H +#define GO_kmeans_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackKmeans(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/knn.h b/capi/knn.h index b8de4f5..fe5bb46 100644 --- a/capi/knn.h +++ b/capi/knn.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/knn.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_knn_H +#define GO_knn_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackKnn(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a KNNModel parameter. 
extern void mlpackSetKNNModelPtr(const char* identifier, void* value); -extern void *mlpackGetKNNModelPtr(const char* identifier); +// Get the pointer to a KNNModel parameter. +extern void* mlpackGetKNNModelPtr(const char* identifier); -extern void mlpackKnn(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/krann.h b/capi/krann.h index 02dec2c..ddebb82 100644 --- a/capi/krann.h +++ b/capi/krann.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/krann.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_krann_H +#define GO_krann_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackKrann(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a RANNModel parameter. extern void mlpackSetRANNModelPtr(const char* identifier, void* value); -extern void *mlpackGetRANNModelPtr(const char* identifier); +// Get the pointer to a RANNModel parameter. +extern void* mlpackGetRANNModelPtr(const char* identifier); -extern void mlpackKrann(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/lars.h b/capi/lars.h index 1280353..91501df 100644 --- a/capi/lars.h +++ b/capi/lars.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/lars.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_lars_H +#define GO_lars_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLars(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LARS parameter. 
extern void mlpackSetLARSPtr(const char* identifier, void* value); -extern void *mlpackGetLARSPtr(const char* identifier); +// Get the pointer to a LARS parameter. +extern void* mlpackGetLARSPtr(const char* identifier); -extern void mlpackLars(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/linear_regression.h b/capi/linear_regression.h index 8420df0..e9ccab9 100644 --- a/capi/linear_regression.h +++ b/capi/linear_regression.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/linear_regression.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_linear_regression_H +#define GO_linear_regression_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLinearRegression(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LinearRegression parameter. extern void mlpackSetLinearRegressionPtr(const char* identifier, void* value); -extern void *mlpackGetLinearRegressionPtr(const char* identifier); +// Get the pointer to a LinearRegression parameter. +extern void* mlpackGetLinearRegressionPtr(const char* identifier); -extern void mlpackLinearRegression(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/linear_svm.h b/capi/linear_svm.h index 68d5f19..8b06e1c 100644 --- a/capi/linear_svm.h +++ b/capi/linear_svm.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/linear_svm.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_linear_svm_H +#define GO_linear_svm_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLinearSvm(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LinearSVMModel parameter. extern void mlpackSetLinearSVMModelPtr(const char* identifier, void* value); -extern void *mlpackGetLinearSVMModelPtr(const char* identifier); +// Get the pointer to a LinearSVMModel parameter. +extern void* mlpackGetLinearSVMModelPtr(const char* identifier); -extern void mlpackLinearSvm(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/lmnn.h b/capi/lmnn.h index 27fa4fa..8c09035 100644 --- a/capi/lmnn.h +++ b/capi/lmnn.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/lmnn.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_lmnn_H +#define GO_lmnn_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackLmnn(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/local_coordinate_coding.h b/capi/local_coordinate_coding.h index 9e78f28..3955812 100644 --- a/capi/local_coordinate_coding.h +++ b/capi/local_coordinate_coding.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/local_coordinate_coding.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_local_coordinate_coding_H +#define GO_local_coordinate_coding_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLocalCoordinateCoding(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LocalCoordinateCoding parameter. extern void mlpackSetLocalCoordinateCodingPtr(const char* identifier, void* value); -extern void *mlpackGetLocalCoordinateCodingPtr(const char* identifier); +// Get the pointer to a LocalCoordinateCoding parameter. +extern void* mlpackGetLocalCoordinateCodingPtr(const char* identifier); -extern void mlpackLocalCoordinateCoding(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/logistic_regression.h b/capi/logistic_regression.h index 2a809c0..5eb877b 100644 --- a/capi/logistic_regression.h +++ b/capi/logistic_regression.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/logistic_regression.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_logistic_regression_H +#define GO_logistic_regression_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLogisticRegression(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LogisticRegression<> parameter. extern void mlpackSetLogisticRegressionPtr(const char* identifier, void* value); -extern void *mlpackGetLogisticRegressionPtr(const char* identifier); +// Get the pointer to a LogisticRegression<> parameter. 
+extern void* mlpackGetLogisticRegressionPtr(const char* identifier); -extern void mlpackLogisticRegression(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/lsh.h b/capi/lsh.h index a973623..582dad2 100644 --- a/capi/lsh.h +++ b/capi/lsh.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/lsh.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_lsh_H +#define GO_lsh_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackLsh(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a LSHSearch<> parameter. extern void mlpackSetLSHSearchPtr(const char* identifier, void* value); -extern void *mlpackGetLSHSearchPtr(const char* identifier); +// Get the pointer to a LSHSearch<> parameter. +extern void* mlpackGetLSHSearchPtr(const char* identifier); -extern void mlpackLsh(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/mean_shift.h b/capi/mean_shift.h index 3324a63..be4c02d 100644 --- a/capi/mean_shift.h +++ b/capi/mean_shift.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/mean_shift.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_mean_shift_H +#define GO_mean_shift_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackMeanShift(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. 
+ + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/nbc.h b/capi/nbc.h index f2b1420..58010d9 100644 --- a/capi/nbc.h +++ b/capi/nbc.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/nbc.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_nbc_H +#define GO_nbc_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackNbc(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a NBCModel parameter. extern void mlpackSetNBCModelPtr(const char* identifier, void* value); -extern void *mlpackGetNBCModelPtr(const char* identifier); +// Get the pointer to a NBCModel parameter. +extern void* mlpackGetNBCModelPtr(const char* identifier); -extern void mlpackNbc(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/nca.h b/capi/nca.h index 9dedee4..d99e9e7 100644 --- a/capi/nca.h +++ b/capi/nca.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/nca.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_nca_H +#define GO_nca_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackNca(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/nmf.h b/capi/nmf.h index d097a38..46294db 100644 --- a/capi/nmf.h +++ b/capi/nmf.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/nmf.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_nmf_H +#define GO_nmf_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackNmf(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/pca.h b/capi/pca.h index 5a3f0a4..2c71b37 100644 --- a/capi/pca.h +++ b/capi/pca.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/pca.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_pca_H +#define GO_pca_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackPca(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/perceptron.h b/capi/perceptron.h index e5aa95a..084de07 100644 --- a/capi/perceptron.h +++ b/capi/perceptron.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/perceptron.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_perceptron_H +#define GO_perceptron_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackPerceptron(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a PerceptronModel parameter. extern void mlpackSetPerceptronModelPtr(const char* identifier, void* value); -extern void *mlpackGetPerceptronModelPtr(const char* identifier); +// Get the pointer to a PerceptronModel parameter. 
+extern void* mlpackGetPerceptronModelPtr(const char* identifier); -extern void mlpackPerceptron(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/preprocess_binarize.h b/capi/preprocess_binarize.h index cb66de7..81e6c19 100644 --- a/capi/preprocess_binarize.h +++ b/capi/preprocess_binarize.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/preprocess_binarize.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_preprocess_binarize_H +#define GO_preprocess_binarize_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackPreprocessBinarize(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/preprocess_describe.h b/capi/preprocess_describe.h index bdd1e37..53f543e 100644 --- a/capi/preprocess_describe.h +++ b/capi/preprocess_describe.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/preprocess_describe.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_preprocess_describe_H +#define GO_preprocess_describe_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackPreprocessDescribe(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. 
+ + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/preprocess_one_hot_encoding.h b/capi/preprocess_one_hot_encoding.h new file mode 100644 index 0000000..327a300 --- /dev/null +++ b/capi/preprocess_one_hot_encoding.h @@ -0,0 +1,28 @@ +/** + * @file capi/preprocess_one_hot_encoding.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_preprocess_one_hot_encoding_H +#define GO_preprocess_one_hot_encoding_H + +#include + +#if defined(__cplusplus) || defined(c_plusplus) + +extern "C" +{ +#endif + +extern void mlpackPreprocessOneHotEncoding(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + +#if defined(__cplusplus) || defined(c_plusplus) +} +#endif + +#endif diff --git a/capi/preprocess_scale.h b/capi/preprocess_scale.h index 72a0b1a..ba8b567 100644 --- a/capi/preprocess_scale.h +++ b/capi/preprocess_scale.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/preprocess_scale.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_preprocess_scale_H +#define GO_preprocess_scale_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackPreprocessScale(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a ScalingModel parameter. extern void mlpackSetScalingModelPtr(const char* identifier, void* value); -extern void *mlpackGetScalingModelPtr(const char* identifier); +// Get the pointer to a ScalingModel parameter. 
+extern void* mlpackGetScalingModelPtr(const char* identifier); -extern void mlpackPreprocessScale(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/preprocess_split.h b/capi/preprocess_split.h index 2a843e1..b36255e 100644 --- a/capi/preprocess_split.h +++ b/capi/preprocess_split.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/preprocess_split.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_preprocess_split_H +#define GO_preprocess_split_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackPreprocessSplit(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/radical.h b/capi/radical.h index ed29699..c86d53e 100644 --- a/capi/radical.h +++ b/capi/radical.h @@ -1,12 +1,28 @@ -#include +/** + * @file capi/radical.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_radical_H +#define GO_radical_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif extern void mlpackRadical(); +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + + #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/random_forest.h b/capi/random_forest.h index 2553ec7..5d29f4c 100644 --- a/capi/random_forest.h +++ b/capi/random_forest.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/random_forest.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_random_forest_H +#define GO_random_forest_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackRandomForest(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a RandomForestModel parameter. extern void mlpackSetRandomForestModelPtr(const char* identifier, void* value); -extern void *mlpackGetRandomForestModelPtr(const char* identifier); +// Get the pointer to a RandomForestModel parameter. +extern void* mlpackGetRandomForestModelPtr(const char* identifier); -extern void mlpackRandomForest(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/range_search.h b/capi/range_search.h deleted file mode 100644 index 1a85729..0000000 --- a/capi/range_search.h +++ /dev/null @@ -1,16 +0,0 @@ -#include -#include - -#if defined(__cplusplus) || defined(c_plusplus) -extern "C" { -#endif - -extern void mlpackSetRSModelPtr(const char* identifier, void* value); - -extern void *mlpackGetRSModelPtr(const char* identifier); - -extern void mlpackRangeSearch(); - -#if defined(__cplusplus) || defined(c_plusplus) -} -#endif diff --git a/capi/softmax_regression.h b/capi/softmax_regression.h index 17579ab..6408466 100644 --- a/capi/softmax_regression.h +++ b/capi/softmax_regression.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/softmax_regression.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_softmax_regression_H +#define GO_softmax_regression_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackSoftmaxRegression(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a SoftmaxRegression parameter. 
extern void mlpackSetSoftmaxRegressionPtr(const char* identifier, void* value); -extern void *mlpackGetSoftmaxRegressionPtr(const char* identifier); +// Get the pointer to a SoftmaxRegression parameter. +extern void* mlpackGetSoftmaxRegressionPtr(const char* identifier); -extern void mlpackSoftmaxRegression(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/sparse_coding.h b/capi/sparse_coding.h index b57b0fb..29aceba 100644 --- a/capi/sparse_coding.h +++ b/capi/sparse_coding.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/sparse_coding.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. + */ +#ifndef GO_sparse_coding_H +#define GO_sparse_coding_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackSparseCoding(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a SparseCoding parameter. extern void mlpackSetSparseCodingPtr(const char* identifier, void* value); -extern void *mlpackGetSparseCodingPtr(const char* identifier); +// Get the pointer to a SparseCoding parameter. +extern void* mlpackGetSparseCodingPtr(const char* identifier); -extern void mlpackSparseCoding(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/capi/test_go_binding.h b/capi/test_go_binding.h index 4e5fb99..c0b6a53 100644 --- a/capi/test_go_binding.h +++ b/capi/test_go_binding.h @@ -1,16 +1,34 @@ -#include +/** + * @file capi/test_go_binding.h + * + * This is an autogenerated header file for functions specified to the %NAME% + * binding to be called by Go. 
+ */ +#ifndef GO_test_go_binding_H +#define GO_test_go_binding_H + #include #if defined(__cplusplus) || defined(c_plusplus) -extern "C" { + +extern "C" +{ #endif +extern void mlpackTestGoBinding(); + +// Any definitions of methods for dealing with model pointers will be put below +// this comment, if needed. + +// Set the pointer to a GaussianKernel parameter. extern void mlpackSetGaussianKernelPtr(const char* identifier, void* value); -extern void *mlpackGetGaussianKernelPtr(const char* identifier); +// Get the pointer to a GaussianKernel parameter. +extern void* mlpackGetGaussianKernelPtr(const char* identifier); -extern void mlpackTestGoBinding(); #if defined(__cplusplus) || defined(c_plusplus) } #endif + +#endif diff --git a/cf.go b/cf.go index 2fa56d3..bb1eb61 100644 --- a/cf.go +++ b/cf.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type CfOptionalParam struct { Algorithm string @@ -56,23 +52,6 @@ func CfOptions() *CfOptionalParam { } } -type cfModel struct { - mem unsafe.Pointer -} - -func (m *cfModel) allocCFModel(identifier string) { - m.mem = C.mlpackGetCFModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *cfModel) getCFModel(identifier string) { - m.allocCFModel(identifier) -} - -func setCFModel(identifier string, ptr *cfModel) { - C.mlpackSetCFModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program performs collaborative filtering (CF) on the given dataset. Given a list of user, item and preferences (the "Training" parameter), the program @@ -130,7 +109,7 @@ func setCFModel(identifier string, ptr *cfModel) { - 'z_score' -- Z-Score Normalization A trained model may be saved to with the "OutputModel" output parameter. 
- + To train a CF model on a dataset training_set using NMF for decomposition and saving the trained model to model, one could call: @@ -153,7 +132,6 @@ func setCFModel(identifier string, ptr *cfModel) { recommendations, _ := mlpack.Cf(param) - Input parameters: - Algorithm (string): Algorithm used for matrix factorization. Default diff --git a/dbscan.go b/dbscan.go index ab2bdde..3069bac 100644 --- a/dbscan.go +++ b/dbscan.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type DbscanOptionalParam struct { Epsilon float64 @@ -54,7 +52,7 @@ func DbscanOptions() *DbscanOptionalParam { 'hilbert-r', 'r-plus', 'r-plus-plus', 'cover', 'ball'. The "SingleMode" parameter will force single-tree search (as opposed to the default dual-tree search), and '"Naive" will force brute-force range search. - + An example usage to run DBSCAN on the dataset in input with a radius of 0.5 and a minimum cluster size of 5 is given below: @@ -65,7 +63,6 @@ func DbscanOptions() *DbscanOptionalParam { _, _ := mlpack.Dbscan(input, param) - Input parameters: - input (mat.Dense): Input dataset to cluster. 
diff --git a/decision_stump.go b/decision_stump.go index 4d176a5..8c38506 100644 --- a/decision_stump.go +++ b/decision_stump.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type DecisionStumpOptionalParam struct { BucketSize int @@ -34,23 +30,6 @@ func DecisionStumpOptions() *DecisionStumpOptionalParam { } } -type dsModel struct { - mem unsafe.Pointer -} - -func (m *dsModel) allocDSModel(identifier string) { - m.mem = C.mlpackGetDSModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *dsModel) getDSModel(identifier string) { - m.allocDSModel(identifier) -} - -func setDSModel(identifier string, ptr *dsModel) { - C.mlpackSetDSModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program implements a decision stump, which is a single-level decision tree. The decision stump will split on one dimension of the input data, and @@ -85,7 +64,6 @@ func setDSModel(identifier string, ptr *dsModel) { parameter. That stump may later be re-used in subsequent calls to this program (or others). 
- Input parameters: - BucketSize (int): The minimum number of training points in each diff --git a/decision_tree.go b/decision_tree.go index cc5ffa2..2d965de 100644 --- a/decision_tree.go +++ b/decision_tree.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type DecisionTreeOptionalParam struct { InputModel *decisionTreeModel @@ -46,23 +42,6 @@ func DecisionTreeOptions() *DecisionTreeOptionalParam { } } -type decisionTreeModel struct { - mem unsafe.Pointer -} - -func (m *decisionTreeModel) allocDecisionTreeModel(identifier string) { - m.mem = C.mlpackGetDecisionTreeModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *decisionTreeModel) getDecisionTreeModel(identifier string) { - m.allocDecisionTreeModel(identifier) -} - -func setDecisionTreeModel(identifier string, ptr *decisionTreeModel) { - C.mlpackSetDecisionTreeModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* Train and evaluate using a decision tree. Given a dataset containing numeric or categorical features, and associated labels for each point in the dataset, @@ -88,7 +67,7 @@ func setDecisionTreeModel(identifier string, ptr *decisionTreeModel) { "TestLabels" parameter. Predictions for each test point may be saved via the "Predictions" output parameter. Class probabilities for each prediction may be saved with the "Probabilities" output parameter. 
- + For example, to train a decision tree with a minimum leaf size of 20 on the dataset contained in data with labels labels, saving the output model to tree and printing the training error, one could call @@ -115,7 +94,6 @@ func setDecisionTreeModel(identifier string, ptr *decisionTreeModel) { _, predictions, _ := mlpack.DecisionTree(param) - Input parameters: - InputModel (decisionTreeModel): Pre-trained decision tree, to be used diff --git a/det.go b/det.go index 1ac1e2e..ced22da 100644 --- a/det.go +++ b/det.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type DetOptionalParam struct { Folds int @@ -40,23 +36,6 @@ func DetOptions() *DetOptionalParam { } } -type dTree struct { - mem unsafe.Pointer -} - -func (m *dTree) allocDTree(identifier string) { - m.mem = C.mlpackGetDTreePtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *dTree) getDTree(identifier string) { - m.allocDTree(identifier) -} - -func setDTree(identifier string, ptr *dTree) { - C.mlpackSetDTreePtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program performs a number of functions related to Density Estimation Trees. The optimal Density Estimation Tree (DET) can be trained on a set of @@ -83,7 +62,6 @@ func setDTree(identifier string, ptr *dTree) { given as the parameter "InputModel". The density estimates for the test points may be saved using the "TestSetEstimates" output parameter. - Input parameters: - Folds (int): The number of folds of cross-validation to perform for diff --git a/doc.go b/doc.go index 13c8c9e..7523912 100644 --- a/doc.go +++ b/doc.go @@ -7,4 +7,4 @@ programs, Go bindings, and C++ classes which can then be integrated into larger-scale machine learning solutions. 
*/ -package mlpack +package mlpack // import "mlpack.org/v1/mlpack" diff --git a/emst.go b/emst.go index 0fe86db..df41cf8 100644 --- a/emst.go +++ b/emst.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type EmstOptionalParam struct { LeafSize int @@ -39,7 +37,7 @@ func EmstOptions() *EmstOptionalParam { brute-force search is used (this is typically much slower in low dimensions). The leaf size does not affect the results, but it may have some effect on the runtime of the algorithm. - + For example, the minimum spanning tree of the input dataset data can be calculated with a leaf size of 20 and stored as spanning_tree using the following command: @@ -55,7 +53,6 @@ func EmstOptions() *EmstOptionalParam { second dimension corresponds to the greater index of the edge; and the third column corresponds to the distance between the two points. - Input parameters: - input (mat.Dense): Input data matrix. diff --git a/fastmks.go b/fastmks.go index b5f2a42..580e651 100644 --- a/fastmks.go +++ b/fastmks.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type FastmksOptionalParam struct { Bandwidth float64 @@ -48,30 +44,13 @@ func FastmksOptions() *FastmksOptionalParam { } } -type fastmksModel struct { - mem unsafe.Pointer -} - -func (m *fastmksModel) allocFastMKSModel(identifier string) { - m.mem = C.mlpackGetFastMKSModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *fastmksModel) getFastMKSModel(identifier string) { - m.allocFastMKSModel(identifier) -} - -func setFastMKSModel(identifier string, ptr *fastmksModel) { - C.mlpackSetFastMKSModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program will find the k maximum kernels of a set of points, using a query set and a reference set (which can optionally be the same set). 
More specifically, for each point in the query set, the k points in the reference set with maximum kernel evaluations are found. The kernel function used is specified with the "Kernel" parameter. - + For example, the following command will calculate, for each point in the query set query, the five points in the reference set reference with maximum kernel evaluation using the linear kernel. The kernel evaluations may be saved with @@ -96,7 +75,6 @@ func setFastMKSModel(identifier string, ptr *fastmksModel) { This program performs FastMKS using a cover tree. The base used to build the cover tree can be specified with the "Base" parameter. - Input parameters: - Bandwidth (float64): Bandwidth (for Gaussian, Epanechnikov, and diff --git a/gmm_generate.go b/gmm_generate.go new file mode 100644 index 0000000..bd7a5f7 --- /dev/null +++ b/gmm_generate.go @@ -0,0 +1,96 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_gmm_generate +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type GmmGenerateOptionalParam struct { + Seed int + Verbose bool +} + +func GmmGenerateOptions() *GmmGenerateOptionalParam { + return &GmmGenerateOptionalParam{ + Seed: 0, + Verbose: false, + } +} + +/* + This program is able to generate samples from a pre-trained GMM (use gmm_train + to train a GMM). The pre-trained GMM must be specified with the "InputModel" + parameter. The number of samples to generate is specified by the "Samples" + parameter. Output samples may be saved with the "Output" output parameter. + + The following command can be used to generate 100 samples from the pre-trained + GMM gmm and store those generated samples in samples: + + // Initialize optional parameters for GmmGenerate(). + param := mlpack.GmmGenerateOptions() + + samples := mlpack.GmmGenerate(&gmm, 100, param) + + Input parameters: + + - inputModel (gmm): Input GMM model to generate samples from. + - samples (int): Number of samples to generate. 
+ - Seed (int): Random seed. If 0, 'std::time(NULL)' is used. Default + value 0. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - output (mat.Dense): Matrix to save output samples in. + + */ +func GmmGenerate(inputModel *gmm, samples int, param *GmmGenerateOptionalParam) (*mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("GMM Sample Generator") + + // Detect if the parameter was passed; set if so. + setGMM("input_model", inputModel) + setPassed("input_model") + + // Detect if the parameter was passed; set if so. + setParamInt("samples", samples) + setPassed("samples") + + // Detect if the parameter was passed; set if so. + if param.Seed != 0 { + setParamInt("seed", param.Seed) + setPassed("seed") + } + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output") + + // Call the mlpack program. + C.mlpackGmmGenerate() + + // Initialize result variable and get output. + var outputPtr mlpackArma + output := outputPtr.armaToGonumMat("output") + + // Clear settings. + clearSettings() + + // Return output(s). + return output +} diff --git a/gmm_probability.go b/gmm_probability.go new file mode 100644 index 0000000..1e15fa0 --- /dev/null +++ b/gmm_probability.go @@ -0,0 +1,87 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_gmm_probability +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type GmmProbabilityOptionalParam struct { + Verbose bool +} + +func GmmProbabilityOptions() *GmmProbabilityOptionalParam { + return &GmmProbabilityOptionalParam{ + Verbose: false, + } +} + +/* + This program calculates the probability that given points came from a given + GMM (that is, P(X | gmm)). 
The GMM is specified with the "InputModel" + parameter, and the points are specified with the "Input" parameter. The + output probabilities may be saved via the "Output" output parameter. + + So, for example, to calculate the probabilities of each point in points coming + from the pre-trained GMM gmm, while storing those probabilities in probs, the + following command could be used: + + // Initialize optional parameters for GmmProbability(). + param := mlpack.GmmProbabilityOptions() + + probs := mlpack.GmmProbability(&gmm, points, param) + + Input parameters: + + - input (mat.Dense): Input matrix to calculate probabilities of. + - inputModel (gmm): Input GMM to use as model. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - output (mat.Dense): Matrix to store calculated probabilities in. + + */ +func GmmProbability(input *mat.Dense, inputModel *gmm, param *GmmProbabilityOptionalParam) (*mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("GMM Probability Calculator") + + // Detect if the parameter was passed; set if so. + gonumToArmaMat("input", input) + setPassed("input") + + // Detect if the parameter was passed; set if so. + setGMM("input_model", inputModel) + setPassed("input_model") + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output") + + // Call the mlpack program. + C.mlpackGmmProbability() + + // Initialize result variable and get output. + var outputPtr mlpackArma + output := outputPtr.armaToGonumMat("output") + + // Clear settings. + clearSettings() + + // Return output(s). 
+ return output +} diff --git a/gmm_train.go b/gmm_train.go index b5bb89f..67a1a26 100644 --- a/gmm_train.go +++ b/gmm_train.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type GmmTrainOptionalParam struct { DiagonalCovariance bool @@ -48,23 +44,6 @@ func GmmTrainOptions() *GmmTrainOptionalParam { } } -type gmm struct { - mem unsafe.Pointer -} - -func (m *gmm) allocGMM(identifier string) { - m.mem = C.mlpackGetGMMPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *gmm) getGMM(identifier string) { - m.allocGMM(identifier) -} - -func setGMM(identifier string, ptr *gmm) { - C.mlpackSetGMMPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program takes a parametric estimate of a Gaussian mixture model (GMM) using the EM algorithm to find the maximum likelihood estimate. The model may @@ -103,7 +82,7 @@ func setGMM(identifier string, ptr *gmm) { positive definite. Specifying the flag can cause faster runtime, but may also cause non-positive definite covariance matrices, which will cause the program to crash. - + As an example, to train a 6-Gaussian GMM on the data in data with a maximum of 100 iterations of EM and 3 trials, saving the trained GMM to gmm, the following command can be used: @@ -123,7 +102,6 @@ func setGMM(identifier string, ptr *gmm) { new_gmm := mlpack.GmmTrain(data2, 6, param) - Input parameters: - gaussians (int): Number of Gaussians in the GMM. 
diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..28d7f43 --- /dev/null +++ b/go.mod @@ -0,0 +1,5 @@ +module mlpack.org/v1/mlpack + +go 1.13 + +require gonum.org/v1/gonum v0.7.0 diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..0ee9003 --- /dev/null +++ b/go.sum @@ -0,0 +1,18 @@ +github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= +github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= +golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2 h1:y102fOLFqhV41b+4GPiJoa0k/x+pJcEi2/HB1Y5T6fU= +golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= +gonum.org/v1/gonum v0.7.0 h1:Hdks0L0hgznZLG9nzXb8vZ0rRvqNvAcgAp84y7Mwkgw= +gonum.org/v1/gonum v0.7.0/go.mod h1:L02bwd0sqlsvRv41G7wGWFCsVNZFv/k1xzGIxeANHGM= +gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0 h1:OE9mWmgKkjJyEmDAAtGMPjXu+YNeGvK9VTSHY6+Qihc= +gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod 
h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= +gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/hmm_generate.go b/hmm_generate.go new file mode 100644 index 0000000..cacd1a5 --- /dev/null +++ b/hmm_generate.go @@ -0,0 +1,114 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_hmm_generate +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type HmmGenerateOptionalParam struct { + Seed int + StartState int + Verbose bool +} + +func HmmGenerateOptions() *HmmGenerateOptionalParam { + return &HmmGenerateOptionalParam{ + Seed: 0, + StartState: 0, + Verbose: false, + } +} + +/* + This utility takes an already-trained HMM, specified as the "Model" parameter, + and generates a random observation sequence and hidden state sequence based on + its parameters. The observation sequence may be saved with the "Output" output + parameter, and the internal state sequence may be saved with the "State" + output parameter. + + The state to start the sequence in may be specified with the "StartState" + parameter. + + For example, to generate a sequence of length 150 from the HMM hmm and save + the observation sequence to observations and the hidden state sequence to + states, the following command may be used: + + // Initialize optional parameters for HmmGenerate(). + param := mlpack.HmmGenerateOptions() + + observations, states := mlpack.HmmGenerate(&hmm, 150, param) + + Input parameters: + + - length (int): Length of sequence to generate. + - model (hmmModel): Trained HMM to generate sequences with. + - Seed (int): Random seed. If 0, 'std::time(NULL)' is used. Default + value 0. + - StartState (int): Starting state of sequence. Default value 0. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. 
+ + Output parameters: + + - output (mat.Dense): Matrix to save observation sequence to. + - state (mat.Dense): Matrix to save hidden state sequence to. + + */ +func HmmGenerate(length int, model *hmmModel, param *HmmGenerateOptionalParam) (*mat.Dense, *mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("Hidden Markov Model (HMM) Sequence Generator") + + // Detect if the parameter was passed; set if so. + setParamInt("length", length) + setPassed("length") + + // Detect if the parameter was passed; set if so. + setHMMModel("model", model) + setPassed("model") + + // Detect if the parameter was passed; set if so. + if param.Seed != 0 { + setParamInt("seed", param.Seed) + setPassed("seed") + } + + // Detect if the parameter was passed; set if so. + if param.StartState != 0 { + setParamInt("start_state", param.StartState) + setPassed("start_state") + } + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output") + setPassed("state") + + // Call the mlpack program. + C.mlpackHmmGenerate() + + // Initialize result variable and get output. + var outputPtr mlpackArma + output := outputPtr.armaToGonumMat("output") + var statePtr mlpackArma + state := statePtr.armaToGonumUmat("state") + + // Clear settings. + clearSettings() + + // Return output(s). + return output, state +} diff --git a/hmm_loglik.go b/hmm_loglik.go new file mode 100644 index 0000000..63cb2a3 --- /dev/null +++ b/hmm_loglik.go @@ -0,0 +1,86 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. 
-lmlpack_go_hmm_loglik +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type HmmLoglikOptionalParam struct { + Verbose bool +} + +func HmmLoglikOptions() *HmmLoglikOptionalParam { + return &HmmLoglikOptionalParam{ + Verbose: false, + } +} + +/* + This utility takes an already-trained HMM, specified with the "InputModel" + parameter, and evaluates the log-likelihood of a sequence of observations, + given with the "Input" parameter. The computed log-likelihood is given as + output. + + For example, to compute the log-likelihood of the sequence seq with the + pre-trained HMM hmm, the following command may be used: + + // Initialize optional parameters for HmmLoglik(). + param := mlpack.HmmLoglikOptions() + + _ := mlpack.HmmLoglik(seq, &hmm, param) + + Input parameters: + + - input (mat.Dense): File containing observations, + - inputModel (hmmModel): File containing HMM. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - logLikelihood (float64): Log-likelihood of the sequence. Default + value 0. + + */ +func HmmLoglik(input *mat.Dense, inputModel *hmmModel, param *HmmLoglikOptionalParam) (float64) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("Hidden Markov Model (HMM) Sequence Log-Likelihood") + + // Detect if the parameter was passed; set if so. + gonumToArmaMat("input", input) + setPassed("input") + + // Detect if the parameter was passed; set if so. + setHMMModel("input_model", inputModel) + setPassed("input_model") + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("log_likelihood") + + // Call the mlpack program. + C.mlpackHmmLoglik() + + // Initialize result variable and get output. 
+ logLikelihood := getParamDouble("log_likelihood") + + // Clear settings. + clearSettings() + + // Return output(s). + return logLikelihood +} diff --git a/hmm_train.go b/hmm_train.go new file mode 100644 index 0000000..ae9654c --- /dev/null +++ b/hmm_train.go @@ -0,0 +1,167 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_hmm_train +#include +#include +*/ +import "C" + + +type HmmTrainOptionalParam struct { + Batch bool + Gaussians int + InputModel *hmmModel + LabelsFile string + Seed int + States int + Tolerance float64 + Type string + Verbose bool +} + +func HmmTrainOptions() *HmmTrainOptionalParam { + return &HmmTrainOptionalParam{ + Batch: false, + Gaussians: 0, + InputModel: nil, + LabelsFile: "", + Seed: 0, + States: 0, + Tolerance: 1e-05, + Type: "gaussian", + Verbose: false, + } +} + +/* + This program allows a Hidden Markov Model to be trained on labeled or + unlabeled data. It supports four types of HMMs: Discrete HMMs, Gaussian HMMs, + GMM HMMs, or Diagonal GMM HMMs + + Either one input sequence can be specified (with "InputFile"), or, a file + containing files in which input sequences can be found (when + "InputFile"and"Batch" are used together). In addition, labels can be provided + in the file specified by "LabelsFile", and if "Batch" is used, the file given + to "LabelsFile" should contain a list of files of labels corresponding to the + sequences in the file given to "InputFile". + + The HMM is trained with the Baum-Welch algorithm if no labels are provided. + The tolerance of the Baum-Welch algorithm can be set with the + "Tolerance"option. By default, the transition matrix is randomly initialized + and the emission distributions are initialized to fit the extent of the data. + + Optionally, a pre-created HMM model can be used as a guess for the transition + matrix and emission probabilities; this is specifiable with "OutputModel". 
+ + Input parameters: + + - inputFile (string): File containing input observations. + - Batch (bool): If true, input_file (and if passed, labels_file) are + expected to contain a list of files to use as input observation + sequences (and label sequences). + - Gaussians (int): Number of gaussians in each GMM (necessary when type + is 'gmm'). Default value 0. + - InputModel (hmmModel): Pre-existing HMM model to initialize training + with. + - LabelsFile (string): Optional file of hidden states, used for labeled + training. Default value ''. + - Seed (int): Random seed. If 0, 'std::time(NULL)' is used. Default + value 0. + - States (int): Number of hidden states in HMM (necessary, unless + model_file is specified). Default value 0. + - Tolerance (float64): Tolerance of the Baum-Welch algorithm. Default + value 1e-05. + - Type (string): Type of HMM: discrete | gaussian | diag_gmm | gmm. + Default value 'gaussian'. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - outputModel (hmmModel): Output for trained HMM. + + */ +func HmmTrain(inputFile string, param *HmmTrainOptionalParam) (hmmModel) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("Hidden Markov Model (HMM) Training") + + // Detect if the parameter was passed; set if so. + setParamString("input_file", inputFile) + setPassed("input_file") + + // Detect if the parameter was passed; set if so. + if param.Batch != false { + setParamBool("batch", param.Batch) + setPassed("batch") + } + + // Detect if the parameter was passed; set if so. + if param.Gaussians != 0 { + setParamInt("gaussians", param.Gaussians) + setPassed("gaussians") + } + + // Detect if the parameter was passed; set if so. + if param.InputModel != nil { + setHMMModel("input_model", param.InputModel) + setPassed("input_model") + } + + // Detect if the parameter was passed; set if so. 
+ if param.LabelsFile != "" { + setParamString("labels_file", param.LabelsFile) + setPassed("labels_file") + } + + // Detect if the parameter was passed; set if so. + if param.Seed != 0 { + setParamInt("seed", param.Seed) + setPassed("seed") + } + + // Detect if the parameter was passed; set if so. + if param.States != 0 { + setParamInt("states", param.States) + setPassed("states") + } + + // Detect if the parameter was passed; set if so. + if param.Tolerance != 1e-05 { + setParamDouble("tolerance", param.Tolerance) + setPassed("tolerance") + } + + // Detect if the parameter was passed; set if so. + if param.Type != "gaussian" { + setParamString("type", param.Type) + setPassed("type") + } + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output_model") + + // Call the mlpack program. + C.mlpackHmmTrain() + + // Initialize result variable and get output. + var outputModel hmmModel + outputModel.getHMMModel("output_model") + + // Clear settings. + clearSettings() + + // Return output(s). + return outputModel +} diff --git a/hmm_viterbi.go b/hmm_viterbi.go new file mode 100644 index 0000000..3d12399 --- /dev/null +++ b/hmm_viterbi.go @@ -0,0 +1,87 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_hmm_viterbi +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type HmmViterbiOptionalParam struct { + Verbose bool +} + +func HmmViterbiOptions() *HmmViterbiOptionalParam { + return &HmmViterbiOptionalParam{ + Verbose: false, + } +} + +/* + This utility takes an already-trained HMM, specified as "InputModel", and + evaluates the most probable hidden state sequence of a given sequence of + observations (specified as '"Input", using the Viterbi algorithm. The + computed state sequence may be saved using the "Output" output parameter. 
+ + For example, to predict the state sequence of the observations obs using the + HMM hmm, storing the predicted state sequence to states, the following command + could be used: + + // Initialize optional parameters for HmmViterbi(). + param := mlpack.HmmViterbiOptions() + + states := mlpack.HmmViterbi(obs, &hmm, param) + + Input parameters: + + - input (mat.Dense): Matrix containing observations, + - inputModel (hmmModel): Trained HMM to use. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - output (mat.Dense): File to save predicted state sequence to. + + */ +func HmmViterbi(input *mat.Dense, inputModel *hmmModel, param *HmmViterbiOptionalParam) (*mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("Hidden Markov Model (HMM) Viterbi State Prediction") + + // Detect if the parameter was passed; set if so. + gonumToArmaMat("input", input) + setPassed("input") + + // Detect if the parameter was passed; set if so. + setHMMModel("input_model", inputModel) + setPassed("input_model") + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output") + + // Call the mlpack program. + C.mlpackHmmViterbi() + + // Initialize result variable and get output. + var outputPtr mlpackArma + output := outputPtr.armaToGonumUmat("output") + + // Clear settings. + clearSettings() + + // Return output(s). 
+ return output +} diff --git a/hoeffding_tree.go b/hoeffding_tree.go index c47181f..e10c558 100644 --- a/hoeffding_tree.go +++ b/hoeffding_tree.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type HoeffdingTreeOptionalParam struct { BatchMode bool @@ -52,23 +48,6 @@ func HoeffdingTreeOptions() *HoeffdingTreeOptionalParam { } } -type hoeffdingTreeModel struct { - mem unsafe.Pointer -} - -func (m *hoeffdingTreeModel) allocHoeffdingTreeModel(identifier string) { - m.mem = C.mlpackGetHoeffdingTreeModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *hoeffdingTreeModel) getHoeffdingTreeModel(identifier string) { - m.allocHoeffdingTreeModel(identifier) -} - -func setHoeffdingTreeModel(identifier string, ptr *hoeffdingTreeModel) { - C.mlpackSetHoeffdingTreeModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program implements Hoeffding trees, a form of streaming decision tree suited best for large (or streaming) datasets. This program supports both @@ -94,7 +73,7 @@ func setHoeffdingTreeModel(identifier string, ptr *hoeffdingTreeModel) { "TestLabels" parameter. Predictions for each test point may be saved with the "Predictions" output parameter, and class probabilities for each prediction may be saved with the "Probabilities" output parameter. 
- + For example, to train a Hoeffding tree with confidence 0.99 with data dataset, saving the trained tree to tree, the following command may be used: @@ -116,7 +95,6 @@ func setHoeffdingTreeModel(identifier string, ptr *hoeffdingTreeModel) { _, predictions, class_probs := mlpack.HoeffdingTree(param) - Input parameters: - BatchMode (bool): If true, samples will be considered in batch diff --git a/image_converter.go b/image_converter.go index ea8da71..2780b0b 100644 --- a/image_converter.go +++ b/image_converter.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type ImageConverterOptionalParam struct { Channels int @@ -42,7 +40,9 @@ func ImageConverterOptions() *ImageConverterOptionalParam { There are other options too, that can be specified such as "Quality". You can also provide a dataset and save them as images using "Dataset" and - "Save" as an parameter. An example to load an image : + "Save" as an parameter. + + An example to load an image : // Initialize optional parameters for ImageConverter(). param := mlpack.ImageConverterOptions() @@ -64,7 +64,6 @@ func ImageConverterOptions() *ImageConverterOptionalParam { _ := mlpack.ImageConverter(X, param) - Input parameters: - input ([]string): Image filenames which have to be loaded/saved. diff --git a/cli_util.go b/io_util.go similarity index 97% rename from cli_util.go rename to io_util.go index d040151..4c63e54 100644 --- a/cli_util.go +++ b/io_util.go @@ -2,8 +2,8 @@ package mlpack /* #cgo CFLAGS: -I. -I/capi -g -Wall -#cgo LDFLAGS: -L${SRCDIR} -Wl,-rpath,${SRCDIR} -lgo_util -#include +#cgo LDFLAGS: -L${SRCDIR} -Wl,-rpath,${SRCDIR} -lmlpack_go_util +#include */ import "C" diff --git a/kde.go b/kde.go new file mode 100644 index 0000000..daf724e --- /dev/null +++ b/kde.go @@ -0,0 +1,292 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. 
-lmlpack_go_kde +#include +#include +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type KdeOptionalParam struct { + AbsError float64 + Algorithm string + Bandwidth float64 + InitialSampleSize int + InputModel *kdeModel + Kernel string + McBreakCoef float64 + McEntryCoef float64 + McProbability float64 + MonteCarlo bool + Query *mat.Dense + Reference *mat.Dense + RelError float64 + Tree string + Verbose bool +} + +func KdeOptions() *KdeOptionalParam { + return &KdeOptionalParam{ + AbsError: 0, + Algorithm: "dual-tree", + Bandwidth: 1, + InitialSampleSize: 100, + InputModel: nil, + Kernel: "gaussian", + McBreakCoef: 0.4, + McEntryCoef: 3, + McProbability: 0.95, + MonteCarlo: false, + Query: nil, + Reference: nil, + RelError: 0.05, + Tree: "kd-tree", + Verbose: false, + } +} + +/* + This program performs a Kernel Density Estimation. KDE is a non-parametric way + of estimating probability density function. For each query point the program + will estimate its probability density by applying a kernel function to each + reference point. The computational complexity of this is O(N^2) where there + are N query points and N reference points, but this implementation will + typically see better performance as it uses an approximate dual or single tree + algorithm for acceleration. + + Dual or single tree optimization avoids many barely relevant calculations (as + kernel function values decrease with distance), so it is an approximate + computation. You can specify the maximum relative error tolerance for each + query value with "RelError" as well as the maximum absolute error tolerance + with the parameter "AbsError". This program runs using an Euclidean metric. + Kernel function can be selected using the "Kernel" option. You can also choose + what which type of tree to use for the dual-tree algorithm with "Tree". It is + also possible to select whether to use dual-tree algorithm or single-tree + algorithm using the "Algorithm" option. 
+ + Monte Carlo estimations can be used to accelerate the KDE estimate when the + Gaussian Kernel is used. This provides a probabilistic guarantee on the the + error of the resulting KDE instead of an absolute guarantee.To enable Monte + Carlo estimations, the "MonteCarlo" flag can be used, and success probability + can be set with the "McProbability" option. It is possible to set the initial + sample size for the Monte Carlo estimation using "InitialSampleSize". This + implementation will only consider a node, as a candidate for the Monte Carlo + estimation, if its number of descendant nodes is bigger than the initial + sample size. This can be controlled using a coefficient that will multiply the + initial sample size and can be set using "McEntryCoef". To avoid using the + same amount of computations an exact approach would take, this program + recurses the tree whenever a fraction of the amount of the node's descendant + points have already been computed. This fraction is set using "McBreakCoef". + + For example, the following will run KDE using the data in ref_data for + training and the data in qu_data as query data. It will apply an Epanechnikov + kernel with a 0.2 bandwidth to each reference point and use a KD-Tree for the + dual-tree optimization. The returned predictions will be within 5% of the real + KDE value for each query point. + + // Initialize optional parameters for Kde(). + param := mlpack.KdeOptions() + param.Reference = ref_data + param.Query = qu_data + param.Bandwidth = 0.2 + param.Kernel = "epanechnikov" + param.Tree = "kd-tree" + param.RelError = 0.05 + + _, out_data := mlpack.Kde(param) + + the predicted density estimations will be stored in out_data. + If no "Query" is provided, then KDE will be computed on the "Reference" + dataset. + It is possible to select either a reference dataset or an input model but not + both at the same time. If an input model is selected and parameter values are + not set (e.g. 
"Bandwidth") then default parameter values will be used. + + In addition to the last program call, it is also possible to activate Monte + Carlo estimations if a Gaussian kernel is used. This can provide faster + results, but the KDE will only have a probabilistic guarantee of meeting the + desired error bound (instead of an absolute guarantee). The following example + will run KDE using a Monte Carlo estimation when possible. The results will be + within a 5% of the real KDE value with a 95% probability. Initial sample size + for the Monte Carlo estimation will be 200 points and a node will be a + candidate for the estimation only when it contains 700 (i.e. 3.5*200) points. + If a node contains 700 points and 420 (i.e. 0.6*700) have already been + sampled, then the algorithm will recurse instead of keep sampling. + + // Initialize optional parameters for Kde(). + param := mlpack.KdeOptions() + param.Reference = ref_data + param.Query = qu_data + param.Bandwidth = 0.2 + param.Kernel = "gaussian" + param.Tree = "kd-tree" + param.RelError = 0.05 + param.MonteCarlo = + param.McProbability = 0.95 + param.InitialSampleSize = 200 + param.McEntryCoef = 3.5 + param.McBreakCoef = 0.6 + + _, out_data := mlpack.Kde(param) + + Input parameters: + + - AbsError (float64): Relative error tolerance for the prediction. + Default value 0. + - Algorithm (string): Algorithm to use for the prediction.('dual-tree', + 'single-tree'). Default value 'dual-tree'. + - Bandwidth (float64): Bandwidth of the kernel. Default value 1. + - InitialSampleSize (int): Initial sample size for Monte Carlo + estimations. Default value 100. + - InputModel (kdeModel): Contains pre-trained KDE model. + - Kernel (string): Kernel to use for the prediction.('gaussian', + 'epanechnikov', 'laplacian', 'spherical', 'triangular'). Default value + 'gaussian'. + - McBreakCoef (float64): Controls what fraction of the amount of node's + descendants is the limit for the sample size before it recurses. 
+ Default value 0.4. + - McEntryCoef (float64): Controls how much larger does the amount of + node descendants has to be compared to the initial sample size in order + to be a candidate for Monte Carlo estimations. Default value 3. + - McProbability (float64): Probability of the estimation being bounded + by relative error when using Monte Carlo estimations. Default value + 0.95. + - MonteCarlo (bool): Whether to use Monte Carlo estimations when + possible. + - Query (mat.Dense): Query dataset to KDE on. + - Reference (mat.Dense): Input reference dataset use for KDE. + - RelError (float64): Relative error tolerance for the prediction. + Default value 0.05. + - Tree (string): Tree to use for the prediction.('kd-tree', + 'ball-tree', 'cover-tree', 'octree', 'r-tree'). Default value + 'kd-tree'. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - outputModel (kdeModel): If specified, the KDE model will be saved + here. + - predictions (mat.Dense): Vector to store density predictions. + + */ +func Kde(param *KdeOptionalParam) (kdeModel, *mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("Kernel Density Estimation") + + // Detect if the parameter was passed; set if so. + if param.AbsError != 0 { + setParamDouble("abs_error", param.AbsError) + setPassed("abs_error") + } + + // Detect if the parameter was passed; set if so. + if param.Algorithm != "dual-tree" { + setParamString("algorithm", param.Algorithm) + setPassed("algorithm") + } + + // Detect if the parameter was passed; set if so. + if param.Bandwidth != 1 { + setParamDouble("bandwidth", param.Bandwidth) + setPassed("bandwidth") + } + + // Detect if the parameter was passed; set if so. 
+ if param.InitialSampleSize != 100 { + setParamInt("initial_sample_size", param.InitialSampleSize) + setPassed("initial_sample_size") + } + + // Detect if the parameter was passed; set if so. + if param.InputModel != nil { + setKDEModel("input_model", param.InputModel) + setPassed("input_model") + } + + // Detect if the parameter was passed; set if so. + if param.Kernel != "gaussian" { + setParamString("kernel", param.Kernel) + setPassed("kernel") + } + + // Detect if the parameter was passed; set if so. + if param.McBreakCoef != 0.4 { + setParamDouble("mc_break_coef", param.McBreakCoef) + setPassed("mc_break_coef") + } + + // Detect if the parameter was passed; set if so. + if param.McEntryCoef != 3 { + setParamDouble("mc_entry_coef", param.McEntryCoef) + setPassed("mc_entry_coef") + } + + // Detect if the parameter was passed; set if so. + if param.McProbability != 0.95 { + setParamDouble("mc_probability", param.McProbability) + setPassed("mc_probability") + } + + // Detect if the parameter was passed; set if so. + if param.MonteCarlo != false { + setParamBool("monte_carlo", param.MonteCarlo) + setPassed("monte_carlo") + } + + // Detect if the parameter was passed; set if so. + if param.Query != nil { + gonumToArmaMat("query", param.Query) + setPassed("query") + } + + // Detect if the parameter was passed; set if so. + if param.Reference != nil { + gonumToArmaMat("reference", param.Reference) + setPassed("reference") + } + + // Detect if the parameter was passed; set if so. + if param.RelError != 0.05 { + setParamDouble("rel_error", param.RelError) + setPassed("rel_error") + } + + // Detect if the parameter was passed; set if so. + if param.Tree != "kd-tree" { + setParamString("tree", param.Tree) + setPassed("tree") + } + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. 
+ setPassed("output_model") + setPassed("predictions") + + // Call the mlpack program. + C.mlpackKde() + + // Initialize result variable and get output. + var outputModel kdeModel + outputModel.getKDEModel("output_model") + var predictionsPtr mlpackArma + predictions := predictionsPtr.armaToGonumCol("predictions") + + // Clear settings. + clearSettings() + + // Return output(s). + return outputModel, predictions +} diff --git a/kernel_pca.go b/kernel_pca.go index f1b5859..7bb75b4 100644 --- a/kernel_pca.go +++ b/kernel_pca.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type KernelPcaOptionalParam struct { Bandwidth float64 @@ -46,14 +44,6 @@ func KernelPcaOptions() *KernelPcaOptionalParam { For the case where a linear kernel is used, this reduces to regular PCA. - For example, the following command will perform KPCA on the dataset input - using the Gaussian kernel, and saving the transformed data to transformed: - - // Initialize optional parameters for KernelPca(). - param := mlpack.KernelPcaOptions() - - transformed := mlpack.KernelPca(input, "gaussian", param) - The kernels that are supported are listed below: * 'linear': the standard linear dot product (same as normal PCA): @@ -88,6 +78,13 @@ func KernelPcaOptions() *KernelPcaOptionalParam { "Sampling" parameter is used. The sampling scheme for the Nystroem method can be chosen from the following list: 'kmeans', 'random', 'ordered'. + For example, the following command will perform KPCA on the dataset input + using the Gaussian kernel, and saving the transformed data to transformed: + + // Initialize optional parameters for KernelPca(). 
+ param := mlpack.KernelPcaOptions() + + transformed := mlpack.KernelPca(input, "gaussian", param) Input parameters: diff --git a/kfn.go b/kfn.go index d3af1f4..591fe40 100644 --- a/kfn.go +++ b/kfn.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type KfnOptionalParam struct { Algorithm string @@ -50,28 +46,11 @@ func KfnOptions() *KfnOptionalParam { } } -type kfnModel struct { - mem unsafe.Pointer -} - -func (m *kfnModel) allocKFNModel(identifier string) { - m.mem = C.mlpackGetKFNModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *kfnModel) getKFNModel(identifier string) { - m.allocKFNModel(identifier) -} - -func setKFNModel(identifier string, ptr *kfnModel) { - C.mlpackSetKFNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program will calculate the k-furthest-neighbors of a set of points. You may specify a separate set of reference points and query points, or just a reference set which will be used as both the reference and query set. - + For example, the following will calculate the 5 furthest neighbors of eachpoint in input and store the distances in distances and the neighbors in neighbors: @@ -89,7 +68,6 @@ func setKFNModel(identifier string, ptr *kfnModel) { Row i and column j in the distances output file corresponds to the distance between those two points. 
- Input parameters: - Algorithm (string): Type of neighbor search: 'naive', 'single_tree', diff --git a/kmeans.go b/kmeans.go index d35f5b9..a3af736 100644 --- a/kmeans.go +++ b/kmeans.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type KmeansOptionalParam struct { Algorithm string @@ -81,7 +79,7 @@ func KmeansOptions() *KmeansOptionalParam { Initial clustering assignments may be specified using the "InitialCentroids" parameter, and the maximum number of iterations may be specified with the "MaxIterations" parameter. - + As an example, to use Hamerly's algorithm to perform k-means clustering with k=10 on the dataset data, saving the centroids to centroids and the assignments for each point to assignments, the following command could be @@ -103,7 +101,6 @@ func KmeansOptions() *KmeansOptionalParam { final, _ := mlpack.Kmeans(data, 10, param) - Input parameters: - clusters (int): Number of clusters to find (0 autodetects from diff --git a/knn.go b/knn.go index 75affb9..6f4e870 100644 --- a/knn.go +++ b/knn.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type KnnOptionalParam struct { Algorithm string @@ -52,29 +48,12 @@ func KnnOptions() *KnnOptionalParam { } } -type knnModel struct { - mem unsafe.Pointer -} - -func (m *knnModel) allocKNNModel(identifier string) { - m.mem = C.mlpackGetKNNModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *knnModel) getKNNModel(identifier string) { - m.allocKNNModel(identifier) -} - -func setKNNModel(identifier string, ptr *knnModel) { - C.mlpackSetKNNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program will calculate the k-nearest-neighbors of a set of points using kd-trees or cover trees (cover tree support is experimental and may be slow). 
You may specify a separate set of reference points and query points, or just a reference set which will be used as both the reference and query set. - + For example, the following command will calculate the 5 nearest neighbors of each point in input and store the distances in distances and the neighbors in neighbors: @@ -92,7 +71,6 @@ func setKNNModel(identifier string, ptr *knnModel) { column i in the distances output matrix corresponds to the distance between those two points. - Input parameters: - Algorithm (string): Type of neighbor search: 'naive', 'single_tree', diff --git a/krann.go b/krann.go index cc1b49a..42e4119 100644 --- a/krann.go +++ b/krann.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type KrannOptionalParam struct { Alpha float64 @@ -54,30 +50,13 @@ func KrannOptions() *KrannOptionalParam { } } -type rannModel struct { - mem unsafe.Pointer -} - -func (m *rannModel) allocRANNModel(identifier string) { - m.mem = C.mlpackGetRANNModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *rannModel) getRANNModel(identifier string) { - m.allocRANNModel(identifier) -} - -func setRANNModel(identifier string, ptr *rannModel) { - C.mlpackSetRANNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program will calculate the k rank-approximate-nearest-neighbors of a set of points. You may specify a separate set of reference points and query points, or just a reference set which will be used as both the reference and query set. You must specify the rank approximation (in %) (and optionally the success probability). - + For example, the following will return 5 neighbors from the top 0.1% of the data (with probability 0.95) for each point in input and store the distances in distances and the neighbors in neighbors.csv: @@ -102,7 +81,6 @@ func setRANNModel(identifier string, ptr *rannModel) { index j. 
Row i and column j in the distances output file corresponds to the distance between those two points. - Input parameters: - Alpha (float64): The desired success probability. Default value diff --git a/lars.go b/lars.go index f801c45..1b3ada2 100644 --- a/lars.go +++ b/lars.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LarsOptionalParam struct { Input *mat.Dense @@ -38,23 +34,6 @@ func LarsOptions() *LarsOptionalParam { } } -type lars struct { - mem unsafe.Pointer -} - -func (m *lars) allocLARS(identifier string) { - m.mem = C.mlpackGetLARSPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *lars) getLARS(identifier string) { - m.allocLARS(identifier) -} - -func setLARS(identifier string, ptr *lars) { - C.mlpackSetLARSPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of LARS: Least Angle Regression (Stagewise/laSso). This is a stage-wise homotopy-based algorithm for L1-regularized linear regression @@ -92,7 +71,7 @@ func setLARS(identifier string, ptr *lars) { trained model or the given input model. Test points can be specified with the "Test" parameter. Predicted responses to the test points can be saved with the "OutputPredictions" output parameter. - + For example, the following command trains a model on the data data and responses responses with lambda1 set to 0.4 and lambda2 set to 0 (so, LASSO is being solved), and then the model is saved to lasso_model: @@ -116,7 +95,6 @@ func setLARS(identifier string, ptr *lars) { _, test_predictions := mlpack.Lars(param) - Input parameters: - Input (mat.Dense): Matrix of covariates (X). 
diff --git a/linear_regression.go b/linear_regression.go index 0282001..69e4f3a 100644 --- a/linear_regression.go +++ b/linear_regression.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LinearRegressionOptionalParam struct { InputModel *linearRegression @@ -34,23 +30,6 @@ func LinearRegressionOptions() *LinearRegressionOptionalParam { } } -type linearRegression struct { - mem unsafe.Pointer -} - -func (m *linearRegression) allocLinearRegression(identifier string) { - m.mem = C.mlpackGetLinearRegressionPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *linearRegression) getLinearRegression(identifier string) { - m.allocLinearRegression(identifier) -} - -func setLinearRegression(identifier string, ptr *linearRegression) { - C.mlpackSetLinearRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of simple linear regression and simple ridge regression using ordinary least squares. This solves the problem @@ -73,7 +52,7 @@ func setLinearRegression(identifier string, ptr *linearRegression) { and the predicted responses y' may be saved with the "OutputPredictions" output parameter. This type of regression is related to least-angle regression, which mlpack implements as the 'lars' program. 
- + For example, to run a linear regression on the dataset X with responses y, saving the trained model to lr_model, the following command could be used: @@ -94,7 +73,6 @@ func setLinearRegression(identifier string, ptr *linearRegression) { _, X_test_responses := mlpack.LinearRegression(param) - Input parameters: - InputModel (linearRegression): Existing LinearRegression model to diff --git a/linear_svm.go b/linear_svm.go index 4631285..c98052b 100644 --- a/linear_svm.go +++ b/linear_svm.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LinearSvmOptionalParam struct { Delta float64 @@ -56,23 +52,6 @@ func LinearSvmOptions() *LinearSvmOptionalParam { } } -type linearsvmModel struct { - mem unsafe.Pointer -} - -func (m *linearsvmModel) allocLinearSVMModel(identifier string) { - m.mem = C.mlpackGetLinearSVMModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *linearsvmModel) getLinearSVMModel(identifier string) { - m.allocLinearSVMModel(identifier) -} - -func setLinearSVMModel(identifier string, ptr *linearsvmModel) { - C.mlpackSetLinearSVMModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of linear SVMs that uses either L-BFGS or parallel SGD (stochastic gradient descent) to train the model. @@ -111,7 +90,7 @@ func setLinearSVMModel(identifier string, ptr *linearsvmModel) { without the "Training" parameter, so long as an existing linear SVM model is given with the "InputModel" parameter. The output predictions from the linear SVM model may be saved with the "Predictions" parameter. 
- + As an example, to train a LinaerSVM on the data 'data' with labels 'labels' with L2 regularization of 0.1, saving the model to 'lsvm_model', the following command may be used: @@ -136,7 +115,6 @@ func setLinearSVMModel(identifier string, ptr *linearsvmModel) { _, predictions, _ := mlpack.LinearSvm(param) - Input parameters: - Delta (float64): Margin of difference between correct class and other diff --git a/lmnn.go b/lmnn.go index a85c46a..53cf960 100644 --- a/lmnn.go +++ b/lmnn.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type LmnnOptionalParam struct { BatchSize int @@ -120,7 +118,7 @@ func LmnnOptions() *LmnnOptionalParam { specifying the "Normalize" parameter. By default, the AMSGrad optimizer is used. - + Example - Let's say we want to learn distance on iris dataset with number of targets as 3 using BigBatch_SGD optimizer. A simple call for the same will look like: @@ -144,7 +142,6 @@ func LmnnOptions() *LmnnOptionalParam { _, output, _ := mlpack.MlpackLmnn(letter_recognition, param) - Input parameters: - input (mat.Dense): Input dataset to run LMNN on. 
diff --git a/local_coordinate_coding.go b/local_coordinate_coding.go index 26e9c1d..2afe3cb 100644 --- a/local_coordinate_coding.go +++ b/local_coordinate_coding.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LocalCoordinateCodingOptionalParam struct { Atoms int @@ -44,23 +40,6 @@ func LocalCoordinateCodingOptions() *LocalCoordinateCodingOptionalParam { } } -type localCoordinateCoding struct { - mem unsafe.Pointer -} - -func (m *localCoordinateCoding) allocLocalCoordinateCoding(identifier string) { - m.mem = C.mlpackGetLocalCoordinateCodingPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *localCoordinateCoding) getLocalCoordinateCoding(identifier string) { - m.allocLocalCoordinateCoding(identifier) -} - -func setLocalCoordinateCoding(identifier string, ptr *localCoordinateCoding) { - C.mlpackSetLocalCoordinateCodingPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of Local Coordinate Coding (LCC), which codes data that approximately lives on a manifold using a variation of l1-norm regularized @@ -81,8 +60,9 @@ func setLocalCoordinateCoding(identifier string, ptr *localCoordinateCoding) { To run this program, the input matrix X must be specified (with -i), along with the number of atoms in the dictionary (-k). An initial dictionary may also be specified with the "InitialDictionary" parameter. The l1-norm - regularization parameter is specified with the "Lambda" parameter. For - example, to run LCC on the dataset data using 200 atoms and an + regularization parameter is specified with the "Lambda" parameter. 
+ + For example, to run LCC on the dataset data using 200 atoms and an l1-regularization parameter of 0.1, saving the dictionary "Dictionary" and the codes into "Codes", use @@ -110,7 +90,6 @@ func setLocalCoordinateCoding(identifier string, ptr *localCoordinateCoding) { new_codes, _, _ := mlpack.LocalCoordinateCoding(param) - Input parameters: - Atoms (int): Number of atoms in the dictionary. Default value 0. diff --git a/logistic_regression.go b/logistic_regression.go index b5cdd4c..399c88e 100644 --- a/logistic_regression.go +++ b/logistic_regression.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LogisticRegressionOptionalParam struct { BatchSize int @@ -46,23 +42,6 @@ func LogisticRegressionOptions() *LogisticRegressionOptionalParam { } } -type logisticRegression struct { - mem unsafe.Pointer -} - -func (m *logisticRegression) allocLogisticRegression(identifier string) { - m.mem = C.mlpackGetLogisticRegressionPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *logisticRegression) getLogisticRegression(identifier string) { - m.allocLogisticRegression(identifier) -} - -func setLogisticRegression(identifier string, ptr *logisticRegression) { - C.mlpackSetLogisticRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of L2-regularized logistic regression using either the L-BFGS optimizer or SGD (stochastic gradient descent). This solves the @@ -116,7 +95,7 @@ func setLogisticRegression(identifier string, ptr *logisticRegression) { This implementation of logistic regression does not support the general multi-class case but instead only the two-class case. Any labels must be either 0 or 1. For more classes, see the softmax_regression program. 
- + As an example, to train a logistic regression model on the data 'data' with labels 'labels' with L2 regularization of 0.1, saving the model to 'lr_model', the following command may be used: @@ -139,7 +118,6 @@ func setLogisticRegression(identifier string, ptr *logisticRegression) { predictions, _, _, _, _ := mlpack.LogisticRegression(param) - Input parameters: - BatchSize (int): Batch size for SGD. Default value 64. diff --git a/lsh.go b/lsh.go index 7dc0256..4471c4e 100644 --- a/lsh.go +++ b/lsh.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type LshOptionalParam struct { BucketSize int @@ -48,29 +44,12 @@ func LshOptions() *LshOptionalParam { } } -type lshSearch struct { - mem unsafe.Pointer -} - -func (m *lshSearch) allocLSHSearch(identifier string) { - m.mem = C.mlpackGetLSHSearchPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *lshSearch) getLSHSearch(identifier string) { - m.allocLSHSearch(identifier) -} - -func setLSHSearch(identifier string, ptr *lshSearch) { - C.mlpackSetLSHSearchPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program will calculate the k approximate-nearest-neighbors of a set of points using locality-sensitive hashing. You may specify a separate set of reference points and query points, or just a reference set which will be used as both the reference and query set. - + For example, the following will return 5 neighbors from the data for each point in input and store the distances in distances and the neighbors in neighbors: @@ -95,7 +74,6 @@ func setLSHSearch(identifier string, ptr *lshSearch) { This program also has many other parameters to control its functionality; see the parameter-specific documentation for more information. - Input parameters: - BucketSize (int): The size of a bucket in the second level hash. 
diff --git a/mean_shift.go b/mean_shift.go index 23ec38c..8da6efc 100644 --- a/mean_shift.go +++ b/mean_shift.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type MeanShiftOptionalParam struct { ForceConvergence bool @@ -44,7 +42,7 @@ func MeanShiftOptions() *MeanShiftOptionalParam { The output labels may be saved with the "Output" output parameter and the centroids of each cluster may be saved with the "Centroid" output parameter. - + For example, to run mean shift clustering on the dataset data and store the centroids to centroids, the following command may be used: @@ -53,7 +51,6 @@ func MeanShiftOptions() *MeanShiftOptionalParam { centroids, _ := mlpack.MeanShift(data, param) - Input parameters: - input (mat.Dense): Input dataset to perform clustering on. diff --git a/models.go b/models.go new file mode 100644 index 0000000..d69ae5f --- /dev/null +++ b/models.go @@ -0,0 +1,520 @@ +package mlpack + +/* +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +*/ +import "C" + +import ( + "runtime" + "unsafe" +) + +type gaussianKernel struct { + mem unsafe.Pointer +} + +func (m *gaussianKernel) allocGaussianKernel(identifier string) { + m.mem = C.mlpackGetGaussianKernelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *gaussianKernel) getGaussianKernel(identifier string) { + m.allocGaussianKernel(identifier) +} + +func setGaussianKernel(identifier string, ptr *gaussianKernel) { + C.mlpackSetGaussianKernelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type adaBoostModel struct { + mem unsafe.Pointer +} + +func (m *adaBoostModel) allocAdaBoostModel(identifier string) { + m.mem = 
C.mlpackGetAdaBoostModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *adaBoostModel) getAdaBoostModel(identifier string) { + m.allocAdaBoostModel(identifier) +} + +func setAdaBoostModel(identifier string, ptr *adaBoostModel) { + C.mlpackSetAdaBoostModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type approxkfnModel struct { + mem unsafe.Pointer +} + +func (m *approxkfnModel) allocApproxKFNModel(identifier string) { + m.mem = C.mlpackGetApproxKFNModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *approxkfnModel) getApproxKFNModel(identifier string) { + m.allocApproxKFNModel(identifier) +} + +func setApproxKFNModel(identifier string, ptr *approxkfnModel) { + C.mlpackSetApproxKFNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type bayesianLinearRegression struct { + mem unsafe.Pointer +} + +func (m *bayesianLinearRegression) allocBayesianLinearRegression(identifier string) { + m.mem = C.mlpackGetBayesianLinearRegressionPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *bayesianLinearRegression) getBayesianLinearRegression(identifier string) { + m.allocBayesianLinearRegression(identifier) +} + +func setBayesianLinearRegression(identifier string, ptr *bayesianLinearRegression) { + C.mlpackSetBayesianLinearRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type cfModel struct { + mem unsafe.Pointer +} + +func (m *cfModel) allocCFModel(identifier string) { + m.mem = C.mlpackGetCFModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *cfModel) getCFModel(identifier string) { + m.allocCFModel(identifier) +} + +func setCFModel(identifier string, ptr *cfModel) { + C.mlpackSetCFModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type dsModel struct { + mem unsafe.Pointer +} + +func (m *dsModel) allocDSModel(identifier string) { + m.mem = C.mlpackGetDSModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *dsModel) getDSModel(identifier 
string) { + m.allocDSModel(identifier) +} + +func setDSModel(identifier string, ptr *dsModel) { + C.mlpackSetDSModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type decisionTreeModel struct { + mem unsafe.Pointer +} + +func (m *decisionTreeModel) allocDecisionTreeModel(identifier string) { + m.mem = C.mlpackGetDecisionTreeModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *decisionTreeModel) getDecisionTreeModel(identifier string) { + m.allocDecisionTreeModel(identifier) +} + +func setDecisionTreeModel(identifier string, ptr *decisionTreeModel) { + C.mlpackSetDecisionTreeModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type dTree struct { + mem unsafe.Pointer +} + +func (m *dTree) allocDTree(identifier string) { + m.mem = C.mlpackGetDTreePtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *dTree) getDTree(identifier string) { + m.allocDTree(identifier) +} + +func setDTree(identifier string, ptr *dTree) { + C.mlpackSetDTreePtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type fastmksModel struct { + mem unsafe.Pointer +} + +func (m *fastmksModel) allocFastMKSModel(identifier string) { + m.mem = C.mlpackGetFastMKSModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *fastmksModel) getFastMKSModel(identifier string) { + m.allocFastMKSModel(identifier) +} + +func setFastMKSModel(identifier string, ptr *fastmksModel) { + C.mlpackSetFastMKSModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type gmm struct { + mem unsafe.Pointer +} + +func (m *gmm) allocGMM(identifier string) { + m.mem = C.mlpackGetGMMPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *gmm) getGMM(identifier string) { + m.allocGMM(identifier) +} + +func setGMM(identifier string, ptr *gmm) { + C.mlpackSetGMMPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type hmmModel struct { + mem unsafe.Pointer +} + +func (m *hmmModel) allocHMMModel(identifier string) { + m.mem = 
C.mlpackGetHMMModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *hmmModel) getHMMModel(identifier string) { + m.allocHMMModel(identifier) +} + +func setHMMModel(identifier string, ptr *hmmModel) { + C.mlpackSetHMMModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type hoeffdingTreeModel struct { + mem unsafe.Pointer +} + +func (m *hoeffdingTreeModel) allocHoeffdingTreeModel(identifier string) { + m.mem = C.mlpackGetHoeffdingTreeModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *hoeffdingTreeModel) getHoeffdingTreeModel(identifier string) { + m.allocHoeffdingTreeModel(identifier) +} + +func setHoeffdingTreeModel(identifier string, ptr *hoeffdingTreeModel) { + C.mlpackSetHoeffdingTreeModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type kdeModel struct { + mem unsafe.Pointer +} + +func (m *kdeModel) allocKDEModel(identifier string) { + m.mem = C.mlpackGetKDEModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *kdeModel) getKDEModel(identifier string) { + m.allocKDEModel(identifier) +} + +func setKDEModel(identifier string, ptr *kdeModel) { + C.mlpackSetKDEModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type lars struct { + mem unsafe.Pointer +} + +func (m *lars) allocLARS(identifier string) { + m.mem = C.mlpackGetLARSPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *lars) getLARS(identifier string) { + m.allocLARS(identifier) +} + +func setLARS(identifier string, ptr *lars) { + C.mlpackSetLARSPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type linearRegression struct { + mem unsafe.Pointer +} + +func (m *linearRegression) allocLinearRegression(identifier string) { + m.mem = C.mlpackGetLinearRegressionPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *linearRegression) getLinearRegression(identifier string) { + m.allocLinearRegression(identifier) +} + +func setLinearRegression(identifier string, ptr *linearRegression) { + 
C.mlpackSetLinearRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type linearsvmModel struct { + mem unsafe.Pointer +} + +func (m *linearsvmModel) allocLinearSVMModel(identifier string) { + m.mem = C.mlpackGetLinearSVMModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *linearsvmModel) getLinearSVMModel(identifier string) { + m.allocLinearSVMModel(identifier) +} + +func setLinearSVMModel(identifier string, ptr *linearsvmModel) { + C.mlpackSetLinearSVMModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type localCoordinateCoding struct { + mem unsafe.Pointer +} + +func (m *localCoordinateCoding) allocLocalCoordinateCoding(identifier string) { + m.mem = C.mlpackGetLocalCoordinateCodingPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *localCoordinateCoding) getLocalCoordinateCoding(identifier string) { + m.allocLocalCoordinateCoding(identifier) +} + +func setLocalCoordinateCoding(identifier string, ptr *localCoordinateCoding) { + C.mlpackSetLocalCoordinateCodingPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type logisticRegression struct { + mem unsafe.Pointer +} + +func (m *logisticRegression) allocLogisticRegression(identifier string) { + m.mem = C.mlpackGetLogisticRegressionPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *logisticRegression) getLogisticRegression(identifier string) { + m.allocLogisticRegression(identifier) +} + +func setLogisticRegression(identifier string, ptr *logisticRegression) { + C.mlpackSetLogisticRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type lshSearch struct { + mem unsafe.Pointer +} + +func (m *lshSearch) allocLSHSearch(identifier string) { + m.mem = C.mlpackGetLSHSearchPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *lshSearch) getLSHSearch(identifier string) { + m.allocLSHSearch(identifier) +} + +func setLSHSearch(identifier string, ptr *lshSearch) { + C.mlpackSetLSHSearchPtr(C.CString(identifier), 
(unsafe.Pointer)(ptr.mem)) +} + +type nbcModel struct { + mem unsafe.Pointer +} + +func (m *nbcModel) allocNBCModel(identifier string) { + m.mem = C.mlpackGetNBCModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *nbcModel) getNBCModel(identifier string) { + m.allocNBCModel(identifier) +} + +func setNBCModel(identifier string, ptr *nbcModel) { + C.mlpackSetNBCModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type knnModel struct { + mem unsafe.Pointer +} + +func (m *knnModel) allocKNNModel(identifier string) { + m.mem = C.mlpackGetKNNModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *knnModel) getKNNModel(identifier string) { + m.allocKNNModel(identifier) +} + +func setKNNModel(identifier string, ptr *knnModel) { + C.mlpackSetKNNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type kfnModel struct { + mem unsafe.Pointer +} + +func (m *kfnModel) allocKFNModel(identifier string) { + m.mem = C.mlpackGetKFNModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *kfnModel) getKFNModel(identifier string) { + m.allocKFNModel(identifier) +} + +func setKFNModel(identifier string, ptr *kfnModel) { + C.mlpackSetKFNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type perceptronModel struct { + mem unsafe.Pointer +} + +func (m *perceptronModel) allocPerceptronModel(identifier string) { + m.mem = C.mlpackGetPerceptronModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *perceptronModel) getPerceptronModel(identifier string) { + m.allocPerceptronModel(identifier) +} + +func setPerceptronModel(identifier string, ptr *perceptronModel) { + C.mlpackSetPerceptronModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type scalingModel struct { + mem unsafe.Pointer +} + +func (m *scalingModel) allocScalingModel(identifier string) { + m.mem = C.mlpackGetScalingModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *scalingModel) 
getScalingModel(identifier string) { + m.allocScalingModel(identifier) +} + +func setScalingModel(identifier string, ptr *scalingModel) { + C.mlpackSetScalingModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type randomForestModel struct { + mem unsafe.Pointer +} + +func (m *randomForestModel) allocRandomForestModel(identifier string) { + m.mem = C.mlpackGetRandomForestModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *randomForestModel) getRandomForestModel(identifier string) { + m.allocRandomForestModel(identifier) +} + +func setRandomForestModel(identifier string, ptr *randomForestModel) { + C.mlpackSetRandomForestModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type rannModel struct { + mem unsafe.Pointer +} + +func (m *rannModel) allocRANNModel(identifier string) { + m.mem = C.mlpackGetRANNModelPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *rannModel) getRANNModel(identifier string) { + m.allocRANNModel(identifier) +} + +func setRANNModel(identifier string, ptr *rannModel) { + C.mlpackSetRANNModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type softmaxRegression struct { + mem unsafe.Pointer +} + +func (m *softmaxRegression) allocSoftmaxRegression(identifier string) { + m.mem = C.mlpackGetSoftmaxRegressionPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *softmaxRegression) getSoftmaxRegression(identifier string) { + m.allocSoftmaxRegression(identifier) +} + +func setSoftmaxRegression(identifier string, ptr *softmaxRegression) { + C.mlpackSetSoftmaxRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + +type sparseCoding struct { + mem unsafe.Pointer +} + +func (m *sparseCoding) allocSparseCoding(identifier string) { + m.mem = C.mlpackGetSparseCodingPtr(C.CString(identifier)) + runtime.KeepAlive(m) +} + +func (m *sparseCoding) getSparseCoding(identifier string) { + m.allocSparseCoding(identifier) +} + +func setSparseCoding(identifier string, ptr 
*sparseCoding) { + C.mlpackSetSparseCodingPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) +} + diff --git a/nbc.go b/nbc.go index 884fd86..be7dc61 100644 --- a/nbc.go +++ b/nbc.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type NbcOptionalParam struct { IncrementalVariance bool @@ -34,23 +30,6 @@ func NbcOptions() *NbcOptionalParam { } } -type nbcModel struct { - mem unsafe.Pointer -} - -func (m *nbcModel) allocNBCModel(identifier string) { - m.mem = C.mlpackGetNBCModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *nbcModel) getNBCModel(identifier string) { - m.allocNBCModel(identifier) -} - -func setNBCModel(identifier string, ptr *nbcModel) { - C.mlpackSetNBCModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program trains the Naive Bayes classifier on the given labeled training set, or loads a model from the given model file, and then may use that trained @@ -76,7 +55,7 @@ func setNBCModel(identifier string, ptr *nbcModel) { Note: the "Output" and "OutputProbs" parameters are deprecated and will be removed in mlpack 4.0.0. Use "Predictions" and "Probabilities" instead. - + For example, to train a Naive Bayes classifier on the dataset data with labels labels and save the model to nbc_model, the following command may be used: @@ -97,7 +76,6 @@ func setNBCModel(identifier string, ptr *nbcModel) { predictions, _, _, _, _ := mlpack.Nbc(param) - Input parameters: - IncrementalVariance (bool): The variance of each class will be diff --git a/nca.go b/nca.go index 640374b..fd4a8ef 100644 --- a/nca.go +++ b/nca.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type NcaOptionalParam struct { ArmijoConstant float64 @@ -104,7 +102,6 @@ func NcaOptions() *NcaOptionalParam { By default, the SGD optimizer is used. 
- Input parameters: - input (mat.Dense): Input dataset to run NCA on. diff --git a/nmf.go b/nmf.go index ecbbc7c..100972e 100644 --- a/nmf.go +++ b/nmf.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type NmfOptionalParam struct { InitialH *mat.Dense @@ -55,7 +53,7 @@ func NmfOptions() *NmfOptionalParam { The maximum number of iterations is specified with "MaxIterations", and the minimum residue required for algorithm termination is specified with the "MinResidue" parameter. - + For example, to run NMF on the input matrix V using the 'multdist' update rules with a rank-10 decomposition and storing the decomposed matrices into W and H, the following command could be used: @@ -66,7 +64,6 @@ func NmfOptions() *NmfOptionalParam { H, W := mlpack.Nmf(V, 10, param) - Input parameters: - input (mat.Dense): Input dataset to perform NMF on. diff --git a/pca.go b/pca.go index 2e8b850..4b94440 100644 --- a/pca.go +++ b/pca.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type PcaOptionalParam struct { DecompositionMethod string @@ -46,7 +44,7 @@ func PcaOptions() *PcaOptionalParam { Multiple different decomposition techniques can be used. The method to use can be specified with the "DecompositionMethod" parameter, and it may take the values 'exact', 'randomized', or 'quic'. - + For example, to reduce the dimensionality of the matrix data to 5 dimensions using randomized SVD for the decomposition, storing the output matrix to data_mod, the following command can be used: @@ -58,7 +56,6 @@ func PcaOptions() *PcaOptionalParam { data_mod := mlpack.Pca(data, param) - Input parameters: - input (mat.Dense): Input dataset to perform PCA on. 
diff --git a/perceptron.go b/perceptron.go index 35bf45d..4552826 100644 --- a/perceptron.go +++ b/perceptron.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type PerceptronOptionalParam struct { InputModel *perceptronModel @@ -34,23 +30,6 @@ func PerceptronOptions() *PerceptronOptionalParam { } } -type perceptronModel struct { - mem unsafe.Pointer -} - -func (m *perceptronModel) allocPerceptronModel(identifier string) { - m.mem = C.mlpackGetPerceptronModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *perceptronModel) getPerceptronModel(identifier string) { - m.allocPerceptronModel(identifier) -} - -func setPerceptronModel(identifier string, ptr *perceptronModel) { - C.mlpackSetPerceptronModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program implements a perceptron, which is a single level neural network. The perceptron makes its predictions based on a linear predictor function @@ -71,7 +50,7 @@ func setPerceptronModel(identifier string, ptr *perceptronModel) { Note: the following parameter is deprecated and will be removed in mlpack 4.0.0: "Output". Use "Predictions" instead of "Output". - + The training data given with the "Training" option may have class labels as its last dimension (so, if the training data is in CSV format, labels should be the last column). Alternately, the "Labels" parameter may be used to @@ -109,7 +88,6 @@ func setPerceptronModel(identifier string, ptr *perceptronModel) { a 3-dimensional dataset with a perceptron that has been trained on 8 dimensions will cause an error. - Input parameters: - InputModel (perceptronModel): Input perceptron model. 
diff --git a/preprocess_binarize.go b/preprocess_binarize.go index 64b1375..f47c540 100644 --- a/preprocess_binarize.go +++ b/preprocess_binarize.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type PreprocessBinarizeOptionalParam struct { Dimension int @@ -35,7 +33,7 @@ func PreprocessBinarizeOptions() *PreprocessBinarizeOptionalParam { parameter; the default threshold is 0.0. The binarized matrix may be saved with the "Output" output parameter. - + For example, if we want to set all variables greater than 5 in the dataset X to 1 and variables less than or equal to 5.0 to 0, and save the result to Y, we could run @@ -56,7 +54,6 @@ func PreprocessBinarizeOptions() *PreprocessBinarizeOptionalParam { Y := mlpack.PreprocessBinarize(X, param) - Input parameters: - input (mat.Dense): Input data matrix. diff --git a/preprocess_describe.go b/preprocess_describe.go index d5dabb8..147bd20 100644 --- a/preprocess_describe.go +++ b/preprocess_describe.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type PreprocessDescribeOptionalParam struct { Dimension int @@ -45,7 +43,7 @@ func PreprocessDescribeOptions() *PreprocessDescribeOptionalParam { dimension to analyze if there are too many dimensions. The "Population" parameter can be specified when the dataset should be considered as a population. Otherwise, the dataset will be considered as a sample. 
- + So, a simple example where we want to print out statistical facts about the dataset X using the default settings, we could run @@ -66,7 +64,6 @@ func PreprocessDescribeOptions() *PreprocessDescribeOptionalParam { := mlpack.PreprocessDescribe(X, param) - Input parameters: - input (mat.Dense): Matrix containing data, diff --git a/preprocess_one_hot_encoding.go b/preprocess_one_hot_encoding.go new file mode 100644 index 0000000..643410c --- /dev/null +++ b/preprocess_one_hot_encoding.go @@ -0,0 +1,89 @@ +package mlpack + +/* +#cgo CFLAGS: -I./capi -Wall +#cgo LDFLAGS: -L. -lmlpack_go_preprocess_one_hot_encoding +#include <capi/preprocess_one_hot_encoding.h> +#include <stdlib.h> +*/ +import "C" + +import "gonum.org/v1/gonum/mat" + +type PreprocessOneHotEncodingOptionalParam struct { + Verbose bool +} + +func PreprocessOneHotEncodingOptions() *PreprocessOneHotEncodingOptionalParam { + return &PreprocessOneHotEncodingOptionalParam{ + Verbose: false, + } +} + +/* + This utility takes a dataset and a vector of indices and does one-hot encoding + of the respective features at those indices. Indices represent the IDs of the + dimensions to be one-hot encoded. + + The output matrix with encoded features may be saved with the "Output" + parameter. + + So, a simple example where we want to encode 1st and 3rd feature from dataset + X into X_output would be + + // Initialize optional parameters for PreprocessOneHotEncoding(). + param := mlpack.PreprocessOneHotEncodingOptions() + + X_output := mlpack.PreprocessOneHotEncoding(X, 1, 3, param) + + Input parameters: + + - dimensions ([]int): Index of dimensions that need to be one-hot + encoded. + - input (mat.Dense): Matrix containing data. + - Verbose (bool): Display informational messages and the full list of + parameters and timers at the end of execution. + + Output parameters: + + - output (mat.Dense): Matrix to save one-hot encoded features data to.
+ + */ +func PreprocessOneHotEncoding(dimensions []int, input *mat.Dense, param *PreprocessOneHotEncodingOptionalParam) (*mat.Dense) { + resetTimers() + enableTimers() + disableBacktrace() + disableVerbose() + restoreSettings("One Hot Encoding") + + // Detect if the parameter was passed; set if so. + setParamVecInt("dimensions", dimensions) + setPassed("dimensions") + + // Detect if the parameter was passed; set if so. + gonumToArmaMat("input", input) + setPassed("input") + + // Detect if the parameter was passed; set if so. + if param.Verbose != false { + setParamBool("verbose", param.Verbose) + setPassed("verbose") + enableVerbose() + } + + // Mark all output options as passed. + setPassed("output") + + // Call the mlpack program. + C.mlpackPreprocessOneHotEncoding() + + // Initialize result variable and get output. + var outputPtr mlpackArma + output := outputPtr.armaToGonumMat("output") + + // Clear settings. + clearSettings() + + // Return output(s). + return output +} diff --git a/preprocess_scale.go b/preprocess_scale.go index 67a24e3..d014155 100644 --- a/preprocess_scale.go +++ b/preprocess_scale.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type PreprocessScaleOptionalParam struct { Epsilon float64 @@ -38,23 +34,6 @@ func PreprocessScaleOptions() *PreprocessScaleOptionalParam { } } -type scalingModel struct { - mem unsafe.Pointer -} - -func (m *scalingModel) allocScalingModel(identifier string) { - m.mem = C.mlpackGetScalingModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *scalingModel) getScalingModel(identifier string) { - m.allocScalingModel(identifier) -} - -func setScalingModel(identifier string, ptr *scalingModel) { - C.mlpackSetScalingModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This utility takes a dataset and performs feature scaling using one of the six scaler methods namely: 'max_abs_scaler', 
'mean_normalization', @@ -68,7 +47,7 @@ func setScalingModel(identifier string, ptr *scalingModel) { The model to scale features can be saved using "OutputModel" and later can be loaded back using"InputModel". - + So, a simple example where we want to scale the dataset X into X_scaled with standard_scaler as scaler_method, we could run @@ -111,7 +90,6 @@ func setScalingModel(identifier string, ptr *scalingModel) { X_scaled, _ := mlpack.PreprocessScale(X, param) - Input parameters: - input (mat.Dense): Matrix containing data. diff --git a/preprocess_split.go b/preprocess_split.go index 427b945..c01171d 100644 --- a/preprocess_split.go +++ b/preprocess_split.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type PreprocessSplitOptionalParam struct { InputLabels *mat.Dense @@ -43,7 +41,7 @@ func PreprocessSplitOptions() *PreprocessSplitOptionalParam { "InputLabels" parameter. Splitting labels works the same way as splitting the data. The output training and test labels may be saved with the "TrainingLabels" and "TestLabels" output parameters, respectively. - + So, a simple example where we want to split the dataset X into X_train and X_test with 60% of the data in the training set and 40% of the dataset in the test set, we could run @@ -76,7 +74,6 @@ func PreprocessSplitOptions() *PreprocessSplitOptionalParam { X_test, y_test, X_train, y_train := mlpack.PreprocessSplit(X, param) - Input parameters: - input (mat.Dense): Matrix containing data. diff --git a/radical.go b/radical.go index 763efad..62439b1 100644 --- a/radical.go +++ b/radical.go @@ -8,9 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" -) +import "gonum.org/v1/gonum/mat" type RadicalOptionalParam struct { Angles int @@ -45,7 +43,7 @@ func RadicalOptions() *RadicalOptionalParam { parameter. 
The output matrix Y may be saved with the "OutputIc" output parameter, and the output unmixing matrix W may be saved with the "OutputUnmixing" output parameter. - + For example, to perform ICA on the matrix X with 40 replicates, saving the independent components to ic, the following command may be used: @@ -55,7 +53,6 @@ func RadicalOptions() *RadicalOptionalParam { ic, _ := mlpack.Radical(X, param) - Input parameters: - input (mat.Dense): Input dataset for ICA. diff --git a/random_forest.go b/random_forest.go index 19d76f1..147601c 100644 --- a/random_forest.go +++ b/random_forest.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type RandomForestOptionalParam struct { InputModel *randomForestModel @@ -48,23 +44,6 @@ func RandomForestOptions() *RandomForestOptionalParam { } } -type randomForestModel struct { - mem unsafe.Pointer -} - -func (m *randomForestModel) allocRandomForestModel(identifier string) { - m.mem = C.mlpackGetRandomForestModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *randomForestModel) getRandomForestModel(identifier string) { - m.allocRandomForestModel(identifier) -} - -func setRandomForestModel(identifier string, ptr *randomForestModel) { - C.mlpackSetRandomForestModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program is an implementation of the standard random forest classification algorithm by Leo Breiman. A random forest can be trained and saved for later @@ -94,7 +73,7 @@ func setRandomForestModel(identifier string, ptr *randomForestModel) { specified with the "TestLabels" parameter. Predictions for each test point may be saved via the "Predictions"output parameter. Class probabilities for each prediction may be saved with the "Probabilities" output parameter. 
- + For example, to train a random forest with a minimum leaf size of 20 using 10 trees on the dataset contained in datawith labels labels, saving the output random forest to rf_model and printing the training error, one could call @@ -121,7 +100,6 @@ func setRandomForestModel(identifier string, ptr *randomForestModel) { _, predictions, _ := mlpack.RandomForest(param) - Input parameters: - InputModel (randomForestModel): Pre-trained random forest to use for diff --git a/range_search.go b/range_search.go deleted file mode 100644 index 713ce16..0000000 --- a/range_search.go +++ /dev/null @@ -1,230 +0,0 @@ -package mlpack - -/* -#cgo CFLAGS: -I./capi -Wall -#cgo LDFLAGS: -L. -lmlpack_go_range_search -#include -#include -*/ -import "C" - -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) - -type RangeSearchOptionalParam struct { - InputModel *rsModel - LeafSize int - Max float64 - Min float64 - Naive bool - Query *mat.Dense - RandomBasis bool - Reference *mat.Dense - Seed int - SingleMode bool - TreeType string - Verbose bool -} - -func RangeSearchOptions() *RangeSearchOptionalParam { - return &RangeSearchOptionalParam{ - InputModel: nil, - LeafSize: 20, - Max: 0, - Min: 0, - Naive: false, - Query: nil, - RandomBasis: false, - Reference: nil, - Seed: 0, - SingleMode: false, - TreeType: "kd", - Verbose: false, - } -} - -type rsModel struct { - mem unsafe.Pointer -} - -func (m *rsModel) allocRSModel(identifier string) { - m.mem = C.mlpackGetRSModelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *rsModel) getRSModel(identifier string) { - m.allocRSModel(identifier) -} - -func setRSModel(identifier string, ptr *rsModel) { - C.mlpackSetRSModelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - -/* - This program implements range search with a Euclidean distance metric. 
For a - given query point, a given range, and a given set of reference points, the - program will return all of the reference points with distance to the query - point in the given range. This is performed for an entire set of query - points. You may specify a separate set of reference and query points, or only - a reference set -- which is then used as both the reference and query set. - The given range is taken to be inclusive (that is, points with a distance - exactly equal to the minimum and maximum of the range are included in the - results). - - For example, the following will calculate the points within the range [2, 5] - of each point in 'input.csv' and store the distances in 'distances.csv' and - the neighbors in 'neighbors.csv': - - $ range_search --min=2 --max=5 --reference_file=input.csv - --distances_file=distances.csv --neighbors_file=neighbors.csv - - The output files are organized such that line i corresponds to the points - found for query point i. Because sometimes 0 points may be found in the given - range, lines of the output files may be empty. The points are not ordered in - any specific manner. - - Because the number of points returned for each query point may differ, the - resultant CSV-like files may not be loadable by many programs. However, at - this time a better way to store this non-square result is not known. As a - result, any output files will be written as CSVs in this manner, regardless of - the given extension. - - - Input parameters: - - - InputModel (rsModel): File containing pre-trained range search - model. - - LeafSize (int): Leaf size for tree building (used for kd-trees, vp - trees, random projection trees, UB trees, R trees, R* trees, X trees, - Hilbert R trees, R+ trees, R++ trees, and octrees). Default value 20. - - Max (float64): Upper bound in range (if not specified, +inf will be - used. Default value 0. - - Min (float64): Lower bound in range. Default value 0. 
- - Naive (bool): If true, O(n^2) naive mode is used for computation. - - Query (mat.Dense): File containing query points (optional). - - RandomBasis (bool): Before tree-building, project the data onto a - random orthogonal basis. - - Reference (mat.Dense): Matrix containing the reference dataset. - - Seed (int): Random seed (if 0, std::time(NULL) is used). Default - value 0. - - SingleMode (bool): If true, single-tree search is used (as opposed to - dual-tree search). - - TreeType (string): Type of tree to use: 'kd', 'vp', 'rp', 'max-rp', - 'ub', 'cover', 'r', 'r-star', 'x', 'ball', 'hilbert-r', 'r-plus', - 'r-plus-plus', 'oct'. Default value 'kd'. - - Verbose (bool): Display informational messages and the full list of - parameters and timers at the end of execution. - - Output parameters: - - - distancesFile (string): File to output distances into. Default value - ''. - - neighborsFile (string): File to output neighbors into. Default value - ''. - - outputModel (rsModel): If specified, the range search model will be - saved to the given file. - - */ -func RangeSearch(param *RangeSearchOptionalParam) (string, string, rsModel) { - resetTimers() - enableTimers() - disableBacktrace() - disableVerbose() - restoreSettings("Range Search") - - // Detect if the parameter was passed; set if so. - if param.InputModel != nil { - setRSModel("input_model", param.InputModel) - setPassed("input_model") - } - - // Detect if the parameter was passed; set if so. - if param.LeafSize != 20 { - setParamInt("leaf_size", param.LeafSize) - setPassed("leaf_size") - } - - // Detect if the parameter was passed; set if so. - if param.Max != 0 { - setParamDouble("max", param.Max) - setPassed("max") - } - - // Detect if the parameter was passed; set if so. - if param.Min != 0 { - setParamDouble("min", param.Min) - setPassed("min") - } - - // Detect if the parameter was passed; set if so. 
- if param.Naive != false { - setParamBool("naive", param.Naive) - setPassed("naive") - } - - // Detect if the parameter was passed; set if so. - if param.Query != nil { - gonumToArmaMat("query", param.Query) - setPassed("query") - } - - // Detect if the parameter was passed; set if so. - if param.RandomBasis != false { - setParamBool("random_basis", param.RandomBasis) - setPassed("random_basis") - } - - // Detect if the parameter was passed; set if so. - if param.Reference != nil { - gonumToArmaMat("reference", param.Reference) - setPassed("reference") - } - - // Detect if the parameter was passed; set if so. - if param.Seed != 0 { - setParamInt("seed", param.Seed) - setPassed("seed") - } - - // Detect if the parameter was passed; set if so. - if param.SingleMode != false { - setParamBool("single_mode", param.SingleMode) - setPassed("single_mode") - } - - // Detect if the parameter was passed; set if so. - if param.TreeType != "kd" { - setParamString("tree_type", param.TreeType) - setPassed("tree_type") - } - - // Detect if the parameter was passed; set if so. - if param.Verbose != false { - setParamBool("verbose", param.Verbose) - setPassed("verbose") - enableVerbose() - } - - // Mark all output options as passed. - setPassed("distances_file") - setPassed("neighbors_file") - setPassed("output_model") - - // Call the mlpack program. - C.mlpackRangeSearch() - - // Initialize result variable and get output. - distancesFile := getParamString("distances_file") - neighborsFile := getParamString("neighbors_file") - var outputModel rsModel - outputModel.getRSModel("output_model") - - // Clear settings. - clearSettings() - - // Return output(s). - return distancesFile, neighborsFile, outputModel -} diff --git a/rel/deployment.md b/rel/deployment.md index b7039c8..f669fd2 100644 --- a/rel/deployment.md +++ b/rel/deployment.md @@ -5,11 +5,15 @@ into an automated script: 1. Check out mlpack code. 2. Configure and build the `go` target: `make go` - 3. 
Copy the complete folder of the Go bindings (`build/src/mlpack/bindings/go/src/mlpack.org/v1/mlpack`) + 3. First, remove the previously auto-generated go-bindings from the root of mlpack-go repository. +```sh + rm -r -v !(Makefile|Dockerfile|README.md|LICENSE.txt|rel) +``` + 4. Then, copy the complete folder of the Go bindings (`build/src/mlpack/bindings/go/src/mlpack.org/v1/mlpack`) to the root of mlpack-go repository. - 4. Remove all the `.so` files. + 5. Remove all the `.so` files. ```sh rm -f *.so *.so.* ``` - 5. Manually change the `PACKAGE_VERSION` in `MAKEFILE`. - 6. Commit any changed files and any added files in `root` and `capi/` folder of mlpack-go repository. + 6. Manually change the `PACKAGE_VERSION` in `MAKEFILE`. + 7. Commit any changed files and any added files in `root` and `capi/` folder of mlpack-go repository. diff --git a/softmax_regression.go b/softmax_regression.go index a4f924b..97a58a9 100644 --- a/softmax_regression.go +++ b/softmax_regression.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type SoftmaxRegressionOptionalParam struct { InputModel *softmaxRegression @@ -42,23 +38,6 @@ func SoftmaxRegressionOptions() *SoftmaxRegressionOptionalParam { } } -type softmaxRegression struct { - mem unsafe.Pointer -} - -func (m *softmaxRegression) allocSoftmaxRegression(identifier string) { - m.mem = C.mlpackGetSoftmaxRegressionPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *softmaxRegression) getSoftmaxRegression(identifier string) { - m.allocSoftmaxRegression(identifier) -} - -func setSoftmaxRegression(identifier string, ptr *softmaxRegression) { - C.mlpackSetSoftmaxRegressionPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* This program performs softmax regression, a generalization of logistic regression to the multiclass case, and has support for L2 regularization. 
The @@ -84,7 +63,7 @@ func setSoftmaxRegression(identifier string, ptr *softmaxRegression) { the "Predictions" output parameter. If labels are specified for the test data with the "TestLabels" parameter, then the program will print the accuracy of the predictions on the given test set and its corresponding labels. - + For example, to train a softmax regression model on the data dataset with labels labels with a maximum of 1000 iterations for training, saving the trained model to sr_model, the following command can be used: @@ -106,7 +85,6 @@ func setSoftmaxRegression(identifier string, ptr *softmaxRegression) { _, predictions := mlpack.SoftmaxRegression(param) - Input parameters: - InputModel (softmaxRegression): File containing existing model diff --git a/sparse_coding.go b/sparse_coding.go index 4ca3b29..c832c30 100644 --- a/sparse_coding.go +++ b/sparse_coding.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type SparseCodingOptionalParam struct { Atoms int @@ -48,23 +44,6 @@ func SparseCodingOptions() *SparseCodingOptionalParam { } } -type sparseCoding struct { - mem unsafe.Pointer -} - -func (m *sparseCoding) allocSparseCoding(identifier string) { - m.mem = C.mlpackGetSparseCodingPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *sparseCoding) getSparseCoding(identifier string) { - m.allocSparseCoding(identifier) -} - -func setSparseCoding(identifier string, ptr *sparseCoding) { - C.mlpackSetSparseCodingPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* An implementation of Sparse Coding with Dictionary Learning, which achieves sparsity via an l1-norm regularizer on the codes (LASSO) or an (l1+l2)-norm @@ -90,7 +69,7 @@ func setSparseCoding(identifier string, ptr *sparseCoding) { "Atoms" parameter). It is also possible to specify an initial dictionary for the optimization, with the "InitialDictionary" parameter. 
An input model may be specified with the "InputModel" parameter. - + As an example, to build a sparse coding model on the dataset data using 200 atoms and an l1-regularization parameter of 0.1, saving the model into model, use @@ -113,7 +92,6 @@ func setSparseCoding(identifier string, ptr *sparseCoding) { codes, _, _ := mlpack.SparseCoding(param) - Input parameters: - Atoms (int): Number of atoms in the dictionary. Default value 15. diff --git a/test_go_binding.go b/test_go_binding.go index 96414fa..cc1598f 100644 --- a/test_go_binding.go +++ b/test_go_binding.go @@ -8,11 +8,7 @@ package mlpack */ import "C" -import ( - "gonum.org/v1/gonum/mat" - "runtime" - "unsafe" -) +import "gonum.org/v1/gonum/mat" type TestGoBindingOptionalParam struct { BuildModel bool @@ -48,28 +44,9 @@ func TestGoBindingOptions() *TestGoBindingOptionalParam { } } -type gaussianKernel struct { - mem unsafe.Pointer -} - -func (m *gaussianKernel) allocGaussianKernel(identifier string) { - m.mem = C.mlpackGetGaussianKernelPtr(C.CString(identifier)) - runtime.KeepAlive(m) -} - -func (m *gaussianKernel) getGaussianKernel(identifier string) { - m.allocGaussianKernel(identifier) -} - -func setGaussianKernel(identifier string, ptr *gaussianKernel) { - C.mlpackSetGaussianKernelPtr(C.CString(identifier), (unsafe.Pointer)(ptr.mem)) -} - /* - A simple program to test Golang binding functionality. You can build mlpack - with the BUILD_TESTS option set to off, and this binding will no longer be - built. - + A simple program to test Go binding functionality. You can build mlpack with + the BUILD_TESTS option set to off, and this binding will no longer be built. Input parameters: