Regression

In contrast to a classification task, which predicts discrete labels, a regression model learns to predict continuous target values.

           
from photonai.base.PhotonBase import Hyperpipe, PipelineElement, PipelineSwitch
from photonai.optimization.Hyperparameters import Categorical, IntegerRange
from photonai.configuration.Register import PhotonRegister
from sklearn.model_selection import KFold
from sklearn.datasets import load_diabetes

# LOAD DATA
# Use the keyword form: passing `return_X_y` positionally is deprecated
# (and removed in recent scikit-learn versions).
X, y = load_diabetes(return_X_y=True)

# DESIGN YOUR PIPELINE
my_pipe = Hyperpipe('basic_regression_pipe',
                    optimizer='sk_opt',  # scikit-optimize based hyperparameter search
                    metrics=['mean_squared_error', 'pearson_correlation'],  # the performance metrics of your interest
                    best_config_metric='mean_squared_error',  # after hyperparameter search, the metric declares the winner config
                    outer_cv=KFold(n_splits=3),  # 3-fold outer cross-validation to estimate generalization
                    inner_cv=KFold(n_splits=3),  # test each configuration three times respectively
                    verbosity=1)


# NOW FIND OUT MORE ABOUT A SPECIFIC ELEMENT
PhotonRegister.info('RandomForestRegressor')
PhotonRegister.info('LinearRegression')

# ADD ELEMENTS TO YOUR PIPELINE
# first normalize all features
my_pipe += PipelineElement('StandardScaler')
# add a PCA with variable way of n_components
my_pipe += PipelineElement('PCA', hyperparameters={'n_components': IntegerRange(5, 20)}, test_disabled=True)
# then we choose a learning algorithm
my_pipe += PipelineElement('RandomForestRegressor', hyperparameters={'n_estimators': IntegerRange(10, 100)})


# NOW TRAIN YOUR PIPELINE
my_pipe.fit(X, y)