# Source code for gemseo.mlearning.core.selection

# -*- coding: utf-8 -*-
# Copyright 2021 IRT Saint Exupéry, https://www.irt-saintexupery.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

# Contributors:
#    INITIAL AUTHORS - initial API and implementation and/or initial
#                         documentation
#        :author: Syver Doving Agdestein
#        :author: Matthias De Lozzo
#    OTHER AUTHORS   - MACROSCOPIC CHANGES
"""This module contains a class to select a machine learning algorithm from a list.

Machine learning is used to find relations or underlying structures in data.
There is however no algorithm that is universally better than the others
for an arbitrary problem.
As for optimization, there is *no free lunch* for machine learning [wolpert]_.

.. [wolpert] Wolpert, David H.
"The lack of a priori distinctions between learning algorithms."
Neural computation 8.7 (1996): 1341-1390.

Provided a quality measure,
one can thus compare the performances of different machine learning algorithms.

This process can be easily performed using the class :class:`.MLAlgoSelection`.

A machine learning algorithm is built using a set of (hyper)parameters,
before the learning takes place.
In order to choose the best hyperparameters,
a simple grid search over different values may be sufficient.
The :class:`.MLAlgoSelection` does this.
It can also perform a more advanced form of optimization
than a simple grid search over predefined values,
using the class :class:`.MLAlgoCalibration`.

.. seealso::

:mod:`~gemseo.mlearning.core.ml_algo`
:mod:`~gemseo.mlearning.core.calibration`
"""
from __future__ import division, unicode_literals

from itertools import product
from typing import Optional, Sequence, Tuple, Union

from gemseo.algos.design_space import DesignSpace
from gemseo.core.dataset import Dataset
from gemseo.core.scenario import ScenarioInputDataType
from gemseo.mlearning.core.calibration import MLAlgoCalibration
from gemseo.mlearning.core.factory import MLAlgoFactory
from gemseo.mlearning.core.ml_algo import MLAlgo
from gemseo.mlearning.qual_measure.quality_measure import MLQualityMeasure
from gemseo.mlearning.qual_measure.quality_measure import (
OptionType as MeasureOptionType,
)

class MLAlgoSelection(object):
    """Machine learning algorithm selector.

    Attributes:
        dataset (Dataset): The learning dataset.
        measure (str): The name of a quality measure
            to measure the quality of the machine learning algorithms.
        measure_options (Dict[str,Union[int,Dataset]]): The options for the method
            to evaluate the quality measure.
        factory (MLAlgoFactory): The factory
            used for the instantiation of machine learning algorithms.
        candidates (List[Tuple[MLAlgo,float]]): The candidate machine learning
            algorithms, after possible calibration, and their quality measures.
    """

    def __init__(
        self,
        dataset,  # type: Dataset
        measure,  # type: str
        eval_method=MLQualityMeasure.LEARN,  # type: str
        samples=None,  # type: Optional[Sequence[int]]
        **measure_options  # type: MeasureOptionType
    ):  # type: (...) -> None
        """
        Args:
            dataset: The learning dataset.
            measure: The name of a quality measure
                to measure the quality of the machine learning algorithms.
            eval_method: The name of the method to evaluate the quality measure.
            samples: The indices of the learning samples to consider.
                Other indices are neither used for training nor for testing.
                If None, use all the samples.
            **measure_options: The options for the method
                to evaluate the quality measure.
                The option 'multioutput' will be set to False.

        Raises:
            ValueError: If the unsupported "multioutput" option is enabled.
        """
        self.dataset = dataset
        self.measure = measure
        self.measure_options = dict(
            samples=samples, method=eval_method, **measure_options
        )
        self.factory = MLAlgoFactory()
        self.candidates = []

        # A multi-valued quality measure cannot be compared across candidates
        # with is_better(); reject the option up front rather than failing later.
        if measure_options.get("multioutput"):
            raise ValueError(
                "MLAlgoSelection does not support multioutput. "
                "The measure shall return one value."
            )
        self.measure_options["multioutput"] = False

    def add_candidate(
        self,
        name,  # type: str
        calib_space=None,  # type: Optional[DesignSpace]
        calib_algo=None,  # type: Optional[ScenarioInputDataType]
        **option_lists
    ):  # type: (...) -> None
        """Add a machine learning algorithm candidate.

        Args:
            name: The name of a machine learning algorithm.
            calib_space: The design space
                defining the parameters to be calibrated
                with a :class:`.MLAlgoCalibration`.
                If None, do not perform calibration.
            calib_algo: The name and the parameters
                of the optimization algorithm,
                e.g. {"algo": "fullfact", "n_samples": 10}).
                If None, do not perform calibration.
            **option_lists: The parameters
                for the machine learning algorithm candidate.
                Each parameter has to be enclosed within a list.
                The list may contain different values
                to try out for the given parameter,
                or only one.

        Examples:
            >>> selection.add_candidate(
            >>>     "LinearRegression",
            >>>     penalty_level=[0, 0.1, 1, 10, 20],
            >>>     l2_penalty_ratio=[0, 0.5, 1],
            >>>     fit_intercept=[True],
            >>> )
        """
        keys, values = option_lists.keys(), option_lists.values()

        # Set initial quality to the worst possible value
        # so that the first evaluated candidate is always retained.
        if self.measure.SMALLER_IS_BETTER:
            quality = float("inf")
        else:
            quality = -float("inf")

        # Grid search over the Cartesian product of the option values.
        for prodvalues in product(*values):
            params = dict(zip(keys, prodvalues))
            if not calib_space:
                algo_new = self.factory.create(name, data=self.dataset, **params)
                quality_new = self.measure(algo_new).evaluate(**self.measure_options)
            else:
                # Calibrate the parameters of calib_space with an optimizer
                # instead of evaluating this grid point as-is.
                calib = MLAlgoCalibration(
                    name,
                    self.dataset,
                    calib_space.variables_names,
                    calib_space,
                    self.measure,
                    self.measure_options,
                    **params
                )
                calib.execute(calib_algo)
                algo_new = calib.optimal_algorithm
                quality_new = calib.optimal_criterion

            if self.measure.is_better(quality_new, quality):
                algo = algo_new
                quality = quality_new

        # Learn on the entire dataset if a cross-validation scheme was used,
        # so the stored candidate is fitted on all requested samples.
        algo.learn(self.measure_options["samples"])

        self.candidates.append((algo, quality))

    def select(
        self,
        return_quality=False,  # type: bool
    ):  # type: (...) -> Union[MLAlgo,Tuple[MLAlgo,float]]
        """Select the best model.

        The model is chosen through a grid search
        over candidates and their options,
        as well as an eventual optimization over the parameters
        in the calibration space.

        Args:
            return_quality: If True, the quality of the best model will be returned.

        Returns:
            The best model and its quality if required.
        """
        candidate = self.candidates[0]
        for new_candidate in self.candidates[1:]:
            if self.measure.is_better(new_candidate[1], candidate[1]):
                candidate = new_candidate
        if not return_quality:
            candidate = candidate[0]
        return candidate