@File : meta_ea.py @Time : 2024/04/25 09:54:42 @Author : Alejandro Marrero @Version : 1.0 @Contact : amarrerd@ull.edu.es @License : (C)Copyright 2024, Alejandro Marrero @Desc : None

NNEATuner

Neural Network Evolutionary Algorithm Tuner. This class implements a CMA-ES based tuner for neural networks. It allows you to optimize the weights of a neural network to generate transformed spaces in optimization domains. It uses the DEAP library for the evolutionary algorithm.

Source code in digneapy/transformers/tuner.py
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
class NNEATuner:
    """Neural Network Evolutionary Algorithm Tuner.

    This class implements a CMA-ES based tuner for neural networks.
    It allows you to optimize the weights of a neural network to generate
    transformed spaces in optimization domains.
    It uses the DEAP library for the evolutionary algorithm."""

    def __init__(
        self,
        eval_fn: Callable,
        dimension: int,
        transformer: KerasNN | TorchNN,
        centroid: Optional[Sequence[float]] = None,
        sigma: float = 1.0,
        lambda_: int = 50,
        generations: int = 250,
        direction: Direction = Direction.MAXIMISE,
        n_jobs: int = 1,
    ):
        """Creates a new NNEATuner instance

        Args:
            eval_fn (Callable): Function to evaluate the fitness of a neural
                network. It is called with the transformer (after its weights
                have been updated) and must return a single float value
                representing the fitness. It must be defined before creating
                the tuner instance.
            dimension (int): Number of weights in the neural network.
            transformer (KerasNN | TorchNN): Neural network whose weights are tuned.
            centroid (Optional[Sequence[float]], optional): Starting point for the
                CMA-ES algorithm. Defaults to the origin (all zeros).
            sigma (float, optional): Initial CMA-ES step size. Defaults to 1.0.
            lambda_ (int, optional): Population size. Non-positive values fall
                back to the default of 50. Defaults to 50.
            generations (int, optional): Number of generations to perform. Defaults to 250.
            direction (Direction, optional): Optimisation direction. Defaults to Direction.MAXIMISE.
            n_jobs (int, optional): Number of workers. Defaults to 1.

        Raises:
            ValueError: If eval_fn is None, if transformer is not a KerasNN or
                TorchNN, if direction is not a valid Direction, or if n_jobs < 1.
        """
        if eval_fn is None:
            raise ValueError(
                "eval_fn cannot be None in NNTuner. Please give a valid evaluation function."
            )
        if transformer is None or not isinstance(transformer, (KerasNN, TorchNN)):
            raise ValueError(
                "transformer cannot be None in NNTuner. Please give a valid transformer (KerasNN or TorchNN)."
            )
        # Validate all arguments *before* building any state, so a bad call
        # never leaves a half-initialised object (or a leaked worker pool).
        if not isinstance(direction, Direction):
            msg = f"Direction: {direction} not available. Please choose between {Direction.values()}"
            raise ValueError(msg)
        if n_jobs < 1:
            msg = "The number of jobs must be at least 1."
            raise ValueError(msg)

        self.eval_fn = eval_fn
        self.dimension = dimension
        self.transformer = transformer
        self.centroid = centroid if centroid is not None else [0.0] * self.dimension
        self.sigma = sigma
        # CMA-ES requires a strictly positive population size; the original
        # check (lambda_ != 0) let negative values slip through to the strategy.
        self._lambda = lambda_ if lambda_ > 0 else 50
        self.generations = generations
        self.__performed_gens = 0  # These vars are used to save the data in CSV files
        self.__evaluated_inds = 0
        self.direction = direction

        self.toolbox = base.Toolbox()
        self.toolbox.register("evaluate", self.evaluation)
        self.strategy = cma.Strategy(
            centroid=self.centroid, sigma=self.sigma, lambda_=self._lambda
        )
        # Generated individuals carry the fitness class matching the direction.
        if self.direction == Direction.MAXIMISE:
            self.toolbox.register("generate", self.strategy.generate, creator.IndMax)
        else:
            self.toolbox.register("generate", self.strategy.generate, creator.IndMin)
        self.toolbox.register("update", self.strategy.update)

        self.pool = None  # Worker pool is only created when n_jobs > 1.
        if n_jobs > 1:
            self.n_processors = n_jobs
            self.pool = Pool(processes=self.n_processors)
            self.toolbox.register("map", self.pool.map)

        self.hof = tools.HallOfFame(1)
        self.stats = tools.Statistics(lambda ind: ind.fitness.values)
        self.stats.register("avg", np.mean)
        self.stats.register("std", np.std)
        self.stats.register("min", np.min)
        self.stats.register("max", np.max)

    def __del__(self):
        # Best-effort cleanup: the original implementation leaked the
        # multiprocessing Pool. getattr() guards against __init__ having
        # raised before self.pool was ever assigned.
        pool = getattr(self, "pool", None)
        if pool is not None:
            pool.close()
            pool.join()

    def evaluation(self, individual: Sequence[float]) -> tuple[float]:
        """Evaluates a chromosome of weights for a NN
        to generate spaces in optimisation domains

        Args:
            individual (Sequence[float]): Sequence of weights for a NN transformer

        Returns:
            tuple[float]: Space coverage of the space created from the NN transformer
        """
        self.transformer.update_weights(individual)
        self.__evaluated_inds += 1
        # Once a full population has been evaluated, advance the generation
        # counter (used to label exported CSV data).
        if self.__evaluated_inds == self._lambda:
            self.__performed_gens += 1
            self.__evaluated_inds = 0

        fitness = self.eval_fn(self.transformer)
        # DEAP expects fitness values as a tuple.
        return (fitness,)

    def __call__(self):
        """Runs the CMA-ES optimisation loop.

        Returns:
            tuple: (best individual found, final population, DEAP logbook).
        """
        population, logbook = algorithms.eaGenerateUpdate(
            self.toolbox,
            ngen=self.generations,
            stats=self.stats,
            halloffame=self.hof,
            verbose=True,
        )
        return (self.hof[0], population, logbook)

__init__(eval_fn, dimension, transformer, centroid=None, sigma=1.0, lambda_=50, generations=250, direction=Direction.MAXIMISE, n_jobs=1)

Creates a new NNEATuner instance

Parameters:
  • eval_fn (Callable) –

    Function to evaluate the fitness of a neural network

  • dimension (int) –

    Number of weights in the neural network.

  • centroid (Optional[Sequence[float]], default: None ) –

    Starting point for the CMA-ES algorithm.

  • sigma (float, default: 1.0 ) –

    Defaults to 1.0.

  • lambda_ (int, default: 50 ) –

    Population size. Defaults to 50.

  • generations (int, default: 250 ) –

    Number of generations to perform. Defaults to 250.

  • direction (Direction, default: MAXIMISE ) –

    Optimisation direction. Defaults to Direction.MAXIMISE.

  • n_jobs (int, default: 1 ) –

    Number of workers. Defaults to 1.

Raises:
  • ValueError

    If eval_fn is None or if direction is not a valid Direction.

Source code in digneapy/transformers/tuner.py
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
def __init__(
    self,
    eval_fn: Callable,
    dimension: int,
    transformer: KerasNN | TorchNN,
    centroid: Optional[Sequence[float]] = None,
    sigma: float = 1.0,
    lambda_: int = 50,
    generations: int = 250,
    direction: Direction = Direction.MAXIMISE,
    n_jobs: int = 1,
):
    """Creates a new NNEATuner instance

    Args:
        eval_fn (Callable): Function to evaluate the fitness of a neural network
        weights. It must return a single float value representing the fitness.
        This function will be called with a list of weights as input.
        It must be defined before creating the tuner instance.
        dimension (int): Number of weights in the neural network.
        centroid (Optional[Sequence[float]], optional): Starting point for the CMA-ES algorithm.
        sigma (float, optional): Initial CMA-ES step size. Defaults to 1.0.
        lambda_ (int, optional): Population size. Defaults to 50.
        generations (int, optional): Number of generations to perform. Defaults to 250.
        direction (Direction, optional): Optimisation direction. Defaults to Direction.MAXIMISE.
        n_jobs (int, optional): Number of workers. Defaults to 1.

    Raises:
        ValueError: If eval_fn is None or if direction is not a valid Direction.
    """
    if eval_fn is None:
        raise ValueError(
            "eval_fn cannot be None in NNTuner. Please give a valid evaluation function."
        )
    if transformer is None or not isinstance(transformer, (KerasNN, TorchNN)):
        raise ValueError(
            "transformer cannot be None in NNTuner. Please give a valid transformer (KerasNN or TorchNN)."
        )
    self.eval_fn = eval_fn
    self.dimension = dimension
    self.transformer = transformer
    # Default starting point for CMA-ES is the origin.
    self.centroid = centroid if centroid is not None else [0.0] * self.dimension
    self.sigma = sigma
    # Fall back to the default population size when lambda_ is zero.
    self._lambda = lambda_ if lambda_ != 0 else 50
    self.generations = generations
    self.__performed_gens = 0  # These vars are used to save the data in CSV files
    self.__evaluated_inds = 0

    if not isinstance(direction, Direction):
        msg = f"Direction: {direction} not available. Please choose between {Direction.values()}"
        raise ValueError(msg)

    self.direction = direction
    self.toolbox = base.Toolbox()
    self.toolbox.register("evaluate", self.evaluation)
    self.strategy = cma.Strategy(
        centroid=self.centroid, sigma=self.sigma, lambda_=self._lambda
    )
    # Generated individuals carry the fitness class matching the direction.
    if self.direction == Direction.MAXIMISE:
        self.toolbox.register("generate", self.strategy.generate, creator.IndMax)
    else:
        self.toolbox.register("generate", self.strategy.generate, creator.IndMin)
    self.toolbox.register("update", self.strategy.update)
    if n_jobs < 1:
        msg = "The number of jobs must be at least 1."
        raise ValueError(msg)
    elif n_jobs > 1:
        # Evaluate individuals in parallel with a worker pool.
        self.n_processors = n_jobs
        self.pool = Pool(processes=self.n_processors)
        self.toolbox.register("map", self.pool.map)

    self.hof = tools.HallOfFame(1)
    self.stats = tools.Statistics(lambda ind: ind.fitness.values)
    self.stats.register("avg", np.mean)
    self.stats.register("std", np.std)
    self.stats.register("min", np.min)
    self.stats.register("max", np.max)

evaluation(individual)

Evaluates a chromosome of weights for a NN to generate spaces in optimisation domains

Parameters:
  • individual (Sequence[float]) –

    Sequence of weights for a NN transformer

Returns:
  • tuple[float]

    tuple[float]: Space coverage of the space created from the NN transformer

Source code in digneapy/transformers/tuner.py
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
def evaluation(self, individual: Sequence[float]) -> tuple[float]:
    """Evaluates a chromosome of weights for a NN
    to generate spaces in optimisation domains

    Args:
        individual (Sequence[float]): Sequence of weights for a NN transformer

    Returns:
        tuple[float]: Space coverage of the space created from the NN transformer
    """
    self.transformer.update_weights(individual)
    # filename = f"dataset_generation_{self.__performed_gens}_individual_{self.__evaluated_inds}.csv"
    self.__evaluated_inds += 1
    # Once a full population has been evaluated, advance the generation counter.
    if self.__evaluated_inds == self._lambda:
        self.__performed_gens += 1
        self.__evaluated_inds = 0

    fitness = self.eval_fn(self.transformer)
    # DEAP expects fitness values as a tuple.
    return (fitness,)