lin_sig_exp_experiment.py
#!python3
# Copyright (C) 2020 Victor O. Costa
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

# Python standard library
import math
import sys
import io

# Own
from ant_colony_for_continuous_domains import AELACOr, AGDACOr

# 3rd party
import numpy as np

# Benchmarking functions
from deap.benchmarks import rosenbrock, schwefel, ackley, griewank, himmelblau  # Functions used as training instances

def flatten_cost(cost_function):
    def flattened_cost(x):
        return cost_function(x)[0]
    return flattened_cost
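
# The DEAP benchmark functions return the fitness as a one-element tuple (DEAP's
# fitness convention); flatten_cost unwraps it to the plain scalar expected by the
# colony's cost interface. For example, rosenbrock([1.0, 1.0, 1.0]) evaluates to
# (0.0,), whereas flatten_cost(rosenbrock)([1.0, 1.0, 1.0]) evaluates to 0.0.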

def function_cost(function, variables_range, bounded, adaptive_mechanism, map_type, function_evals):
    """ Given a benchmark function, the range and bounding of its variables, an adaptive mechanism
        for ACOr ("AEL" or "AGD"), the type of mapping used for adaptive parameter control and the
        evaluation budget, run the metaheuristic once and return the final cost of that run. """
    if not isinstance(adaptive_mechanism, str) or not isinstance(map_type, str):
        print("Error, both adaptive mechanism and map type must be strings")
        exit(-1)
    if adaptive_mechanism.upper() != "AEL" and adaptive_mechanism.upper() != "AGD":
        print("Error, ACOr mechanism must be either \"AEL\" or \"AGD\"")
        exit(-1)
    if map_type != 'lin' and map_type != 'sig' and map_type != 'exp':
        print("Error, map type must be \"lin\", \"sig\" or \"exp\"")
        exit(-1)
    # Base ACOr parameters, from (Socha, 2008)
    # Number of function evaluations (F.E.) = k + iterations * m
    k = 50      # Solution archive size
    m = 10      # Number of ants per iteration
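    # Under the relation above, a budget of function_evals evaluations presumably
    # corresponds to (function_evals - k) / m iterations, e.g. (5000 - 50) / 10 = 495
    # iterations for the 5000-evaluation budget used in this experiment.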
    if adaptive_mechanism.upper() == "AEL":
        colony = AELACOr()
        colony.set_verbosity(False)
        xi = 0.85
        min_q = 1e-2
        max_q = 1.0
        colony.set_parameters(m, k, xi, min_q, max_q, map_type, [function_evals])
    else:
        colony = AGDACOr()
        colony.set_verbosity(False)
        q = 1e-2
        min_xi = 0.1
        max_xi = 0.93
        colony.set_parameters(m, k, q, min_xi, max_xi, map_type, [function_evals])
    # Define ranges and bounding of each variable
    dimensionality = 3  # Number of variables for all functions
    ranges = [variables_range for _ in range(dimensionality)]
    is_bounded = [bounded for _ in range(dimensionality)]
    colony.define_variables(ranges, is_bounded)
    # Optimize the given function and keep the cost of the final solution
    colony.set_cost(flatten_cost(function))
    final_cost = colony.optimize()[-1][-1]
    return final_cost
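
# Example usage (illustrative): a single AEL-ACOr run on Ackley with a sigmoidal
# mapping and a 5000-evaluation budget would look like
#   cost = function_cost(ackley, [-15, 30], True, 'AEL', 'sig', 5000)
# using the same range and bounding that run_lin_sig_exp_mappings assigns to Ackley below.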

def run_lin_sig_exp_mappings():
    """ Run AELACOr and AGDACOr on 4 training functions, each using linear, sigmoidal and exponential mappings from SR to the adapted parameter. """
    train_functions = [rosenbrock, schwefel, ackley, griewank]
    train_functions_names = ['rosenbrock', 'schwefel', 'ackley', 'griewank']
    functions_bounding = {'rosenbrock': False,
                          'schwefel':   True,
                          'ackley':     True,
                          'griewank':   True}
    functions_ranges = {'rosenbrock': [-10, 10],     # unbounded, values used in initialization only
                        'schwefel':   [-500, 500],
                        'ackley':     [-15, 30],
                        'griewank':   [-600, 600]}
    metaheuristic_runs = 100
    for map_type in ['lin', 'sig', 'exp']:
        for mechanism in ['AEL', 'AGD']:
            metaheuristic_function_evals = 5000
            # For each training function, run the metaheuristic N times and save the results
            for function, function_str in zip(train_functions, train_functions_names):
                variables_bounded = functions_bounding[function_str]
                variables_range = functions_ranges[function_str]
                # Run the metaheuristic N times
                function_costs = []
                for i in range(metaheuristic_runs):
                    cost = function_cost(function, variables_range, variables_bounded, mechanism, map_type, metaheuristic_function_evals)
                    function_costs.append(cost)
                    #print(str(i) + '. ' + str(cost))
                # The './results/lin_sig_exp/' directory is expected to exist
                np.save('./results/lin_sig_exp/' + map_type + '_' + mechanism + '_' + function_str + '_eval.npy', function_costs)
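
# The helper below is a sketch, not part of the original experiment: it assumes
# run_lin_sig_exp_mappings() has already populated './results/lin_sig_exp/' and
# simply reloads each saved cost array to print summary statistics per configuration.
def summarize_results(results_dir='./results/lin_sig_exp/'):
    """ Load the saved cost arrays and print mean and standard deviation for each mapping, mechanism and function. """
    for map_type in ['lin', 'sig', 'exp']:
        for mechanism in ['AEL', 'AGD']:
            for function_str in ['rosenbrock', 'schwefel', 'ackley', 'griewank']:
                file_path = results_dir + map_type + '_' + mechanism + '_' + function_str + '_eval.npy'
                costs = np.load(file_path)
                print('%s %s %s: mean = %.3e, std = %.3e' % (map_type, mechanism, function_str, np.mean(costs), np.std(costs)))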

if __name__ == '__main__':
    run_lin_sig_exp_mappings()