go(algo,de): implement jDE (wip)
All checks were successful
continuous-integration/drone/push Build is passing

leo 2023-01-21 02:35:29 +01:00
parent cba6e62b50
commit b742f0e091
Signed by: wanderer
SSH Key Fingerprint: SHA256:Dp8+iwKHSlrMEHzE3bJnPng70I7LEsa3IJXRH/U+idQ
4 changed files with 303 additions and 23 deletions

@@ -38,6 +38,13 @@ func getComparisonOfMeansPics() []report.Pic {
return comparisonOfMeansPicList.Pics
}
// saveAlgoMeans saves algo bench means safely.
func saveAlgoMeans(sabm stats.AlgoBenchMean) {
mu.Lock()
meanStats.AlgoMeans = append(meanStats.AlgoMeans, sabm)
mu.Unlock()
}
// GetMeanStats returns a pointer (*stats.MeanStats) to the sorted
// package-global 'meanStats'.
func GetMeanStats() *stats.MeanStats {
@@ -338,8 +345,11 @@ func DojDE(wg *sync.WaitGroup, m *sync.Mutex) {
algoStats := make([][]stats.Stats, funcCount)
// ch serves as a way to get the actual computed output.
ch := make(chan []stats.Stats, funcCount)
// chAlgoMeans := make(chan *stats.AlgoBenchMean, 1)
chAlgoMeans := make(chan *stats.AlgoBenchMean, funcCount)
defer close(ch)
defer close(chAlgoMeans)
// jDE params.
np := 50
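For context (none of this is visible in the hunk): the jDE paper (Brest et al., 2006) keeps NP fixed during a run and initialises every individual with F = 0.5 and CR = 0.9, later self-adapting both. The concrete f and cr this commit passes to Init live elsewhere in DojDE, outside the shown diff context, so the snippet below is purely illustrative.
// illustrative values only; the actual f and cr used by this commit are
// not shown in this hunk.
f := 0.5  // canonical initial differential weight F
cr := 0.9 // canonical initial crossover probability CR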
@@ -360,7 +370,7 @@ func DojDE(wg *sync.WaitGroup, m *sync.Mutex) {
// 0..17 to choose a mutation strategy,
// 0..1 to select a parameter self-adaptation scheme,
// np >= 4 as initial population size.
jDE.Init(-1, 30, 0, 0, np, f, cr, bench.DimensionsGA, bench.FuncNames[i], ch)
jDE.Init(-1, 30, 0, 0, np, f, cr, bench.DimensionsGA, bench.FuncNames[i], ch, chAlgoMeans)
go jDE.Run()
}
@@ -368,8 +378,11 @@ func DojDE(wg *sync.WaitGroup, m *sync.Mutex) {
// get results.
for i := range algoStats {
s := <-ch
aM := <-chAlgoMeans
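// note: values arrive on ch and chAlgoMeans in completion order, so s and
// aM received in the same loop iteration need not come from the same
// benchmark goroutine; both are simply collected here, so no pairing is
// required.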
algoStats[i] = s
saveAlgoMeans(*aM)
}
pCh := make(chan report.PicList, funcCount*len(bench.DimensionsGA))

@@ -6,10 +6,14 @@ package de
import (
"log"
"os"
"sort"
"time"
"golang.org/x/exp/rand"
"git.dotya.ml/wanderer/math-optim/bench"
"git.dotya.ml/wanderer/math-optim/stats"
"gonum.org/v1/gonum/stat/distuv"
)
// JDE is a holder for the settings of an instance of a self-adapting
@@ -37,6 +41,8 @@ type JDE struct {
BenchName string
// ch is a channel for writing back computed results.
ch chan []stats.Stats
// chAlgoMeans is a channel for writing back algo means.
chAlgoMeans chan *stats.AlgoBenchMean
// initialised denotes the initialisation state of the struct.
initialised bool
}
@@ -58,7 +64,7 @@ const (
var jDELogger = log.New(os.Stderr, " *** δ jDE:", log.Ldate|log.Ltime|log.Lshortfile)
// Init initialises the jDE algorithm, performs sanity checks on the inputs.
func (j *JDE) Init(generations, benchMinIters, mutStrategy, adptScheme, np int, f, cr float64, dimensions []int, bench string, ch chan []stats.Stats) {
func (j *JDE) Init(generations, benchMinIters, mutStrategy, adptScheme, np int, f, cr float64, dimensions []int, bench string, ch chan []stats.Stats, chAlgoMeans chan *stats.AlgoBenchMean) {
if j == nil {
jDELogger.Fatalln("jDE needs to be initialised before calling RunjDE, exiting...")
}
@@ -106,18 +112,19 @@ func (j *JDE) Init(generations, benchMinIters, mutStrategy, adptScheme, np int,
j.Dimensions = dimensions
j.BenchName = bench
j.ch = ch
j.chAlgoMeans = chAlgoMeans
j.initialised = true
}
// InitAndRun initialises the jDE algorithm, performs sanity checks on the
// inputs and calls the Run method.
func (j *JDE) InitAndRun(generations, benchMinIters, mutStrategy, adptScheme, np int, f, cr float64, dimensions []int, bench string, ch chan []stats.Stats) {
func (j *JDE) InitAndRun(generations, benchMinIters, mutStrategy, adptScheme, np int, f, cr float64, dimensions []int, bench string, ch chan []stats.Stats, chAlgoMeans chan *stats.AlgoBenchMean) {
if j == nil {
jDELogger.Fatalln("jDE is nil, NewjDE() needs to be called first. exiting...")
}
j.Init(generations, benchMinIters, mutStrategy, adptScheme, np, f, cr, dimensions, bench, ch)
j.Init(generations, benchMinIters, mutStrategy, adptScheme, np, f, cr, dimensions, bench, ch, chAlgoMeans)
j.Run()
}
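A minimal usage sketch of the two entry points above, not part of the commit: it assumes a caller outside package de with the de and stats packages imported; "Schwefel", the dimension list and the 0.5/0.9 control values are placeholders, while the remaining arguments mirror the DojDE call shown earlier.
ch := make(chan []stats.Stats, 1)
chAlgoMeans := make(chan *stats.AlgoBenchMean, 1)

jde := de.NewjDE()
// generations, benchMinIters, mutStrategy, adptScheme, np, f, cr, dims, bench, channels.
jde.InitAndRun(-1, 30, 0, 0, 50, 0.5, 0.9, []int{10, 20}, "Schwefel", ch, chAlgoMeans)

res := <-ch            // per-dimension stats, as sent by Run
means := <-chAlgoMeans // per-bench means, as sent by Run
_, _ = res, means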
@@ -132,9 +139,58 @@ func (j *JDE) Run() {
jDELogger.Fatalln("jDE needs to be initialised before calling Run(), exiting...")
}
var jDEStats []stats.Stats
jDEMeans := &stats.AlgoBenchMean{
Algo: "jDE",
BenchMeans: make([]stats.BenchMean, 0, len(j.Dimensions)),
}
// run evolve for all dimensions.
for _, dim := range j.Dimensions {
maxFES := bench.GetGAMaxFES(dim)
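// note: maxFES / j.NP is integer division, so fesPerIter is effectively
// the number of generations, i.e. how many whole-population evaluation
// passes fit into the FES budget for this dimension.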
fesPerIter := int(float64(maxFES / j.NP))
jDEStatDimX := &stats.Stats{
Algo: "jDE",
Dimens: dim,
Iterations: j.BenchMinIters,
Generations: maxFES,
}
funcStats := &stats.FuncStats{BenchName: j.BenchName}
dimXMean := &stats.BenchMean{
Bench: j.BenchName,
Dimens: dim,
Iterations: j.BenchMinIters,
Generations: maxFES,
Neighbours: -1,
}
benchFuncParams := bench.FunctionParams[j.BenchName]
uniDist := distuv.Uniform{
Min: benchFuncParams.Min(),
Max: benchFuncParams.Max(),
}
jDELogger.Printf("running bench \"%s\" for %dD, maxFES: %d\n",
j.BenchName, dim, maxFES,
)
funcStats.BenchResults = make([]stats.BenchRound, j.BenchMinIters)
// rand.Seed(uint64(time.Now().UnixNano()))
// create and seed a source of pseudo-randomness.
src := rand.NewSource(uint64(rand.Int63()))
// track execution duration.
start := time.Now()
for iter := 0; iter < j.BenchMinIters; iter++ {
jDELogger.Printf("run: %d, bench: %s, %dD, started at %s",
iter, j.BenchName, dim, start,
)
funcStats.BenchResults[iter].Iteration = iter
uniDist.Src = src
// create a population with known params.
pop := newPopulation(j.BenchName, j.NP, dim)
@@ -143,15 +199,211 @@ func (j *JDE) Run() {
pop.Seed = uint64(time.Now().UnixNano())
// initialise the population.
pop.Init()
// jDELogger.Printf("%+v\n", pop.Population)
j.evolve(maxFES, pop)
var bestResult float64
// the core.
for i := 0; i < fesPerIter; i++ {
r := j.evaluate(pop)
// distinguish the first iteration from the subsequent ones.
switch i {
case 0:
bestResult = r
default:
jDELogger.Printf("run: %d, bench: %s, %dD, iteration: %d/%d",
iter, j.BenchName, dim, i, fesPerIter-1,
)
// call evolve where jDE runs.
j.evolve(pop, &uniDist)
// take measurements of current population fitness.
r = j.evaluate(pop)
// save if better.
if r < bestResult {
bestResult = r
}
}
funcStats.BenchResults[iter].Results = append(
funcStats.BenchResults[iter].Results,
bestResult,
)
// this block makes sure func evaluations are counted in a way that keeps
// plots directly comparable across algorithms: the winning (current best)
// value is appended another NP-1 times (the first copy was saved just
// above) to reflect that while the remaining population individuals are
// being evaluated and compared against it, the current best value itself
// is kept around and recorded as the best of the generation. e.g. with
// NP = 50, each generation contributes 50 identical entries, so the
// per-iteration result count lines up with maxFES.
for x := 0; x < j.NP-1; x++ {
funcStats.BenchResults[iter].Results = append(
funcStats.BenchResults[iter].Results,
bestResult,
)
}
}
}
elapsed := time.Since(start)
jDELogger.Printf("completed: bench: %s, %dD, computing took %s\n",
j.BenchName, dim, elapsed,
)
// get mean vals.
dimXMean.MeanVals = stats.GetMeanVals(funcStats.BenchResults, maxFES)
funcStats.MeanVals = dimXMean.MeanVals
jDEStatDimX.BenchFuncStats = append(
jDEStatDimX.BenchFuncStats, *funcStats,
)
jDEStats = append(jDEStats, *jDEStatDimX)
jDEMeans.BenchMeans = append(jDEMeans.BenchMeans, *dimXMean)
}
sort.Sort(jDEMeans)
j.chAlgoMeans <- jDEMeans
j.ch <- jDEStats
}
// evaluate evaluates the fitness of the current population and returns the
// best (lowest) objective value found.
func (j *JDE) evaluate(pop *Population) float64 {
f := bench.Functions[pop.Problem]
bestIndividual := pop.Population[0].CurX
bestSolution := f(bestIndividual)
for _, v := range pop.Population {
if solution := f(v.CurX); solution < bestSolution {
bestSolution = solution
}
}
return bestSolution
}
// evolve advances the passed population by one generation of the jDE
// (self-adapting Differential Evolution) algorithm.
func (j *JDE) evolve(maxFES int, pop *Population) {}
// nolint: gocognit
func (j *JDE) evolve(pop *Population, uniDist *distuv.Uniform) {
popCount := len(pop.Population)
for i, currentIndividual := range pop.Population {
idcs := make([]int, 0, popCount-1)
// gather indices.
for k := 0; k < popCount; k++ {
if k != i {
idcs = append(idcs, k)
}
}
// randomly choose 3 of those idcs.
selectedIdcs := make([]int, 0)
selectedA := false
selectedB := false
selectedC := false
for !selectedA {
candidateA := rand.Intn(len(idcs)) % len(idcs)
if candidateA != i {
selectedIdcs = append(selectedIdcs, candidateA)
selectedA = true
}
}
for !selectedB {
a := selectedIdcs[0]
candidateB := rand.Intn(len(idcs)) % len(idcs)
if candidateB != i && candidateB != a {
selectedIdcs = append(selectedIdcs, candidateB)
selectedB = true
}
}
for !selectedC {
a := selectedIdcs[0]
b := selectedIdcs[1]
candidateC := rand.Intn(len(idcs)) % len(idcs)
if candidateC != i && candidateC != a && candidateC != b {
selectedIdcs = append(selectedIdcs, candidateC)
selectedC = true
}
}
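// note: the candidates above are drawn from [0, len(idcs)), i.e.
// [0, popCount-1), and later reused directly as population indices, so the
// last individual can never be selected; shuffling idcs (e.g. with
// rand.Shuffle) and taking its first three entries would pick uniformly
// from all indices except i.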
// selected contains the selected population individuals.
selected := make([]PopulationIndividual, 0)
// select individuals for rand/1/bin
for _, idx := range selectedIdcs {
for k := 0; k < popCount; k++ {
if k == idx {
selected = append(selected, pop.Population[idx])
}
}
}
mutant := make([]float64, pop.Dimen)
// mutate.
for k := 0; k < pop.Dimen; k++ {
// mutant = a + mut * (b - c)
mutant[k] = selected[0].CurX[k] + (j.F * (selected[1].CurX[k] - selected[2].CurX[k]))
// clip values to [0, 1].
switch {
case mutant[k] < 0:
mutant[k] = 0
case mutant[k] > 1:
mutant[k] = 1
}
}
crossPoints := make([]bool, pop.Dimen)
// prepare crossover points (binomial crossover).
for k := 0; k < pop.Dimen; k++ {
if v := uniDist.Rand(); v < j.CR {
crossPoints[k] = true
} else {
crossPoints[k] = false
}
}
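// note: canonical binomial crossover additionally forces one randomly
// chosen dimension (often called j_rand) to always take the mutant value,
// guaranteeing the trial differs from the current individual; that guard
// is not applied here.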
trial := make([]float64, pop.Dimen)
// recombine using crossover points: take the mutant value where a
// crossover point is set, otherwise keep the current individual's value.
for k := 0; k < pop.Dimen; k++ {
if crossPoints[k] {
trial[k] = mutant[k]
} else {
trial[k] = currentIndividual.CurX[k]
}
}
f := bench.Functions[pop.Problem]
currentFitness := f(currentIndividual.CurX)
trialFitness := f(trial)
// replace if better.
if trialFitness < currentFitness {
pop.Population[i].CurX = trial
}
}
}
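The self-adaptation of F and CR that gives jDE its name is not wired into evolve yet (the loop above always reads the fixed j.F and j.CR), which matches the commit being marked wip. For reference, a minimal sketch of the parameter update rule from Brest et al. (2006), assuming per-individual F and CR values were stored on each PopulationIndividual (fields this commit does not define) and using the already-imported golang.org/x/exp/rand:
// illustrative only: jDE resamples each individual's control parameters
// just before mutation, with small probabilities tau1 and tau2; otherwise
// the inherited values are kept.
const (
	tau1 = 0.1 // probability of resampling F
	tau2 = 0.1 // probability of resampling CR
	fl   = 0.1 // lower bound of F
	fu   = 0.9 // scaling range of F
)

func selfAdaptParams(f, cr float64) (newF, newCR float64) {
	newF, newCR = f, cr
	if rand.Float64() < tau1 {
		newF = fl + fu*rand.Float64() // new F in [0.1, 1.0)
	}
	if rand.Float64() < tau2 {
		newCR = rand.Float64() // new CR in [0, 1)
	}
	return newF, newCR
}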
// NewjDE returns a pointer to a new, uninitialised jDE instance.
func NewjDE() *JDE {

@@ -97,16 +97,29 @@ func (p *Population) SetV(n int, nuV DecisionVector) {}
// Init initialises all individuals to random values.
func (p *Population) Init() {
uniform := distuv.Uniform{}
benchFuncParams := bench.FunctionParams[p.Problem]
uniform := distuv.Uniform{
Min: benchFuncParams.Min(),
Max: benchFuncParams.Max(),
}
uniform.Src = rand.NewSource(p.Seed)
for _, v := range p.Population {
jDELogger.Printf("population initialisation - popCount: %d, seed: %d\n",
len(p.Population), p.Seed,
)
for i, v := range p.Population {
v.CurX = make([]float64, p.Dimen)
for i := 0; i < p.Dimen; i++ {
v.CurX[i] = uniform.Rand()
for j := 0; j < p.Dimen; j++ {
v.CurX[j] = uniform.Rand()
}
p.Population[i] = v
}
jDELogger.Println("population initialised")
}
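The p.Population[i] = v write-back added above is what makes the initialisation stick: range yields a copy of each PopulationIndividual, so assigning to v.CurX alone would be lost when the loop moves on. It goes hand in hand with the newPopulation change below, which now allocates the slice with length np instead of only capacity, so there are actually np elements to range over. A tiny standalone illustration of the copy semantics (all names here are illustrative only):
type individual struct{ curX []float64 }

func initAll(pop []individual, dim int) {
	for i, v := range pop {
		v.curX = make([]float64, dim) // v is a copy of pop[i]
		pop[i] = v                    // without this, pop[i].curX stays nil
	}
}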
// Reinit reinitialises all individuals.
@@ -144,7 +157,7 @@ func newPopulation(benchProblem string, np, dimen int) *Population {
p.Dimen = dimen
// pre-alloc.
p.Population = make([]PopulationIndividual, 0, np)
p.Population = make([]PopulationIndividual, np)
return p
}

run.go (14 changed lines)

@@ -24,7 +24,8 @@ func run() {
if *generate {
// atm we're only doing Random search and SHC
algoCount := 2
// algoCount := 2
algoCount := 1
var wg sync.WaitGroup
@@ -32,15 +33,16 @@ func run() {
var m sync.Mutex
go algo.DoRandomSearch(&wg, &m)
go algo.DoStochasticHillClimbing(&wg, &m)
// go algo.DoStochasticHillClimbing100Neigh(&wg, &m)
// go algo.DoRandomSearch(&wg, &m)
// go algo.DoStochasticHillClimbing(&wg, &m)
// // go algo.DoStochasticHillClimbing100Neigh(&wg, &m)
go algo.DojDE(&wg, &m)
wg.Wait()
pL, benchCount := algo.PrepComparisonOfMeans(&wg)
// pL, benchCount := algo.PrepComparisonOfMeans(&wg)
report.SaveComparisonOfMeans(*pL, benchCount)
// report.SaveComparisonOfMeans(*pL, benchCount)
report.SaveTexAllPics()
report.SaveTexAllTables()
}