Skip to content

Commit

Permalink
The PAO cometh
Browse files Browse the repository at this point in the history
add PAO - Particle attractor algorithm
  • Loading branch information
MDCHAMP authored Jan 10, 2023
2 parents dc45fa1 + e588f91 commit 068bab7
Show file tree
Hide file tree
Showing 13 changed files with 459 additions and 183 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pytest-benchmarking.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.9]
python-version: [3.11]

steps:
- uses: actions/checkout@v2
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/pytest-testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.5, 3.6, 3.7, 3.8, 3.9]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']

steps:
- uses: actions/checkout@v2
Expand Down
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -150,4 +150,6 @@ local/
*.old

# Local scratch
.misc/
.misc/
Paper
fig.png
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
# This call to setup() does all the work
setup(
name="freelunch",
version="0.0.14",
version="0.0.15",
description="Heuristic and meta-heuristic optimisation suite in Python",
long_description=rm,
long_description_content_type="text/markdown",
Expand Down
2 changes: 1 addition & 1 deletion src/freelunch/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "0.0.14"
__version__ = "0.0.15"

from freelunch.optimisers import *
import freelunch.benchmarks as benchmarks
6 changes: 6 additions & 0 deletions src/freelunch/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,12 @@ def _obj(self, obj, vec):
except(ValueError, TypeError):
return None

def curve_callback(self, best=None, mean=None):
    """Record (or reset) the optimisation history curves.

    Called with no arguments it (re)initialises ``self.data`` with empty
    'best' and 'mean' series; called with values it appends one sample
    of each to the running history.
    """
    if best is None:
        # reset the stored curves
        self.data = {'best': [], 'mean': []}
        return
    self.data['best'].append(best)
    self.data['mean'].append(mean)

# Subclasses for granularity

Expand Down
142 changes: 135 additions & 7 deletions src/freelunch/benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,10 @@ class benchmark:
'''
default_bounds = lambda n:None
rtm_optimum = lambda n:None
tol=None

def __init__(self, n=None):
if n is None:
self.n = 2
else:
self.n = n
def __init__(self, n=2):
self.n = n
self.bounds = self.default_bounds()
self.optimum = self.rtn_optimum()

Expand All @@ -27,8 +25,7 @@ def __call__(self, dna):
raise ZeroLengthSolutionError('An empty trial solution was passed')
return self.obj(dna)

# %%

# %% some misc (v0.x) benchmarks

class ackley(benchmark):
'''
Expand Down Expand Up @@ -97,3 +94,134 @@ def obj(self, dna):
t2 = - 0.1 * np.exp(-np.sum(dna**2))
return t1 + t2



# %% https://robertmarks.org/Classes/ENGR5358/Papers/functions.pdf

class DeJong(benchmark):
    '''
    DeJong's 1st function (the sphere function) in n dimensions.

    Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-5.12, 5.12]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # sum of squared components
        return np.sum(dna ** 2)

class HyperElipsoid(benchmark):
    '''
    HyperElipsoid function in n dimensions.

    Axis-parallel ellipsoid: f(x) = sum_i i * x_i^2 (1-based i).
    Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-5.12, 5.12]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # per-axis integer weights 1..n
        weights = np.arange(1, self.n + 1)
        return np.sum(weights * dna ** 2)

class RotatedHyperElipsoid(benchmark):
    '''
    RotatedHyperElipsoid function in n dimensions.

    f(x) = sum_{i=1..n} sum_{j=1..i} x_j^2 (a sum of cumulative sums
    of squares). Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-65.536, 65.536]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # Each x_j^2 (0-based j) appears in exactly (n - j) of the
        # partial sums, so the double sum collapses to one weighted
        # sum: O(n) instead of the naive O(n^2) loop of np.sum calls
        # over growing slices.
        weights = self.n - np.arange(self.n)
        return np.sum(weights * dna ** 2)

class Rosenbrock(benchmark):
    '''
    Rosenbrock's function in n dimensions (banana function).

    Global minimum f0 = 0 at x = (1, ..., 1).
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-2.048, 2.048]] * self.n)

    def rtn_optimum(self):
        return np.array([1] * self.n)

    def obj(self, dna):
        head, tail = dna[:-1], dna[1:]
        # curved-valley term plus distance-from-one term
        return np.sum(100 * (tail - head ** 2) ** 2 + (1 - head) ** 2)


class Ragstrin(benchmark):
    '''
    Ragstrin's (i.e. Rastrigin's) function in n dimensions.

    NOTE: the class name keeps the original (misspelt) spelling for
    backward compatibility. Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-5.12, 5.12]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # highly multimodal: quadratic bowl plus cosine ripple
        ripple = 10 * np.cos(2 * np.pi * dna)
        return 10 * self.n + np.sum(dna ** 2 - ripple)

class Schwefel(benchmark):
    '''
    Schwefel's function in n dimensions
    (divided through by dimension for constant f0).

    Global minimum f0 = 0 at x_i = 420.9687.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-500, 500]] * self.n)

    def rtn_optimum(self):
        return np.array([420.9687] * self.n)

    def obj(self, dna):
        # deceptive landscape: best regions far from the origin
        travel = dna * np.sin(np.sqrt(np.abs(dna)))
        return 418.9828872721625 - np.sum(travel) / self.n

class Griewangk(benchmark):
    '''
    Griewangk's (Griewank's) function in n dimensions.

    Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-600, 600]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # quadratic bowl plus an oscillatory product term
        quad = (1 / 4000) * np.sum(dna ** 2)
        osc = np.prod(np.cos(dna / np.sqrt(np.arange(self.n) + 1)))
        return quad - osc + 1

class PowerSum(benchmark):
    '''
    Powersum function in n dimensions.

    f(x) = sum_i |x_i|^(i+1) (0-based i), i.e. increasing integer
    powers of the absolute components. Global minimum f0 = 0 at the
    origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-1, 1]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    def obj(self, dna):
        # sum() starts from 0 and adds left-to-right, matching the
        # original accumulation loop exactly
        return sum(np.abs(gene) ** (i + 1) for i, gene in enumerate(dna))

class Ackley(benchmark):
    '''
    Ackley function in n dimensions.

    f(x) = -a*exp(-b*sqrt(mean(x^2))) - exp(mean(cos(c*x))) + a + e
    Global minimum f0 = 0 at the origin.
    '''
    f0 = 0

    def default_bounds(self):
        return np.array([[-32.768, 32.768]] * self.n)

    def rtn_optimum(self):
        return np.array([0] * self.n)

    # canonical Ackley constants
    a, b, c = 20, 0.2, 2 * np.pi

    def obj(self, dna):
        # BUGFIX: the canonical first term takes the square *root* of
        # the mean square; the previous code omitted np.sqrt, which
        # flattens the surface but still gives f(0) = 0, so checking
        # f0 at the optimum could not catch it.
        t1 = -self.a * np.exp(-self.b * np.sqrt(np.mean(dna ** 2)))
        t2 = -np.exp(np.mean(np.cos(self.c * dna)))
        t3 = self.a + np.e
        return t1 + t2 + t3


# Benchmark collection drawn from the Molga & Smutnicki test-function
# reference linked above; each entry is a class (instantiate with the
# desired dimension n to get a callable objective).
MOLGA_TEST_SUITE = [
    DeJong,
    HyperElipsoid,
    RotatedHyperElipsoid,
    Rosenbrock,
    Ragstrin,
    Schwefel,
    Griewangk,
    PowerSum,
    Ackley
]
Loading

0 comments on commit 068bab7

Please sign in to comment.