Merge pull request #3 from jungtaekkim/0.1.6
0.1.6
jungtaekkim committed Jan 12, 2022
2 parents 3650aae + 05d116b commit 6e82748
Showing 61 changed files with 638 additions and 61 deletions.
29 changes: 29 additions & 0 deletions .github/workflows/pytest.yml
@@ -0,0 +1,29 @@
name: pytest

on: [push, pull_request]

jobs:
  test:
    strategy:
      matrix:
        python-version:
          - '3.6'
          - '3.7'
          - '3.8'
          - '3.9'
          - '3.10'
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip setuptools
        pip install pytest
        pip install scipy
    - name: Run pytest
      run: |
        pip install .
        pytest
16 changes: 0 additions & 16 deletions .travis.yml

This file was deleted.

2 changes: 1 addition & 1 deletion LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019-2021 Jungtaek Kim
Copyright (c) 2019-2022 Jungtaek Kim

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
9 changes: 3 additions & 6 deletions README.md
@@ -1,9 +1,9 @@
# BayesO Benchmarks
[![Build Status](https://app.travis-ci.com/jungtaekkim/bayeso-benchmarks.svg?branch=main)](https://app.travis-ci.com/jungtaekkim/bayeso-benchmarks)
[![Build Status](https://github.com/jungtaekkim/bayeso-benchmarks/actions/workflows/pytest.yml/badge.svg)](https://github.com/jungtaekkim/bayeso-benchmarks/actions/workflows/pytest.yml)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)

Benchmarks for Bayesian optimization.
The details of benchmark functions can be found in [these notes](http://jungtaek.github.io/notes/benchmarks_bo.pdf).
The details of benchmark functions can be found in [these notes](https://jungtaek.github.io/notes/benchmarks_bo.pdf).

## Installation
We recommend installing it with `virtualenv`.
@@ -65,10 +65,7 @@ Y_noise = obj_fun.output_gaussian_noise(X)
```

## Author
* [Jungtaek Kim](http://jungtaek.github.io) (POSTECH)

## Contact
* Jungtaek Kim: [jtkim@postech.ac.kr](mailto:jtkim@postech.ac.kr)
* [Jungtaek Kim](https://jungtaek.github.io) (POSTECH)

## License
[MIT License](LICENSE)
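The usage pattern referenced in the README diff above carries over to the new benchmarks. A minimal sketch, assuming the package is installed from this commit and using only methods visible in these diffs (the constructor with an optional `seed`, `get_bounds`, `sample_uniform`, and the `output_gaussian_noise` call from the README context line):

```python
from bayeso_benchmarks import Kim1  # one of the benchmarks added in 0.1.6

obj_fun = Kim1()
bounds = obj_fun.get_bounds()               # per-dimension [lower, upper] pairs
X = obj_fun.sample_uniform(10, seed=42)     # 10 points drawn uniformly within the bounds
Y_noise = obj_fun.output_gaussian_noise(X)  # noisy evaluations, as in the README snippet above

print(bounds.shape, X.shape, Y_noise.shape)
```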
8 changes: 6 additions & 2 deletions bayeso_benchmarks/__init__.py
@@ -1,9 +1,10 @@
#
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: June 24, 2021
# last updated: October 23, 2021
#

__version__ = '0.1.5'

__version__ = '0.1.6'


from bayeso_benchmarks.inf_dim_ackley import Ackley
@@ -24,6 +25,9 @@
from bayeso_benchmarks.two_dim_eggholder import Eggholder
from bayeso_benchmarks.two_dim_goldsteinprice import GoldsteinPrice
from bayeso_benchmarks.two_dim_holdertable import HolderTable
from bayeso_benchmarks.two_dim_kim1 import Kim1
from bayeso_benchmarks.two_dim_kim2 import Kim2
from bayeso_benchmarks.two_dim_kim3 import Kim3
from bayeso_benchmarks.two_dim_michalewicz import Michalewicz
from bayeso_benchmarks.two_dim_sixhumpcamel import SixHumpCamel
from bayeso_benchmarks.two_dim_threehumpcamel import ThreeHumpCamel
7 changes: 6 additions & 1 deletion bayeso_benchmarks/benchmark_base.py
@@ -74,6 +74,11 @@ def function(self, bx):
    def _output(self, X):
        assert isinstance(X, np.ndarray)

        bounds = self.get_bounds()

        assert np.all(X >= bounds[:, 0])
        assert np.all(X <= bounds[:, 1])

        if len(X.shape) == 2:
            list_results = [self.function(bx) for bx in X]
        else:
@@ -220,6 +225,6 @@ def sample_uniform(self, num_points, seed=None):
        bounds = self.get_bounds()

        points = random_state_.uniform(size=(num_points, dim_problem))
        points = (bounds[:, 0] + (bounds[:, 1] - bounds[:, 0])) * points
        points = bounds[:, 0] + (bounds[:, 1] - bounds[:, 0]) * points

        return points
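The second hunk fixes how `sample_uniform` rescales raw uniform draws: the old expression multiplied each `[0, 1)` sample by `lower + (upper - lower)`, i.e. by the upper bound, so samples were never shifted onto the lower bound. A small numerical check of the two expressions (the bounds and draws below are made up for illustration):

```python
import numpy as np

bounds = np.array([[-5.0, 5.0], [0.0, 10.0]])  # hypothetical per-dimension bounds
points = np.array([[0.0, 0.0], [1.0, 1.0]])    # raw uniform draws in [0, 1)

old = (bounds[:, 0] + (bounds[:, 1] - bounds[:, 0])) * points  # buggy: scales by the upper bound only
new = bounds[:, 0] + (bounds[:, 1] - bounds[:, 0]) * points    # fixed: affine map onto [lower, upper)

print(old)  # [[ 0.  0.] [ 5. 10.]] -- a draw of 0.0 never reaches the lower bound -5.0
print(new)  # [[-5.  0.] [ 5. 10.]]
```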
5 changes: 4 additions & 1 deletion bayeso_benchmarks/inf_dim_ackley.py
@@ -40,4 +40,7 @@ def __init__(self, dim_problem, seed=None):

        function = lambda bx: fun_target(bx, dim_problem)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        except:
            super(Ackley, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
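The same replacement of the direct `Function.__init__` call with a `try`/`except` around `super()` appears in every benchmark class below; the commit itself does not state the motivation. A standalone sketch of the structure (the class names here are illustrative, not from the repository): the zero-argument `super()` form works on Python 3, while the explicit two-argument form serves as a fallback if the short form fails.

```python
class Base:
    def __init__(self, value):
        self.value = value


class Child(Base):
    def __init__(self):
        try:
            super().__init__(42)             # zero-argument form (Python 3)
        except TypeError:                    # the diffs above use a bare `except:`
            super(Child, self).__init__(42)  # explicit two-argument form as a fallback


print(Child().value)  # 42
```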
5 changes: 4 additions & 1 deletion bayeso_benchmarks/inf_dim_cosines.py
@@ -33,4 +33,7 @@ def __init__(self, dim_problem, seed=None):

        function = lambda bx: fun_target(bx, dim_problem)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        except:
            super(Cosines, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/inf_dim_rosenbrock.py
@@ -37,4 +37,7 @@ def __init__(self, dim_problem, seed=None):

        function = lambda bx: fun_target(bx, dim_problem)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        except:
            super(Rosenbrock, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/inf_dim_sphere.py
@@ -36,4 +36,7 @@ def __init__(self, dim_problem, seed=None):

        function = lambda bx: fun_target(bx, dim_problem)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
        except:
            super(Sphere, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, dim_problem=dim_problem, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/one_dim_constant.py
@@ -43,4 +43,7 @@ def __init__(self,
        global_minimum = constant
        function = lambda bx: fun_target(bx, dim_bx, constant)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Constant, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/one_dim_gramacyandlee2012.py
@@ -30,4 +30,7 @@ def __init__(self, seed=None):
        global_minimum = -0.86901113
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(GramacyAndLee2012, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/one_dim_linear.py
@@ -47,4 +47,7 @@ def __init__(self,
        global_minimum = slope * bounds[0, 1]
        function = lambda bx: fun_target(bx, dim_bx, slope)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Linear, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/one_dim_step.py
@@ -55,4 +55,7 @@ def __init__(self,
        global_minimum = np.min(step_values)
        function = lambda bx: fun_target(bx, dim_bx, steps, step_values)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Step, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/six_dim_hartmann6d.py
@@ -63,4 +63,7 @@ def __init__(self,
        global_minimum = -3.322368
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Hartmann6D, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/three_dim_hartmann3d.py
@@ -60,4 +60,7 @@ def __init__(self,
        global_minimum = -3.86278
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Hartmann3D, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_beale.py
@@ -31,4 +31,7 @@ def __init__(self, seed=None):
        global_minimum = 0.0
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Beale, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_bohachevsky.py
@@ -31,4 +31,7 @@ def __init__(self, seed=None):
        global_minimum = 0.0
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Bohachevsky, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_branin.py
@@ -53,4 +53,7 @@ def __init__(self,
        global_minimum = 0.3978874
        function = lambda bx: fun_target(bx, dim_bx, a, b, c, r, s, t)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Branin, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_dejong5.py
@@ -48,4 +48,7 @@ def __init__(self, seed=None):
        global_minimum = 0.9980038
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(DeJong5, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_dropwave.py
@@ -31,4 +31,7 @@ def __init__(self, seed=None):
        global_minimum = -1.0
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(DropWave, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_eggholder.py
@@ -38,4 +38,7 @@ def __init__(self,
        global_minimum = -959.6406627
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Eggholder, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_goldsteinprice.py
@@ -39,4 +39,7 @@ def __init__(self, seed=None):
        global_minimum = 3.0
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(GoldsteinPrice, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
5 changes: 4 additions & 1 deletion bayeso_benchmarks/two_dim_holdertable.py
@@ -34,4 +34,7 @@ def __init__(self, seed=None):
        global_minimum = -19.2085026
        function = lambda bx: fun_target(bx, dim_bx)

        Function.__init__(self, dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(HolderTable, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
37 changes: 37 additions & 0 deletions bayeso_benchmarks/two_dim_kim1.py
@@ -0,0 +1,37 @@
#
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: October 27, 2021
#

import numpy as np

from bayeso_benchmarks.benchmark_base import Function


def fun_target(bx, dim_bx):
    assert len(bx.shape) == 1
    assert bx.shape[0] == dim_bx

    y = np.sin(bx[0]) + np.cos(bx[1]) + 0.016 * (bx[0] - 5.0)**2 + 0.008 * (bx[1] - 5.0)**2
    return y


class Kim1(Function):
    def __init__(self, seed=None):
        assert isinstance(seed, (type(None), int))

        dim_bx = 2
        bounds = np.array([
            [-16.0, 16.0],
            [-16.0, 16.0],
        ])
        global_minimizers = np.array([
            [4.72130726, 3.17086303],
        ])
        global_minimum = -1.9715232347905773
        function = lambda bx: fun_target(bx, dim_bx)

        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Kim1, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
42 changes: 42 additions & 0 deletions bayeso_benchmarks/two_dim_kim2.py
@@ -0,0 +1,42 @@
#
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: October 27, 2021
#

import numpy as np

from bayeso_benchmarks.benchmark_base import Function


def fun_target(bx, dim_bx):
    assert len(bx.shape) == 1
    assert bx.shape[0] == dim_bx

    y = np.sin(bx[0] / 1.0) + np.cos(bx[1] / 1.0) \
        + np.sin(bx[0] / 2.0) + np.cos(bx[1] / 2.0) \
        + np.sin(bx[0] / 4.0) + np.cos(bx[1] / 4.0) \
        + np.sin(bx[0] / 8.0) + np.cos(bx[1] / 8.0) \
        + np.sin(bx[0] / 16.0) + np.cos(bx[1] / 16.0) \
        + 0.0032 * (bx[0] - 20.0)**2 + 0.0016 * (bx[1] - 20.0)**2
    return y


class Kim2(Function):
    def __init__(self, seed=None):
        assert isinstance(seed, (type(None), int))

        dim_bx = 2
        bounds = np.array([
            [-128.0, 128.0],
            [-128.0, 128.0],
        ])
        global_minimizers = np.array([
            [-2.1013466, 34.14526252],
        ])
        global_minimum = -3.454387473489018
        function = lambda bx: fun_target(bx, dim_bx)

        try:
            super().__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
        except:
            super(Kim2, self).__init__(dim_bx, bounds, global_minimizers, global_minimum, function, seed=seed)
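The global minima reported for the new Kim1 and Kim2 benchmarks can be spot-checked against the minimizers listed in the diffs above. A minimal sketch, assuming the `function(bx)` method from `benchmark_base.py` evaluates a single input vector:

```python
import numpy as np

from bayeso_benchmarks import Kim1, Kim2

# Minimizers and minima copied from the two diffs above.
cases = [
    (Kim1, np.array([4.72130726, 3.17086303]), -1.9715232347905773),
    (Kim2, np.array([-2.1013466, 34.14526252]), -3.454387473489018),
]

for cls, minimizer, minimum in cases:
    obj_fun = cls()
    value = obj_fun.function(minimizer)
    print(cls.__name__, value, np.isclose(value, minimum))
```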
