Skip to content

Commit

Permalink
'1223'
Browse files Browse the repository at this point in the history
  • Loading branch information
smasky committed Dec 23, 2024
1 parent 79e1387 commit c185a00
Show file tree
Hide file tree
Showing 12 changed files with 287 additions and 104 deletions.
3 changes: 1 addition & 2 deletions UQPyL/optimization/multi_objective/moasmo.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@
from ...surrogates import Mo_Surrogates
from ..algorithmABC import Algorithm, Population, Verbose
from .nsga_ii import NSGAII
from ...surrogates.rbf import RBF

from ...surrogates.rbf.radial_basis_function import RBF

class MOASMO(Algorithm):
'''
Expand Down
2 changes: 1 addition & 1 deletion UQPyL/problems/pratical_problem.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def __init__(self, objFunc: callable,
self.conFunc = conFunc

if name is None:
name = self.__class__.__name__
self.name = self.__class__.__name__
else:
self.name = name

Expand Down
3 changes: 2 additions & 1 deletion UQPyL/surrogates/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@
from . import kriging
from . import mars
from . import svr
from . import fnn
from . import fnn
from .auto_tuner import autoTuner
19 changes: 0 additions & 19 deletions UQPyL/surrogates/autoTuner.py

This file was deleted.

100 changes: 100 additions & 0 deletions UQPyL/surrogates/auto_tuner.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
from typing import Literal
import numpy as np

from .surrogateABC import Surrogate
from ..optimization.algorithmABC import Algorithm
from ..utility.model_selections import RandSelect
from ..utility.metrics import r_square
from ..problems.pratical_problem import PracticalProblem
class autoTuner():
    """Tune a surrogate model's hyper-parameters with an optimization algorithm.

    A hold-out split of the training data is used to score candidate
    parameter vectors by (negative) R^2, the optimizer searches the
    parameter space, and the model is finally refit on the full data set
    with the best parameters found.

    Parameters
    ----------
    optimizer : Algorithm
        Search algorithm; currently only evolutionary optimizers
        (``optimizer.type == 'EA'``) are supported by :meth:`opTune`.
    model : Surrogate
        Surrogate model whose parameters are tuned in place.
    """

    def __init__(self, optimizer: "Algorithm", model: "Surrogate"):

        self.optimizer = optimizer

        self.model = model

    def logIdx(self, paraInfos):
        """Return the flat indices of parameters searched in log space.

        Parameters whose ``parasType`` is 0 are treated as log-scaled.
        Returns an empty integer array when none qualify (plain
        ``np.concatenate`` would raise on an empty sequence).
        """
        parasType = self.model.setting.parasType

        idxGroups = [idx for name, idx in paraInfos.items() if parasType[name] == 0]

        if not idxGroups:
            # Avoid np.concatenate([]) ValueError when no log-scaled parameter exists.
            return np.array([], dtype=int)

        return np.concatenate(idxGroups)

    def opTune(self, xData: np.ndarray, yData: np.ndarray, paraList: list, ratio: int = 10, useLog: bool = True):
        """Optimize the parameters named in ``paraList`` and refit the model.

        Parameters
        ----------
        xData, yData : np.ndarray
            Training inputs and targets (scaled internally by the model).
        paraList : list
            Names of the model parameters to tune.
        ratio : int, default 10
            Split ratio forwarded to ``RandSelect`` for the hold-out split
            (presumably a percentage — TODO confirm against RandSelect).
        useLog : bool, default True
            Search log-typed parameters in log space.

        Returns
        -------
        (bestDec, bestObj)
            Best decision vector (back-transformed from log space) and its
            objective value (negative R^2 on the hold-out set).

        Raises
        ------
        ValueError
            If the optimizer is not an evolutionary algorithm; the original
            code fell through to an opaque NameError in that case.
        """
        xData, yData = self.model.__check_and_scale__(xData, yData)

        # Keep an untouched copy for the final refit on the full data set.
        xDataCopy, yDataCopy = np.copy(xData), np.copy(yData)

        # Kernel (re-)initialization differs per model family.
        if self.model.name in ["GPR", "KRG"]:
            self.model.setKernel(self.model.kernel, xData.shape[1])
        elif self.model.name in ["RBF"]:
            self.model.setKernel(self.model.kernel)

        selector = RandSelect(ratio)

        trainIdx, testIdx = selector.split(xData)

        xTrain, yTrain = xData[trainIdx], yData[trainIdx]
        xTest, yTest = xData[testIdx], yData[testIdx]

        paraInfos, ub, lb = self.model.setting.getParaInfos(paraList)
        nInput = ub.size

        if useLog:
            idx = self.logIdx(paraInfos)

        if self.optimizer.type != 'EA':
            raise ValueError("autoTuner.opTune currently supports only evolutionary optimizers (optimizer.type == 'EA').")

        def objFunc(X):
            # Vectorized objective: one negative R^2 per candidate row.
            Y = np.zeros((X.shape[0], 1))

            XX = X.copy()
            if useLog:
                # Candidates live in log space; map back before assignment.
                XX[:, idx] = np.exp(XX[:, idx])

            for i, x in enumerate(XX):

                self.model.setting.assignValues(paraInfos, x)

                try:
                    self.model._fitPure(xTrain, yTrain)

                    yPred = self.model.predict(self.model.__X_inverse_transform__(xTest))

                    obj = -1*r_square(self.model.__Y_inverse_transform__(yTest), yPred)

                except Exception:
                    # A candidate that fails to fit gets the worst objective.
                    obj = np.inf

                Y[i, 0] = obj

            return Y

        if useLog:
            # Search bounds must live in the same (log) space as the candidates.
            ub[idx] = np.log(ub[idx])
            lb[idx] = np.log(lb[idx])

        problem = PracticalProblem(objFunc, nInput, 1, ub, lb)

        res = self.optimizer.run(problem=problem)

        bestDec, bestObj = res.bestDec, res.bestObj

        if useLog:
            bestDec[idx] = np.exp(bestDec[idx])

        self.model.setting.assignValues(paraInfos, bestDec)

        # Final refit on the complete (scaled) data with the winning parameters.
        self.model._fitPure(xDataCopy, yDataCopy)

        return bestDec, bestObj

    def getParaList(self):
        """Return the names of all tunable parameters of the wrapped model."""
        return list(self.model.setting.parasValue.keys())
15 changes: 10 additions & 5 deletions UQPyL/surrogates/gp/gaussian_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@

class GPR(Surrogate):

name = "GPR"

def __init__(self, scalers: Tuple[Optional[Scaler], Optional[Scaler]] = (None, None),
polyFeature: PolynomialFeatures = None,
kernel: BaseKernel = RBF(),
Expand All @@ -29,7 +31,7 @@ def __init__(self, scalers: Tuple[Optional[Scaler], Optional[Scaler]] = (None, N
self.fitMode = fitMode

if isinstance(optimizer, Algorithm):
optimizer.verbose = False
# optimizer.verbose = False
optimizer.saveFlag = False
optimizer.logFlag = False
else:
Expand All @@ -38,14 +40,14 @@ def __init__(self, scalers: Tuple[Optional[Scaler], Optional[Scaler]] = (None, N
self.optimizer = optimizer

self.kernel = kernel
self.addSetting(kernel.setting)

self.n_restarts_optimizer = n_restarts_optimizer

###---------------------------------public function---------------------------------------###
def fit(self, xTrain: np.ndarray, yTrain: np.ndarray):

xTrain, yTrain = self.__check_and_scale__(xTrain, yTrain)
self.xTrain = xTrain; self.yTrain = yTrain

self.setKernel(self.kernel, xTrain.shape[1])

Expand All @@ -56,7 +58,7 @@ def fit(self, xTrain: np.ndarray, yTrain: np.ndarray):
self._fitPredictError(xTrain, yTrain)

else:
self._fitPureLikelihood(xTrain, yTrain)
self._fitPure(xTrain, yTrain)

def predict(self, xPred: np.ndarray, Output_std: bool=False):

Expand Down Expand Up @@ -157,9 +159,11 @@ def objFunc(varValues):
self.assignPara(paraInfos, np.exp(bestDec))
self._objfunc(self.xTrain, self.yTrain, record=True) #TODO

def _fitPureLikelihood(self, xTrain, yTrain):
def _fitPure(self, xTrain, yTrain):

self._objfunc( xTrain, yTrain, record=True )
self.xTrain = xTrain; self.yTrain = yTrain

self._objfunc( xTrain, yTrain, record=True )

def _fitLikelihood(self, xTrain: np.ndarray, yTrain: np.ndarray):

Expand Down Expand Up @@ -210,6 +214,7 @@ def objFunc(varValues):
self.assignPara(paraInfos, np.exp(bestDec))

#Prepare for prediction
self.xTrain = xTrain; self.yTrain = yTrain
self._objfunc(xTrain, yTrain, record=True)

def _objfunc(self, xTrain, yTrain, record=False):
Expand Down
19 changes: 16 additions & 3 deletions UQPyL/surrogates/kriging/kriging.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ class KRG(Surrogate):
*'MaxminScaler'
"""
name = "KRG"

def __init__(self,
scalers: Tuple[Optional[Scaler], Optional[Scaler]]=(None, None),
polyFeature: PolynomialFeatures=None,
Expand Down Expand Up @@ -114,7 +116,8 @@ def __init__(self,
if not isinstance(kernel, BaseKernel):
raise ValueError("The kernel must be the instance of surrogates.kriging.kernel!")

self.kernel=kernel
self.kernel = kernel
self.addSetting(kernel.setting)

if(regression=='poly0'):
self.regrFunc=regrpoly0
Expand Down Expand Up @@ -171,16 +174,25 @@ def fit(self, xTrain: np.ndarray, yTrain: np.ndarray):

elif(self.fitMode =='predictError'):
self._fit_predict_error(xTrain, yTrain)

self.xTrain = xTrain; self.yTrain = yTrain

else:
self._fitPure(xTrain, yTrain)

###-------------------private functions----------------------###
def setKernel(self, kernel, N):
    """Initialize ``kernel`` for ``N`` input dimensions and register its
    tunable parameters with this surrogate's settings."""

    kernel.initialize(N)
    # Merge the kernel's parameter settings into the model's own settings
    # so they participate in tuning.
    self.addSetting(kernel.setting)
    # kernel.setting=self.setting

def _fitPure(self, xTrain, yTrain):
    """Fit the Kriging model on already-scaled training data with the
    current parameter values (no internal hyper-parameter search)."""

    # Cache the training set; predict() presumably reads it — TODO confirm.
    self.xTrain = xTrain; self.yTrain = yTrain

    # F, D look like the regression matrix and pairwise-distance data
    # produced by _initialize — NOTE(review): verify against _initialize.
    F, D= self._initialize(xTrain)

    # record=True stores the fitted quantities on the instance for prediction.
    self._objFunc(yTrain, F, D, record=True)

def _fit_predict_error(self, tol_xTrain, tol_yTrain):

RS = RandSelect(20)
Expand Down Expand Up @@ -297,6 +309,7 @@ def objFunc(thetas):
bestDec = res.bestDec
bestObj = obj

self.xTrain = xTrain; self.yTrain = yTrain
self.assignPara(paraInfos, bestDec)
self._objFunc(yTrain, F, D, record=True)

Expand Down
56 changes: 30 additions & 26 deletions UQPyL/surrogates/rbf/radial_basis_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

from .kernel import BaseKernel, Cubic
from ..surrogateABC import Surrogate
from ...optimization.algorithmABC import Algorithm
from ...utility.metrics import r_square
from ...utility.scalers import Scaler
from ...utility.polynomial_features import PolynomialFeatures
Expand All @@ -14,7 +13,10 @@
class RBF(Surrogate):
'''
Radial basis function network
'''
'''

name = "RBF"

def __init__(self, scalers: Tuple[Optional[Scaler], Optional[Scaler]]=(None, None), polyFeature: PolynomialFeatures=None,
kernel: Optional[BaseKernel]=Cubic(),
C_smooth: int=0.0, C_smooth_lb: int=1e-5, C_smooth_ub: int=1e5):
Expand All @@ -24,11 +26,12 @@ def __init__(self, scalers: Tuple[Optional[Scaler], Optional[Scaler]]=(None, Non
self.setPara("C_smooth", C_smooth, C_smooth_lb, C_smooth_ub)

self.kernel = kernel
self.setting.mergeSetting(kernel.setting)


def setKernel(self, kernel: BaseKernel):
    """Attach ``kernel`` to this RBF model and merge its tunable
    parameter settings into the model's settings."""

    self.kernel = kernel

    self.setting.mergeSetting(self.kernel.setting)

def _get_tail_matrix(self, kernel: BaseKernel, train_X: np.ndarray):
Expand Down Expand Up @@ -72,46 +75,47 @@ def _fitPure(self, xTrain: np.ndarray, yTrain: np.ndarray):
self.coe_lambda=solve[:nSample, :]
self.xTrain=xTrain

def _fitPredictError(self, xTrain: np.ndarray, yTrain: np.ndarray):
tol_xTrain = np.copy(xTrain)
tol_yTrain = np.copy(yTrain)
# def _fitPredictError(self, xTrain: np.ndarray, yTrain: np.ndarray):
# tol_xTrain = np.copy(xTrain)
# tol_yTrain = np.copy(yTrain)

RS = RandSelect(10)
train, test = RS.split(tol_xTrain)
# RS = RandSelect(10)
# train, test = RS.split(tol_xTrain)

xTest = tol_xTrain[test,:]; yTest = tol_yTrain[test,:]
xTrain = tol_xTrain[train,:]; yTrain = tol_yTrain[train,:]
# xTest = tol_xTrain[test,:]; yTest = tol_yTrain[test,:]
# xTrain = tol_xTrain[train,:]; yTrain = tol_yTrain[train,:]

self.xTrain = xTrain; self.yTrain = yTrain
# self.xTrain = xTrain; self.yTrain = yTrain

nameList = list(self.setting.parasValue.keys())
# nameList = list(self.setting.parasValue.keys())

paraInfos, ub, lb = self.setting.getParaInfos(nameList)
nInput = ub.size #TODO
# paraInfos, ub, lb = self.setting.getParaInfos(nameList)
# nInput = ub.size #TODO

def objFunc(varValues):
# def objFunc(varValues):

varValues = np.exp(varValues)
objs = np.ones(varValues.shape[0])
# varValues = np.exp(varValues)
# objs = np.ones(varValues.shape[0])

for i, varValue in enumerate(varValues):
# for i, varValue in enumerate(varValues):

self.assignPara(paraInfos, varValue)
# self.assignPara(paraInfos, varValue)

obj=self._fitPure(xTrain, yTrain)
if obj==-np.inf:
objs[i] = obj*-1
# obj=self._fitPure(xTrain, yTrain)
# if obj==-np.inf:
# objs[i] = obj*-1

else:
yPred = self.predict(self.__X_inverse_transform__(xTest))
objs[i] = -1*r_square(self.__Y_inverse_transform__(yTest), yPred)
# else:
# yPred = self.predict(self.__X_inverse_transform__(xTest))
# objs[i] = -1*r_square(self.__Y_inverse_transform__(yTest), yPred)

return objs.reshape( (-1, 1) )
# return objs.reshape( (-1, 1) )

###--------------------------public functions----------------------------###
def fit(self, xTrain: np.ndarray, yTrain: np.ndarray):
    """Fit the RBF model: validate/scale the training data, then solve
    for the interpolation coefficients with the current parameters."""

    # Scaling/validation is handled by the Surrogate base class.
    xTrain, yTrain=self.__check_and_scale__(xTrain, yTrain)

    self._fitPure(xTrain, yTrain)

def predict(self, xPred: np.ndarray):
Expand Down
Loading

0 comments on commit c185a00

Please sign in to comment.