function [y_pred,p1,beta,bias,ker] = get_ypred_svr(x,y,x_test,nvar,c,p1,kr,C)
% GET_YPRED_SVR  Build an SVR metamodel and predict function values at test points.
%
% This function builds an SVR (Support Vector Regression) metamodel and
% predicts the function value at test points. It can also be used to
% determine and output the SVR parameters ("p1", "beta", "bias", "ker")
% needed to create an analytic function; a value for x_test must still be
% input with the appropriate number of design variables because the
% function expects it.
%
% ---Variable Descriptions---
% x      = normalized design (training points):
%          [num_of_points by num_of_variables] matrix
% y      = vector of function values at each x
% x_test = test point(s): [num_of_points by num_of_variables] matrix
% nvar   = the number of design variables (unused here; kept for interface
%          compatibility with callers)
% c      = user-chosen insensitive-zone parameter (percent of the
%          normalized response range)
% p1     = user-chosen kernel parameter
% kr     = user-chosen kernel function code (see table below)
% C      = user-chosen penalty parameter
%
% y_pred = SVR metamodel prediction at the test point(s)
% beta   = SVR parameter ("Difference of Lagrange Multipliers" - Gunn Toolbox)
% bias   = SVR bias
% ker    = kernel name associated with user-chosen "kr"
%
% --Currently Supported Kernel Functions--
%   kr = 1 -> linear;   kr = 2 -> sigmoid;  kr = 3 -> spline;
%   kr = 4 -> bspline;  kr = 5 -> fourier;  kr = 6 -> erfb;
%   kr = 7 -> anova;    kr = 8 -> rbf;
%
% This function uses the SVM Toolbox by Gunn:
%   Gunn, S.R., "Support vector machines for classification and
%   regression," Technical Report, University of Southampton, 1997.
% Link to download the toolbox:
%   http://www.isis.ecs.soton.ac.uk/resources/svminfo/

% NOTE(review): placeholder path -- point this at the folder where the
% svmgunn toolbox is actually installed before use.
addpath('...\svmgunn')

% NOTE(review): declaring the input p1 and the outputs beta/bias/ker as
% globals makes MATLAB overwrite the local values with any existing global
% values (and issue a warning). Kept because other scripts may read these
% globals -- confirm before removing.
global beta bias ker xt yt b
global p1

n = size(x,1);                          % number of training points

%--------------------------------------------------------------------------
% Normalize responses: zero mean, unit maximum absolute deviation.
%--------------------------------------------------------------------------
dev   = y - ones(n,1)*mean(y);          % deviation of each response from its mean
scale = max(abs(dev),[],1);             % per-column max-abs deviation
scale(scale == 0) = 1;                  % guard: constant response would give 0/0 = NaN
yn    = dev ./ (ones(n,1)*scale);

%--------------------------------------------------------------------------
% SVR setup
%--------------------------------------------------------------------------
% Insensitive zone: "c" is interpreted as a percentage of the normalized
% response range.
range = abs(max(yn) - min(yn));
e = c/100*range;

% Map the user-chosen kernel code to the Gunn-toolbox kernel name; fail
% loudly on an out-of-range code instead of silently reusing a stale
% global "ker".
kernels = {'linear','sigmoid','spline','bspline','fourier','erfb','anova','rbf'};
if ~isscalar(kr) || kr ~= fix(kr) || kr < 1 || kr > numel(kernels)
    error('get_ypred_svr:badKernel', ...
          'kr must be an integer between 1 and %d.', numel(kernels));
end
ker = kernels{kr};

loss = 'eInsensitive';                  % loss function [eInsensitive, quadratic]

%--------------------------------------------------------------------------
% SVR solution (Steve R. Gunn MATLAB toolbox)
%--------------------------------------------------------------------------
[nsv,beta,bias] = svr(x,yn,ker,C,loss,e);

%--------------------------------------------------------------------------
% Prediction, then undo the response normalization.
%--------------------------------------------------------------------------
y_pred = svroutput(x,x_test,ker,beta,bias);
y_pred = y_pred.*scale + mean(y);