%%% Demo / sanity-check script for a multiclass Relevance Vector Machine (RVM).
%%% Generates Gaussian class clusters around chosen centers, runs the general
%%% multiclass `reestimate` routine, reports test error, and (in the binary
%%% case) cross-checks against the faster two-class formulation [Tipping02].
%%% Requires project functions `reestimate` and `logist_phi_response` on the path.

%%% Parameters (magic numbers)
% gamma_a=0; gamma_b=0; % in usual practice, these are defaulted to zero, unless a true Gamma prior on w is desired, rather than the Jeffreys prior (which is an improper prior)
alpha_sc=1;   % starting alpha (initial precision scale for the weight prior)
k=10;         % num dimensions
n=1000;       % num training examples per class
n_test=500;   % num test examples per class
sc=0.8;       % how far apart are classes (scale applied to the class centers)
% Alternative center layouts kept for experimentation:
%cen=[0 1 0; sqrt(3)/2 -1/2 0; -sqrt(3)/2 -1/2 0; 0 0 1];
%cen=[[rand(5,3) zeros(5,1)]; [0 0 0 1]];
%cen=[0 0 0; 1 0 0; 0 1 0; 0 0 1];
cen=[-1 -1; 1 1];

%%% Constructing the sample data
[C,k_true]=size(cen);                 % C = num classes, k_true = intrinsic center dim
if k_true>k
  error('rvm_demo:dims', ...
        'Center dimension k_true=%d exceeds feature dimension k=%d.', k_true, k);
end
cenpad=[cen zeros(C,k-k_true)];       % embed centers into the full k-dim space

% Preallocate instead of growing by concatenation. The randn calls below are
% issued in the same per-class order (train block, then test block) as the
% original concatenating loop, so the random stream — and hence the generated
% data under a fixed seed — is unchanged.
X=zeros(C*n,k);           Y=zeros(C*n,1);
X_test=zeros(C*n_test,k); Y_test=zeros(C*n_test,1);
for i=1:C
  rows   = (i-1)*n      + (1:n);
  rows_t = (i-1)*n_test + (1:n_test);
  offset = cenpad(i,:)*sc;                              % class-i mean, scaled
  X(rows,:)        = randn(n,k)      + repmat(offset,n,1);
  X_test(rows_t,:) = randn(n_test,k) + repmat(offset,n_test,1);
  Y(rows)          = i;
  Y_test(rows_t)   = i;
end
% NOTE(review): the original script also computed
%   alpha0     = alpha_sc*ones(k*(C-1),1);
%   alpha0_phi = 2*alpha_sc*ones(k*C,1);
% but neither value was ever used, so the dead computations are dropped here.

%%% Run multiclass RVM
if C==2, %% if binary classification, try a "faster" version, namely [Tipping02]
  [a0]=reestimate(X,Y,alpha_sc);
end
% Build the per-class basis: each class shares the raw features as its basis.
for i=1:C, phi{i}=X; phi_test{i}=X_test; end;
[a1,w1,b1]=reestimate(phi,Y,alpha_sc*2);
%%% if one wants the test labels, do
Y_pred=logist_phi_response(phi_test,w1,b1);
er_test=sum(Y_test~=Y_pred)/length(Y_test)   % no semicolon: display test error rate
if C==2, %% in binary case, verify the "faster" version agrees with general case
  % Display both: a0 from the binary solver, and the first half of a1 halved
  % (the multiclass run was started at 2*alpha_sc, hence the /2 rescale).
  a0, a1(1:length(a1)/2)/2
end
return