From 063966d73074c6497bc2fd610f09bd1c06a3193b Mon Sep 17 00:00:00 2001 From: Michael Zhang Date: Sat, 7 Oct 2023 22:42:35 -0500 Subject: [PATCH] gaus class --- gauss_class/gauss2d.m | 35 ++++++++++++++++++++++++ gauss_class/gauss_class_1D.m | 33 +++++++++++++++++++++++ gauss_class/gauss_class_2D.m | 52 ++++++++++++++++++++++++++++++++++++ 3 files changed, 120 insertions(+) create mode 100644 gauss_class/gauss2d.m create mode 100644 gauss_class/gauss_class_1D.m create mode 100644 gauss_class/gauss_class_2D.m diff --git a/gauss_class/gauss2d.m b/gauss_class/gauss2d.m new file mode 100644 index 0000000..ea2143d --- /dev/null +++ b/gauss_class/gauss2d.m @@ -0,0 +1,35 @@ +% CSCI 5521 Introduction to Machine Learning +% Rui Kuang +% Demonstration of 2-D Gaussians + +%Try Sigma = [0.5, 0;0, 0.5];Sigma = [0.7, 0;0, 0.3];Sigma = [0.7, 0.2;0.2, 0.3] + +mu = [0 0]; +Sigma = [0.7, 0.2;0.2, 0.3]; +x1 = -3:.2:3; x2 = -3:.2:3; +[X1,X2] = meshgrid(x1,x2); +%pdf +F = mvnpdf([X1(:) X2(:)],mu,Sigma); +F = reshape(F,length(x2),length(x1)); +subplot(1,2,1); +surf(x1,x2,F); +caxis([min(F(:))-.5*range(F(:)),max(F(:))]); +axis([-3 3 -3 3 0 .4]) +xlabel('x1'); ylabel('x2'); zlabel('Probability Density'); +subplot(1,2,2); +contour(x1,x2,F,[.0001 .001 .01 .05:.1:.95 .99 .999 .9999],'ShowText','on'); + +%contour +figure +i=1; +for rho = -0.8:0.4:0.8 + Sigma(1,2)=rho*sqrt(Sigma(1,1)*Sigma(2,2)); + Sigma(2,1)=Sigma(1,2); + F = mvnpdf([X1(:) X2(:)],mu,Sigma); + F = reshape(F,length(x2),length(x1)); + subplot(1,5,i); + i=i+1; + contour(x1,x2,F,[.0001 .001 .01 .05:.1:.95 .99 .999 .9999]); + title (sprintf('rho = %f',rho)); + xlabel('x1'); ylabel('x2'); +end \ No newline at end of file diff --git a/gauss_class/gauss_class_1D.m b/gauss_class/gauss_class_1D.m new file mode 100644 index 0000000..0682f9c --- /dev/null +++ b/gauss_class/gauss_class_1D.m @@ -0,0 +1,33 @@ +% CSCI 5521 Introduction to Machine Learning +% Rui Kuang +% Demonstration of Classification by 1-D Gaussians + +%mean and standard 
deviation of class blue +mu1 = -2;sd1 = 2; + +%mean and standard deviation of class red +mu2 = 2;sd2 = 4; + +%generate x-axis +sd = max(sd1,sd2); +ix = -6*sd-1:1e-1:6*sd+1; %covers more than 99% of the curve + +iy1 = pdf('normal', ix, mu1, sd1); +iy2 = pdf('normal', ix, mu2, sd2); + +subplot(1,2,1); +plot(ix,iy1,'b'); hold on; +plot(ix,iy2,'r'); +title('PDF P(X)'); + +%prior +p1=0.8; +p2=1-p1; + +%calculate the posteriors +iy1_n = p1*iy1 ./ (p1*iy1+p2*iy2); +iy2_n = p2*iy2 ./ (p1*iy1+p2*iy2); +subplot(1,2,2); +plot(ix,iy1_n,'b'); hold on; +plot(ix,iy2_n,'r'); +title('Posteriors P(C | x)'); \ No newline at end of file diff --git a/gauss_class/gauss_class_2D.m b/gauss_class/gauss_class_2D.m new file mode 100644 index 0000000..473afbd --- /dev/null +++ b/gauss_class/gauss_class_2D.m @@ -0,0 +1,52 @@ +% CSCI 5521 Introduction to Machine Learning +% Rui Kuang +% Demonstration of Classification by 2-D Gaussians + +mu1 = [-1 -1]; +mu2 = [1 1]; + +% Equal diagonal covariance matrix + Sigma1 = [1 0; 0 1]; + Sigma2 = [1 0; 0 1]; + +% Diagonal covariance matrix +% Sigma1 = [1 0; 0 0.5]; +% Sigma2 = [1 0; 0 0.5]; + +% Shared covariance matrix +% Sigma1 = [1 0.3; 0.3 0.5]; +% Sigma2 = [1 0.3; 0.3 0.5]; + + x1 = -10:.1:10; x2 = -10:.1:10; + +% covariance matrix (increase the range for visualization) +% Sigma1 = [1 0.1; 0.1 0.5]; +% Sigma2 = [0.5 0.3; 0.3 1]; +% x1 = -40:.1:40; x2 = -40:.1:40; + +[X1,X2] = meshgrid(x1,x2); +%pdf1 +F1 = mvnpdf([X1(:) X2(:)],mu1,Sigma1); +F1 = reshape(F1,length(x2),length(x1)); +subplot(1,2,1); +surf(x1,x2,F1); hold on; + +%pdf2 +F2 = mvnpdf([X1(:) X2(:)],mu2,Sigma2); +F2 = reshape(F2,length(x2),length(x1)); +surf(x1,x2,F2); +caxis([min(F2(:))-.5*range(F2(:)),max(F2(:))]); +axis([-4 4 -4 4 0 .4]) +xlabel('x1'); ylabel('x2'); zlabel('Probability Density'); + +%decision boundary +%F1 = mvnpdf([X1(:) X2(:)],mu1,Sigma1); +%F1 = reshape(F1,length(x2),length(x1)); +%F2 = mvnpdf([X1(:) X2(:)],mu2,Sigma2); +%F2 = reshape(F2,length(x2),length(x1)); + +cmp =
F1 > F2; +subplot(1,2,2); +imagesc(X1(:),X2(:),cmp); + +xlabel('x1'); ylabel('x2');