Michael Zhang 2023-10-01 18:09:50 -05:00
parent 744d84b767
commit 7f43b1d097
14 changed files with 1614 additions and 17 deletions

.gitignore vendored Normal file

@@ -0,0 +1,3 @@
*.asv
.vscode
*.pdf

Bayes_Learning.m Normal file

@@ -0,0 +1,49 @@
% implements Bayes_Learning, returns the outputs (p1: learned Bernoulli
% parameters of the first class, p2: learned Bernoulli parameters of the
% second class, pc1: best prior of the first class, pc2: best prior of the
% second class)
function [p1,p2,pc1,pc2] = Bayes_Learning(training_data, validation_data)
[train_row_size, column_size] = size (training_data); % dimension of training data
[valid_row_size, ~] = size (validation_data); % dimension of validation data
X = training_data(1:train_row_size, 1:column_size-1); % training features
y = training_data(:, column_size); % training labels
% (1) find label counts of class 1 and class 2 (same approach as MLE_Learning.m)
class1_rows = find(y == 1);
class1_count = length(class1_rows);
class2_rows = find(y == 2);
class2_count = length(class2_rows);
% (2) maximum likelihood estimates p1, p2: per-feature Bernoulli means
p1 = sum(X(class1_rows, :)) / class1_count;
p2 = sum(X(class2_rows, :)) / class2_count;
% Try different priors P(C_1) and P(C_2) on the validation set: for each pair
% we compute the posterior from the priors, the MLE estimates p1, p2, and each
% 1-by-D sample x, and keep the pair with the lowest validation error rate
Xv = validation_data(:, 1:column_size-1); % validation features
yv = validation_data(:, column_size);     % validation labels
error_table = zeros(11,4); % build an error table with 4 columns of : sigma, P(C1), P(C2), error_rate
index = 1; % row index of error table
for sigma = [0.00001,0.0001,0.001,0.01,0.1,1,2,3,4,5,6]
    P_C1 = 1-(exp(-sigma)); % set priors using formula P(C1)=1-(exp(-sigma))
    P_C2 = 1 - P_C1;
    error_count = 0; % number of misclassified validation samples
    % (3) likelihood for class 1 and class 2, then the posterior for both
    % classes (posterior = prior x likelihood; the shared evidence term is
    % dropped since it does not affect the comparison). Classify each
    % validation sample as whichever class has the higher posterior; on a
    % mistake, increment error_count (this mirrors the loop in Bayes_Testing.m)
    for i = 1:valid_row_size
        x = Xv(i, :);
        postc1 = prod(p1 .^ x .* (1 - p1) .^ (1 - x)) * P_C1;
        postc2 = prod(p2 .^ x .* (1 - p2) .^ (1 - x)) * P_C2;
        if postc1 > postc2
            lab = 1;
        else
            lab = 2;
        end
        if lab ~= yv(i)
            error_count = error_count + 1;
        end
    end
    error_table(index,1) = sigma;
    error_table(index,2) = P_C1;
    error_table(index,3) = P_C2;
    error_table(index,4) = error_count/valid_row_size; % update error table
    index = index + 1;
end
% get the best priors
[~, I] = min(error_table(:,4)); % find row index of the lowest error rate on validation set
pc1 = error_table(I,2);
pc2 = error_table(I,3); % best priors
% print error table to terminal
fprintf('\n Error rates of all priors on validation set: \n\n');
fprintf(' sigma P(C1) P(C2) error rate on validation set\n\n');
disp(error_table);
end
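As a side note, the schedule P(C1) = 1 - exp(-sigma) sweeps the prior from nearly 0 to nearly 1 over the given grid; a standalone sketch (variable names illustrative, not part of the assignment code) to see the values:

% sketch: inspect the prior schedule P(C1) = 1 - exp(-sigma) on its own
sigmas = [0.00001,0.0001,0.001,0.01,0.1,1,2,3,4,5,6];
priors = 1 - exp(-sigmas);    % elementwise; spans ~1e-5 up to ~0.9975
disp([sigmas' priors']);      % one (sigma, P(C1)) pair per row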

Bayes_Testing.m Normal file

@@ -0,0 +1,41 @@
% implements Bayes_Testing, returns the test error (p1: learned Bernoulli
% parameters of the first class, p2: learned Bernoulli parameters of the
% second class; pc1: best prior of the first class, pc2: best prior of the
% second class)
function test_error = Bayes_Testing(test_data, p1, p2, pc1, pc2)
% (1) classify the test set using the learned parameters p1, p2, pc1, pc2
[test_row_size, column_size] = size(test_data); % dimension of test data
X = test_data(1:test_row_size, 1:column_size-1); % test data
y = test_data(:,column_size); % test labels
c = 0; % count of correctly classified samples
for i = 1:test_row_size
    x = X(i, :);
    correct_label = y(i);
    % posterior = likelihood x prior (the shared evidence term is omitted
    % since it does not affect the comparison)
    postc1 = prod(p1 .^ x .* (1 - p1) .^ (1 - x)) * pc1;
    postc2 = prod(p2 .^ x .* (1 - p2) .^ (1 - x)) * pc2;
    if postc1 > postc2
        lab = 1;
    else
        lab = 2;
    end
    if lab == correct_label
        c = c + 1;
    end
end
% (2) error rate = # of incorrectly classified / total number of test samples
test_error = (test_row_size - c) / test_row_size;
fprintf('Error rate on the test dataset is: \n\n');
disp(test_error);
end
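For intuition, a tiny worked call of the posterior computation with made-up two-feature parameters (all values here are hypothetical, not from the assignment data):

% hypothetical example: two binary features, equal priors
p1 = [0.8 0.3]; p2 = [0.4 0.9]; pc1 = 0.5; pc2 = 0.5;
x = [1 0];                                            % one sample
postc1 = prod(p1 .^ x .* (1 - p1) .^ (1 - x)) * pc1   % (0.8*0.7)*0.5 = 0.28
postc2 = prod(p2 .^ x .* (1 - p2) .^ (1 - x)) * pc2   % (0.4*0.1)*0.5 = 0.02
% postc1 > postc2, so this sample would be labeled class 1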

assignments/hwk01/HW1.md Normal file

@@ -0,0 +1,25 @@
---
geometry: margin=2cm
output: pdf_document
---
\renewcommand{\c}[1]{\textcolor{gray}{#1}}
1. **(20 points)**
\c{Derive the VC dimension of the following classifiers.}
2.
3. **(20 points)**
\c{Let $P(x|C)$ denote a Bernoulli density function for a class $C \in \{C_1, C_2\}$
and $P(C)$ denote the prior.}
a. \c{Given the priors $P (C_1)$ and $P (C_2)$, and the Bernoulli densities
specified by $p_1 \equiv p(x = 0|C_1)$ and $p_2 \equiv p(x = 0|C_2)$, derive the
classification rules for classifying a sample $x$ into $C_1$ and $C_2$ based on the
posteriors $P (C_1|x)$ and $P (C_2|x)$. (Hint: give rules for classifying $x = 0$ and
$x = 1$.)}
For $x=0$, the posteriors $P(C_i | x)$ are given by $P(C_i | x = 0) = \frac{p(x = 0 | C_i) p(C_i)}{p(x = 0)}$.
- $p(x = 0 | C_i)$ is given to us as $p_i$, i.e. $p_1$ for $C_1$ and $p_2$ for $C_2$
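- the evidence $p(x = 0)$ is the same for both classes, so it cancels when the posteriors are compared

Sketching the resulting rule: classify $x = 0$ as $C_1$ iff $p_1 P(C_1) > p_2 P(C_2)$, and as $C_2$ otherwise; likewise, since $p(x = 1 | C_i) = 1 - p_i$, classify $x = 1$ as $C_1$ iff $(1 - p_1) P(C_1) > (1 - p_2) P(C_2)$.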


@@ -0,0 +1,29 @@
% solves question 1d: print a table of the error rate of each prior on the
% validation set, and the error rate using the best prior on the test set.
% uses the functions MLE_Learning.m, Bayes_Learning.m, and Bayes_Testing.m
% load data
load('training_data.txt');
load('validation_data.txt');
load('testing_data.txt');
%Part 1: using the first two feature columns (plus labels) to test the
%MLE_Learning and Bayes_Testing functions
training2_data = training_data(:,[1,2,end]);
testing2_data = testing_data(:,[1,2,end]);
[p1,p2,pc1,pc2] = MLE_Learning(training2_data);
test_error = Bayes_Testing(testing2_data, p1, p2, pc1, pc2); % use parameters to calculate test error
%Part 2: using the complete dataset to test MLE_Learning
[p1,p2,pc1,pc2] = MLE_Learning(training_data);
test_error = Bayes_Testing(testing_data, p1, p2, pc1, pc2); % use parameters to calculate test error
%Part 3: using the validation set to do Bayes_Learning
[p1,p2,pc1,pc2] = Bayes_Learning(training_data, validation_data); % get p1, p2, pc1, pc2
[pc1,pc2] %show the best priors
test_error = Bayes_Testing(testing_data, p1, p2, pc1, pc2); % use parameters to calculate test error
% by calling Bayes_Learning and Bayes_Testing, the error table on the
% validation data and the test error are automatically printed to the
% command window

MLE_Learning.m Normal file

@@ -0,0 +1,25 @@
% implements MLE_Learning, returns the outputs (p1: learned Bernoulli
% parameters of the first class, p2: learned Bernoulli parameters of the
% second class; pc1: prior of the first class, pc2: prior of the
% second class)
function [p1,p2,pc1,pc2] = MLE_Learning(training_data)
[train_row_size, column_size] = size(training_data); % dimension of training data
X = training_data(1:train_row_size, 1:column_size-1); %Training data
y = training_data(:,column_size); % training labels
% (1) find label counts of class 1 and class 2
class1_rows = find(y == 1);
class1_count = length(class1_rows);
class2_rows = find(y == 2);
class2_count = length(class2_rows);
% (2) compute priors pc1, pc2
pc1 = class1_count / train_row_size;
pc2 = class2_count / train_row_size;
% (3) compute maximum likelihood estimates (MLE) p1, p2
p1 = sum(X(class1_rows, :)) / class1_count;
p2 = sum(X(class2_rows, :)) / class2_count;
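Since the Bernoulli MLE is just the per-feature sample mean within each class, an equivalent formulation (illustrative, not part of the submitted code) is:

% equivalent to the sums above: per-class, per-feature sample means
p1_alt = mean(X(class1_rows, :), 1);  % same as sum(X(class1_rows,:))/class1_count
p2_alt = mean(X(class2_rows, :), 1);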


@@ -0,0 +1,2 @@
watch:
	watchexec -e md -- pandoc HW1.md -o HW1.pdf


@@ -0,0 +1,14 @@
import sympy
from sympy.abc import k, n, theta

def prob_2a():
    # exponential density f(x; theta) = (1/theta) * exp(-x/theta);
    # sympy.Sum needs a symbolic expression (not a Python function), so build
    # the log-likelihood of n i.i.d. samples x_1..x_n symbolically
    x = sympy.IndexedBase('x')
    f = (1 / theta) * sympy.exp(-x[k] / theta)
    loglik = sympy.Sum(sympy.expand_log(sympy.log(f), force=True), (k, 1, n))
    # the summand expands to -log(theta) - x[k]/theta; maximizing over theta
    # gives the MLE theta_hat = (1/n) * Sum(x[k], (k, 1, n))
    print(loglik)

prob_2a()


@@ -0,0 +1,200 @@
1 0 0 0 0 0 0 0 1 1 1
0 0 0 0 1 0 0 0 0 0 1
1 1 0 1 1 0 1 1 1 1 1
1 1 0 0 0 0 1 0 0 0 1
0 1 0 0 1 0 1 0 1 0 1
1 1 0 0 1 0 0 1 1 0 1
1 1 0 0 1 0 0 0 0 0 1
1 0 0 0 0 0 0 1 0 0 1
1 0 0 0 0 0 1 0 1 0 1
0 0 0 0 0 0 1 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
1 1 0 0 0 0 1 0 0 0 1
1 1 0 0 0 0 0 0 0 0 1
1 0 0 0 0 0 0 0 1 0 1
1 1 0 0 1 0 0 0 0 1 1
1 0 0 0 1 0 1 0 0 1 1
1 1 0 0 0 0 0 0 0 1 1
0 1 0 0 0 0 0 1 1 1 1
1 0 0 0 0 0 1 0 1 1 1
1 0 0 0 0 0 1 0 0 0 1
1 1 0 0 0 0 1 0 1 0 1
1 1 0 0 1 0 1 0 1 0 1
0 0 0 0 1 0 0 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
0 1 0 0 1 0 0 1 1 1 1
1 1 0 0 1 0 1 0 1 0 1
1 0 0 0 0 0 0 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
1 1 0 0 0 0 1 0 1 0 1
0 1 0 0 1 0 1 0 0 1 1
0 1 0 0 1 0 0 0 1 0 1
0 1 0 0 1 0 0 0 1 0 1
1 0 0 0 0 0 0 1 1 1 1
1 1 0 1 1 0 0 0 0 0 1
1 1 0 0 1 0 0 0 1 0 1
0 0 0 0 0 0 0 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
1 1 0 0 0 0 0 0 0 1 1
0 1 0 0 1 0 1 0 0 0 1
1 0 0 1 1 0 0 0 1 0 1
1 1 0 0 1 0 0 0 1 0 1
1 1 0 0 0 0 0 0 0 1 1
0 0 1 0 0 0 0 0 1 0 1
0 0 0 0 0 0 0 1 1 1 1
0 1 0 0 0 0 0 0 1 1 1
1 1 0 0 1 0 0 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
1 0 0 0 0 0 0 0 1 0 1
1 1 0 0 1 0 0 0 0 0 1
1 1 0 0 1 0 0 1 0 1 1
0 1 0 0 1 0 1 1 1 1 2
0 0 1 0 0 0 1 1 1 1 2
1 0 0 0 0 0 1 1 1 1 2
1 0 0 0 1 0 1 0 1 1 2
1 0 1 0 1 0 1 1 1 1 2
0 1 1 0 0 0 0 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 1 1 1 0 1 0 2
1 0 0 0 0 0 1 0 1 0 2
1 1 0 0 0 0 1 1 1 1 2
0 0 1 0 0 0 1 1 1 1 2
1 0 0 0 1 0 1 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
1 1 0 0 0 0 1 1 1 1 2
1 1 0 0 1 0 1 0 1 1 2
1 1 1 0 0 0 1 1 1 1 2
0 1 0 0 0 0 1 0 1 0 2
0 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 1 1 0 1 0 1 0 1 1 2
0 0 0 0 0 0 1 0 1 1 2
1 0 0 0 0 0 1 0 1 1 2
0 0 0 0 1 1 0 1 1 1 2
1 1 1 0 0 0 1 0 1 0 2
1 1 0 0 1 0 1 1 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 0 0 1 1 2
1 1 0 0 0 0 0 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 1 0 0 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 0 2
1 1 0 0 1 1 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 1 0 0 0 0 1 1 1 1 2
0 0 0 0 1 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 1 1 1 2
1 1 0 0 1 0 0 0 1 1 2
1 0 0 0 1 0 0 1 1 0 2
1 1 1 0 1 1 1 1 1 1 2
0 1 0 0 1 0 0 1 1 1 2
0 0 0 0 0 0 1 0 1 1 2
1 1 1 0 0 0 1 1 1 1 2
1 1 0 0 0 0 1 1 1 1 2
0 1 0 0 0 0 1 1 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 0 1 1 1 2
0 1 0 0 0 0 1 1 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 0 1 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 1 2
1 1 1 0 1 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 0 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 1 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 0 0 0 0 1 0 1 1 1 2
0 0 0 0 0 0 0 0 1 1 2
0 1 0 0 0 1 1 1 1 0 2
1 1 0 0 0 0 1 0 1 0 2
1 1 0 0 1 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 0 2
0 1 0 0 1 1 1 0 1 0 2
0 1 0 1 0 0 1 1 1 1 2
0 1 0 0 1 1 1 0 1 1 2
1 1 0 0 0 0 1 1 0 1 2
0 0 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 1 2
0 0 0 0 1 1 1 1 1 0 2
1 1 0 0 0 0 1 0 1 0 2
1 0 0 0 1 0 1 1 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 0 2
0 1 0 1 0 0 0 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
0 1 0 1 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 0 2
0 1 1 0 0 0 1 0 1 1 2
1 0 0 0 1 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 0 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 1 0 1 0 1 1 2
0 0 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 0 0 0 1 1 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 1 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 0 1 0 0 1 0 1 1 2
1 1 0 0 1 1 1 1 1 1 2
0 0 0 0 1 1 1 0 1 1 2
1 1 0 1 1 1 1 1 1 0 2
0 1 0 0 1 0 1 0 1 0 2
0 1 1 0 1 1 1 1 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 0 1 2
0 1 0 0 0 0 1 0 1 1 2
1 0 0 1 0 0 1 0 1 1 2
1 1 0 0 0 0 1 1 1 1 2
1 1 0 0 0 0 1 0 1 0 2
1 1 0 0 0 1 0 0 1 0 2
0 1 0 0 0 0 1 1 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 1 0 0 1 0 1 1 1 1 2
1 0 0 0 0 0 1 1 1 1 2
0 1 0 0 1 1 1 1 1 1 2
1 1 0 0 0 1 1 0 1 0 2
1 0 0 0 1 0 1 0 1 0 2
0 1 0 0 1 1 0 0 0 1 2
1 0 0 0 1 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
0 1 0 0 0 0 1 0 1 0 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 0 2
0 0 0 0 0 1 1 1 1 1 2
0 1 0 0 1 0 1 1 1 1 2
0 1 0 0 1 0 1 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
1 1 0 0 0 0 1 0 1 0 2
0 1 0 0 0 0 1 1 1 0 2
1 1 0 0 1 0 1 0 1 1 2
1 1 0 0 1 0 1 0 1 0 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 0 0 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 0 2
1 0 0 0 1 0 1 1 1 1 2
0 0 0 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 1 0 0 0 1 0 1 1 2
1 1 0 0 1 1 1 0 1 0 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
1 1 1 0 0 1 1 0 1 0 2
1 1 0 0 0 0 1 1 1 1 2
1 1 0 0 0 0 0 0 1 1 2

File diff suppressed because it is too large


@@ -0,0 +1,200 @@
0 0 0 1 1 0 1 0 0 0 1
1 0 0 0 1 0 0 0 1 1 1
0 0 0 0 1 0 0 0 1 1 1
1 0 0 0 0 0 0 0 1 1 1
1 0 0 0 0 0 0 1 0 1 1
1 1 0 0 1 0 0 1 0 0 1
1 1 1 0 1 0 0 0 1 1 1
0 1 0 0 1 0 1 0 0 0 1
1 0 0 0 0 0 1 0 1 0 1
1 1 0 0 0 0 0 0 1 0 1
0 0 0 0 0 0 0 0 0 1 1
1 0 0 0 1 0 0 0 0 1 1
1 1 0 1 0 0 0 0 0 0 1
1 1 0 0 1 0 0 0 1 1 1
0 1 0 1 1 0 0 0 1 1 1
1 0 0 0 1 0 0 0 1 0 1
1 1 0 0 1 0 0 0 0 0 1
1 1 0 0 1 0 0 0 0 0 1
1 1 0 0 1 0 1 0 1 1 1
1 1 0 1 1 0 1 0 1 0 1
1 1 0 0 0 0 0 0 0 1 1
0 0 0 0 0 0 0 0 0 1 1
1 1 0 0 1 0 0 0 0 0 1
1 1 0 0 0 0 0 0 1 0 1
1 1 0 0 1 0 0 0 0 1 1
1 1 0 0 0 0 0 0 1 1 1
0 1 0 0 1 0 0 0 0 0 1
0 1 1 0 1 0 0 0 1 0 1
1 0 0 0 1 1 0 0 1 1 1
0 1 0 0 1 0 0 0 0 0 1
1 0 0 0 1 0 0 0 0 0 1
1 1 0 0 1 0 0 0 1 1 1
1 1 0 0 1 0 0 0 1 0 1
1 1 0 0 0 0 0 1 1 1 1
1 0 0 0 1 0 0 0 1 0 1
1 1 0 0 0 0 1 0 1 1 1
1 0 0 0 0 0 0 0 1 1 1
1 1 0 0 0 0 0 0 1 0 1
1 1 0 0 1 0 0 0 0 0 1
1 0 0 0 0 0 1 0 1 1 1
1 0 0 1 0 0 0 0 1 1 1
1 1 0 0 0 0 0 0 0 0 1
1 1 0 0 0 0 0 0 0 0 1
1 1 0 0 0 0 1 0 1 1 1
1 0 0 0 1 0 0 0 0 0 1
0 1 0 0 0 0 0 0 1 1 1
1 1 0 0 0 0 0 0 1 0 1
0 1 1 0 0 0 0 0 1 0 1
0 0 0 0 0 0 0 0 0 0 1
0 1 0 0 1 0 0 0 1 0 1
0 1 0 0 0 0 1 1 1 1 2
0 1 0 0 1 1 1 0 1 1 2
1 1 1 0 1 1 1 1 1 1 2
1 1 0 0 0 0 0 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 0 0 0 0 0 0 0 1 0 2
1 1 0 0 0 0 1 0 1 1 2
0 0 1 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 0 0 1 1 0 0 1 1 2
1 0 0 0 0 0 1 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 1 0 0 1 0 1 0 1 1 2
1 1 0 0 0 0 1 1 1 1 2
1 1 0 0 0 0 1 1 1 1 2
1 1 0 0 0 1 0 0 1 1 2
1 1 0 0 0 0 0 1 1 1 2
0 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 0 2
1 1 0 0 1 0 1 0 1 0 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 1 1 0 1 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 0 2
0 0 0 0 0 1 1 0 1 0 2
1 0 0 0 0 1 0 0 1 1 2
1 0 1 0 1 0 1 0 1 1 2
0 1 0 1 1 1 0 0 1 0 2
1 1 0 0 0 1 1 1 1 1 2
0 1 0 0 0 1 1 1 1 1 2
0 1 0 1 0 0 0 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 1 0 1 1 1 1 1 1 1 2
0 0 0 0 0 1 1 0 1 1 2
1 1 0 0 1 0 1 0 1 0 2
1 0 0 0 1 0 1 1 1 1 2
0 1 0 0 0 1 1 0 1 0 2
0 0 0 0 0 0 1 0 1 1 2
0 1 0 0 1 1 1 0 1 1 2
0 1 1 0 0 1 1 1 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
0 0 0 0 1 1 1 1 1 1 2
0 1 1 0 0 0 1 0 1 1 2
0 1 0 0 1 1 1 1 1 1 2
1 0 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 0 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 1 1 1 1 1 2
1 1 0 0 1 0 1 0 1 0 2
0 1 0 0 0 1 1 0 1 1 2
1 0 0 0 1 0 1 1 1 1 2
1 1 1 0 0 0 1 0 1 0 2
0 0 0 0 1 0 0 1 1 1 2
0 0 0 0 1 1 1 0 1 0 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 0 0 1 0 2
1 1 0 0 1 0 0 0 1 1 2
0 0 0 0 0 1 1 1 1 0 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 0 1 0 1 0 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 1 1 1 1 1 2
1 1 1 0 0 1 1 0 1 1 2
1 1 1 0 0 0 1 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 1 1 1 0 1 1 2
0 1 1 0 0 1 1 1 1 1 2
1 1 0 0 0 0 0 1 1 0 2
0 1 0 0 0 0 1 0 1 0 2
1 1 0 0 1 0 1 1 1 1 2
1 1 0 0 0 1 1 1 1 1 2
1 1 0 0 1 1 1 0 0 1 2
1 1 0 0 0 0 1 0 1 0 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
0 1 0 0 1 1 1 0 1 1 2
1 1 0 0 0 1 0 0 1 0 2
1 1 0 0 1 1 1 1 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 0 2
0 1 0 0 0 0 0 1 1 1 2
1 1 0 0 0 0 1 1 1 1 2
0 0 0 0 1 0 1 1 1 1 2
1 0 1 0 0 0 0 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
1 0 0 0 0 1 1 1 1 0 2
0 1 0 0 0 0 1 0 1 1 2
1 1 0 0 0 1 1 0 1 0 2
1 1 1 0 0 1 1 1 1 1 2
0 1 0 0 0 1 1 0 1 1 2
1 0 0 0 1 0 1 1 1 1 2
1 1 0 0 0 1 0 0 1 1 2
0 1 0 1 1 0 1 0 1 1 2
0 1 0 0 1 0 1 0 1 0 2
0 1 0 0 1 1 1 1 1 0 2
1 0 0 0 1 1 1 0 1 1 2
0 1 0 0 1 0 1 1 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 0 0 0 0 0 0 0 1 1 2
1 0 0 1 1 0 0 1 1 1 2
0 0 0 0 1 0 1 0 1 1 2
0 1 0 0 1 1 1 1 1 1 2
1 1 0 0 1 1 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 1 0 0 0 0 1 0 1 0 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 1 1 1 0 1 1 2
0 1 0 0 0 1 0 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 1 0 0 1 1 1 1 1 0 2
0 1 1 0 0 0 1 0 1 1 2
0 0 0 0 0 0 1 1 1 1 2
1 1 0 0 1 0 0 0 1 0 2
1 1 0 0 0 1 1 1 1 1 2
1 1 0 0 1 0 0 0 1 1 2
0 1 0 0 0 1 1 1 1 1 2
0 1 0 1 1 0 1 1 1 1 2
0 1 1 0 0 1 1 0 1 1 2
1 1 0 0 1 0 1 1 1 1 2
0 1 0 0 0 1 0 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
1 1 1 0 0 0 0 0 1 1 2
1 1 0 0 0 1 0 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
1 1 0 0 0 0 1 0 1 1 2
0 1 0 0 0 0 0 0 1 1 2
0 1 0 0 0 0 1 0 1 1 2
0 1 0 1 0 0 1 0 1 0 2
1 1 1 0 0 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
1 1 1 0 0 0 1 0 1 1 2
0 0 0 0 1 0 1 1 1 1 2
1 1 0 0 0 1 1 0 1 0 2
0 1 0 0 1 0 0 0 1 1 2
1 0 0 0 0 1 1 0 1 0 2
1 1 0 0 1 1 1 0 1 0 2
1 1 0 0 1 0 1 0 1 1 2
0 1 0 0 0 1 1 0 1 1 2
0 1 0 0 1 0 1 1 1 0 2
0 0 0 0 1 0 1 0 1 1 2
1 1 0 0 1 0 1 0 1 1 2
0 1 0 0 1 1 1 0 1 1 2
1 1 0 0 0 0 1 1 1 0 2


@@ -35,8 +35,12 @@ while err > 0
 %%% ylim([mny mxy]);
 %ginput(1);
 % pause(0.5); %change the delay
+rate = rate * 0.9;
 end
 end
-round = round + 1
-err = sum(sign(w'*X)~=Y')/N %show misclassification rate
-end
+round = round + 1;
+err = sum(sign(w'*X)~=Y')/N; %show misclassification rate
+end
+round
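The added "rate = rate * 0.9;" line decays the learning rate geometrically each pass; a standalone sketch (variable names illustrative) of what that schedule does:

% sketch: geometric decay of the learning rate, as in the patch
rate = 1.0;             % assumed initial rate
for r = 1:20
    rate = rate * 0.9;  % shrink by 10% each pass
end
rate                    % displays ~0.1216 = 0.9^20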


@@ -2,22 +2,26 @@
 % Rui Kuang
 % Run perceptron on random data points in two classes
-n = 20; %set the number of data points
-mydata = rand(n,2);
-shiftidx = abs(mydata(:,1)-mydata(:,2))>0.05;
-mydata = mydata(shiftidx,:);
-myclasses = mydata(:,1)>mydata(:,2); % labels
-n = size(mydata,1);
-X = [mydata ones(1,n)']'; Y=myclasses;
-Y = Y * 2 -1;
-% init weigth vector
-w = [mean(mydata) 0]';
+% n = 200; %set the number of data points
+% mydata = rand(n,2);
+%
+% shiftidx = abs(mydata(:,1)-mydata(:,2))>0.00005;
+% mydata = mydata(shiftidx,:);
+% myclasses = mydata(:,1)>mydata(:,2); % labels
+% n = size(mydata,1);
+% X = [mydata ones(1,n)']'; Y=myclasses;
+% Y = Y * 2 -1;
+%
+% % init weigth vector
+% %%% w = [mean(mydata) 0]';
+% w = [1 0 0];
 for i = 1:1
-w=rand(1,3)';
-w(3,1)=0;%go through the origin for visualization
+%%% w=rand(1,3)'
+w = [0.6842
+0.5148
+0]
+w(3,1)=0%go through the origin for visualization
 % call perceptron
 wtag=perceptron(X,Y,w,10);
 end

zoo/.gitignore vendored Normal file

@@ -0,0 +1 @@
trees