Skip to content

Commit 314f475

Browse files
committed
tweak logistic regression
1 parent 0635e51 commit 314f475

File tree

2 files changed

+8
-9
lines changed

2 files changed

+8
-9
lines changed

chapter04/logitBin.m

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,16 @@
1-
function [model, llh] = logitBin(X, y, lambda, eta)
1+
function [model, llh] = logitBin(X, y, lambda)
22
% Logistic regression for binary classification optimized by Newton-Raphson method.
33
% Input:
44
% X: d x n data matrix
5-
% z: 1 x n label (0/1)
5+
% y: 1 x n label (0/1)
66
% lambda: regularization parameter
7-
% eta: step size
7+
% alpha: step size
88
% Output:
99
% model: trained model structure
1010
% llh: loglikelihood
1111
% Written by Mo Chen (sth4nth@gmail.com).
1212
if nargin < 4
13-
eta = 1e-1;
13+
alpha = 1e-1;
1414
end
1515
if nargin < 3
1616
lambda = 1e-4;
@@ -20,18 +20,17 @@
2020
tol = 1e-4;
2121
epoch = 200;
2222
llh = -inf(1,epoch);
23-
h = 2*y-1;
2423
w = rand(d,1);
2524
for t = 2:epoch
2625
a = w'*X;
27-
llh(t) = -(sum(log1pexp(-h.*a))+0.5*lambda*dot(w,w))/n; % 4.89
28-
if llh(t)-llh(t-1) < tol; break; end
26+
llh(t) = (dot(a,y)-sum(log1pexp(a))-0.5*lambda*dot(w,w))/n; % 4.90
27+
if abs(llh(t)-llh(t-1)) < tol; break; end
2928
z = sigmoid(a); % 4.87
3029
g = X*(z-y)'+lambda*w; % 4.96
3130
r = z.*(1-z); % 4.98
3231
Xw = bsxfun(@times, X, sqrt(r));
3332
H = Xw*Xw'+lambda*eye(d); % 4.97
34-
w = w-eta*(H\g);
33+
w = w-alpha*(H\g); % 4.92
3534
end
3635
llh = llh(2:t);
3736
model.w = w;

chapter09/kmeansRnd.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
% mu: d x k centers of clusters
1111
% Written by Mo Chen (sth4nth@gmail.com).
1212
alpha = 1;
13-
beta = nthroot(k,d); % in volume x^d there is k points: x^d=k
13+
beta = nthroot(k,d); % k points in volume x^d : x^d=k
1414

1515
X = randn(d,n);
1616
w = dirichletRnd(alpha,ones(1,k)/k);

0 commit comments

Comments (0)