Logistic Regression with L1 and L2 penalty

Fit logistic regression classifiers with an L1 and an L2 penalty on the iris
dataset and compare the percentage of non-zero coefficients in the two
solutions.
Python source code: logistic_l1_l2_coef.py
print __doc__
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD Style.
import numpy as np
from scikits.learn.linear_model import LogisticRegression
from scikits.learn import datasets
iris = datasets.load_iris()
X = iris.data
y = iris.target
# Set the regularization parameter C (inverse of regularization strength:
# smaller C means stronger regularization)
C = 0.1
classifier_l1_LR = LogisticRegression(C=C, penalty='l1')
classifier_l2_LR = LogisticRegression(C=C, penalty='l2')
classifier_l1_LR.fit(X, y)
classifier_l2_LR.fit(X, y)
hyperplane_coefficients_l1_LR = classifier_l1_LR.coef_[:]
hyperplane_coefficients_l2_LR = classifier_l2_LR.coef_[:]
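# coef_ stores one row of hyperplane coefficients per class
# (shape: n_classes x n_features, i.e. 3 x 4 for iris)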
# hyperplane_coefficients_l1_LR contains zeros because the L1 penalty is a
# sparsity-inducing norm
pct_non_zeros_l1_LR = np.mean(hyperplane_coefficients_l1_LR != 0) * 100
pct_non_zeros_l2_LR = np.mean(hyperplane_coefficients_l2_LR != 0) * 100
print "Percentage of non zeros coefficients (L1) : %f" % pct_non_zeros_l1_LR
print "Percentage of non zeros coefficients (L2) : %f" % pct_non_zeros_l2_LR