@@ -372,7 +372,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
372372 To lessen the effect of regularization on synthetic feature weight
373373 (and therefore on the intercept) intercept_scaling has to be increased.
374374
375- multi_class : str, optional default 'ovr'
375+ multi_class : str, {'ovr', 'multinomial'}
376376 Multiclass option can be either 'ovr' or 'multinomial'. If the option
377377 chosen is 'ovr', then a binary problem is fit for each label. Else
378378 the loss minimised is the multinomial loss fit across
@@ -455,10 +455,11 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
455455
456456 else:
457457 lbin = LabelBinarizer()
458- Y = lbin.fit_transform(y)
459- if Y.shape[1] == 1:
460- Y = np.hstack([1 - Y, Y])
461- w0 = np.zeros((Y.shape[1], n_features + int(fit_intercept)), order='F')
458+ Y_bin = lbin.fit_transform(y)
459+ if Y_bin.shape[1] == 1:
460+ Y_bin = np.hstack([1 - Y_bin, Y_bin])
461+ w0 = np.zeros((Y_bin.shape[1], n_features + int(fit_intercept)),
462+ order='F')
462463 mask_classes = classes
463464
464465 if class_weight == "auto":
@@ -495,7 +496,7 @@ def logistic_regression_path(X, y, pos_class=None, Cs=10, fit_intercept=True,
495496 for C in Cs:
496497 if solver == 'lbfgs':
497498 if multi_class == 'multinomial':
498- target = Y
499+ target = Y_bin
499500 func = _multinomial_loss_grad
500501 else:
501502 target = y
@@ -627,7 +628,7 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
627628 To lessen the effect of regularization on synthetic feature weight
628629 (and therefore on the intercept) intercept_scaling has to be increased.
629630
630- multi_class : str, optional default 'ovr'
631+ multi_class : str, {'ovr', 'multinomial'}
631632 Multiclass option can be either 'ovr' or 'multinomial'. If the option
632633 chosen is 'ovr', then a binary problem is fit for each label. Else
633634 the loss minimised is the multinomial loss fit across
@@ -658,8 +659,10 @@ def _log_reg_scoring_path(X, y, train, test, pos_class=None, Cs=10,
658659 log_reg._enc = LabelEncoder()
659660 if multi_class == 'ovr':
660661 log_reg._enc.fit([-1, 1])
661- else:
662+ elif multi_class == 'multinomial':
662663 log_reg._enc.fit(np.unique(y_train))
664+ else:
665+ raise ValueError("multi_class should be either multinomial or ovr")
663666
664667 if pos_class is not None:
665668 mask = (y_test == pos_class)
@@ -772,7 +775,7 @@ class LogisticRegression(BaseLibLinear, LinearClassifierMixin,
772775 tol : float, optional
773776 Tolerance for stopping criteria.
774777
775- multi_class : str, optional default 'ovr'
778+ multi_class : str, {'ovr', 'multinomial'}
776779 Multiclass option can be either 'ovr' or 'multinomial'. If the option
777780 chosen is 'ovr', then a binary problem is fit for each label. Else
778781 the loss minimised is the multinomial loss fit across
@@ -958,7 +961,7 @@ class LogisticRegressionCV(LogisticRegression, BaseEstimator,
958961 Otherwise the coefs, intercepts and C that correspond to the
959962 best scores across folds are averaged.
960963
961- multi_class : str, optional default 'ovr'
964+ multi_class : str, {'ovr', 'multinomial'}
962965 Multiclass option can be either 'ovr' or 'multinomial'. If the option
963966 chosen is 'ovr', then a binary problem is fit for each label. Else
964967 the loss minimised is the multinomial loss fit across
0 commit comments