
Commit a3b2796
Merge pull request #82 from dynamicslab/fix-check-fitted
Fix check fitted
briandesilva committed Jul 7, 2020
2 parents a5443a5 + 2b4e6f6 commit a3b2796
Showing 4 changed files with 23 additions and 7 deletions.
pysindy/feature_library/custom_library.py (4 changes: 2 additions & 2 deletions)

@@ -89,7 +89,7 @@ def get_feature_names(self, input_features=None):
         output_feature_names : list of string, length n_output_features
         """
-        check_is_fitted(self, "n_output_features_")
+        check_is_fitted(self)
         if input_features is None:
             input_features = ["x%d" % i for i in range(self.n_input_features_)]
         feature_names = []
@@ -147,7 +147,7 @@ def transform(self, X):
             generated from applying the custom functions to the inputs.
         """
-        check_is_fitted(self, "n_output_features_")
+        check_is_fitted(self)
 
         X = check_array(X)
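
Both hunks swap the explicit attribute check for the attribute-free form of check_is_fitted. A minimal sketch of the behavior this relies on (illustration only, not code from this commit; TinyLibrary is a hypothetical stand-in for a feature library): since scikit-learn 0.22, check_is_fitted(estimator) needs no attribute name and treats any attribute ending in an underscore, set during fit(), as evidence that the estimator is fitted.

    import numpy as np
    from sklearn.base import BaseEstimator
    from sklearn.exceptions import NotFittedError
    from sklearn.utils.validation import check_is_fitted

    class TinyLibrary(BaseEstimator):  # hypothetical stand-in
        def fit(self, X, y=None):
            # Trailing-underscore attributes are only set here, during fit()
            self.n_input_features_ = X.shape[1]
            self.n_output_features_ = X.shape[1]
            return self

    lib = TinyLibrary()
    try:
        check_is_fitted(lib)  # no trailing-underscore attributes yet: raises
    except NotFittedError:
        print("not fitted yet")

    lib.fit(np.ones((2, 3)))
    check_is_fitted(lib)  # passes silently once fit() has run
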
pysindy/optimizers/base.py (7 changes: 3 additions & 4 deletions)

@@ -8,6 +8,7 @@
 from sklearn.linear_model import LinearRegression
 from sklearn.multioutput import MultiOutputRegressor
 from sklearn.utils.extmath import safe_sparse_dot
+from sklearn.utils.validation import check_is_fitted
 from sklearn.utils.validation import check_X_y


@@ -27,6 +28,7 @@ def _rescale_data(X, y, sample_weight):
 class ComplexityMixin:
     @property
     def complexity(self):
+        check_is_fitted(self)
         return np.count_nonzero(self.coef_) + np.count_nonzero(self.intercept_)


@@ -73,9 +75,6 @@ def __init__(self, max_iter=20, normalize=False, fit_intercept=False, copy_X=True

         self.max_iter = max_iter
         self.iters = 0
-        self.coef_ = None
-        self.ind_ = None
-        self.history_ = []

     # Force subclasses to implement this
     @abc.abstractmethod
@@ -127,7 +126,7 @@ def fit(self, x_, y, sample_weight=None, **reduce_kws):
         self.iters = 0
         self.ind_ = np.ones((y.shape[1], x.shape[1]), dtype=bool)
         self.coef_ = np.linalg.lstsq(x, y, rcond=None)[0].T  # initial guess
-        self.history_.append(self.coef_)
+        self.history_ = [self.coef_]
 
         self._reduce(x, y, **reduce_kws)
         self.ind_ = np.abs(self.coef_) > 1e-14
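
The __init__ and fit changes serve the same rule: check_is_fitted(self) counts any trailing-underscore attribute as fitted state, so assigning coef_, ind_, and history_ in the constructor made a freshly constructed optimizer look fitted. Moving history_ into fit (and replacing the append with a fresh list) also means a repeated fit starts with a clean history. A hedged sketch of the failure mode (not code from this commit; both class names are hypothetical):

    from sklearn.base import BaseEstimator
    from sklearn.exceptions import NotFittedError
    from sklearn.utils.validation import check_is_fitted

    class LooksFitted(BaseEstimator):        # the old pattern
        def __init__(self):
            self.coef_ = None                # trailing "_" set in __init__
        def fit(self, X, y=None):
            return self

    class HonestlyUnfitted(BaseEstimator):   # the fixed pattern
        def __init__(self):
            self.max_iter = 20               # plain attributes only
        def fit(self, X, y=None):
            return self

    check_is_fitted(LooksFitted())           # passes silently: the old bug
    try:
        check_is_fitted(HonestlyUnfitted())
    except NotFittedError:
        print("correctly reported as unfitted")
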
requirements.txt (2 changes: 1 addition & 1 deletion)

@@ -1,3 +1,3 @@
-scikit-learn[alldeps]>=0.21
+scikit-learn[alldeps]>=0.23
 numpy
 scipy
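
The scikit-learn floor moves from 0.21 to 0.23: the attribute-free check_is_fitted call used above does not exist in 0.21, and 0.23 is the series this commit targets. A quick runtime guard (an illustration, not part of the PR):

    import sklearn
    from packaging.version import Version

    # check_is_fitted(estimator) with no attribute argument needs >= 0.22;
    # this project now pins >= 0.23.
    assert Version(sklearn.__version__) >= Version("0.23"), sklearn.__version__
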
test/optimizers/test_optimizers.py (17 changes: 17 additions & 0 deletions)

@@ -5,6 +5,7 @@
 import pytest
 from numpy.linalg import norm
 from sklearn.base import BaseEstimator
+from sklearn.exceptions import NotFittedError
 from sklearn.linear_model import ElasticNet
 from sklearn.linear_model import Lasso
 from sklearn.utils.validation import check_is_fitted
@@ -81,6 +82,22 @@ def test_fit(data, optimizer):
     assert opt.coef_.shape == (1, x.shape[1])
 
 
+@pytest.mark.parametrize(
+    "optimizer", [STLSQ(), SR3()],
+)
+def test_not_fitted(optimizer):
+    with pytest.raises(NotFittedError):
+        optimizer.predict(np.ones((1, 3)))
+
+
+@pytest.mark.parametrize(
+    "optimizer", [STLSQ(), SR3()],
+)
+def test_complexity_not_fitted(optimizer):
+    with pytest.raises(NotFittedError):
+        optimizer.complexity
+
+
 @pytest.mark.parametrize(
     "kwargs", [{"normalize": True}, {"fit_intercept": True}, {"copy_X": False}]
 )
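
Note that test_complexity_not_fitted exercises a bare attribute access: complexity is a property, so merely reading it runs check_is_fitted(self) inside pytest.raises, with no call needed. For contrast, a companion sketch of the fitted path (not a test from this commit; it assumes STLSQ is importable from pysindy.optimizers and accepts a single-column target):

    import numpy as np
    from pysindy.optimizers import STLSQ

    def test_fitted_optimizer_has_complexity():
        x = np.random.standard_normal((100, 3))
        y = x @ np.array([[1.0], [0.0], [-2.0]])  # one-column target
        opt = STLSQ().fit(x, y)
        opt.predict(x)             # no longer raises after fit()
        assert opt.complexity > 0  # property now returns a nonzero count
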
