
Commit cb15b30

0.17
1 parent fbd552b commit cb15b30

7 files changed: 40 additions & 39 deletions


.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ install:
   - sudo pip install python-coveralls
   - sudo pip install codecov
   - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
-      conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION six numpy scipy scikit-learn=0.18 pandas coverage jupyter seaborn xgboost tensorflow;
+      conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION six numpy scipy scikit-learn=0.17 pandas coverage jupyter seaborn xgboost tensorflow;
       else
       conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION six numpy scipy scikit-learn pandas coverage jupyter seaborn xgboost tensorflow;
       fi
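
The effect of the pin is that the Python 2.7 job now tests against scikit-learn 0.17 rather than 0.18. A rough sanity check one could run inside the created test-environment (hypothetical, not part of this commit):

    # Hypothetical check, not in the repository: confirm the conda
    # test-environment resolved the scikit-learn=0.17 pin.
    import sklearn

    assert sklearn.__version__.startswith('0.17'), sklearn.__version__
    print('scikit-learn %s' % sklearn.__version__)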

docs/source/extending.rst

Lines changed: 5 additions & 2 deletions
@@ -101,9 +101,12 @@ We can now use this as a regular step. If we fit it on ``df`` and transform it o

 We can, however, now use it for fitting on one ``DataFrame``, and transforming another:

->>> from sklearn import model_selection
+>>> try:
+...     from sklearn.model_selection import train_test_split
+... except: # Older sklearn versions
+...     from ibex.sklearn.cross_val_predict import train_test_split
 >>>
->>> tr, te = model_selection.train_test_split(df, random_state=3)
+>>> tr, te = train_test_split(df, random_state=3)
 >>> GroupbyAggregator('a').fit(tr).transform(te)
       b     c
 0  0...  2...
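
The fallback shown in this doc change works the same way against plain scikit-learn. A minimal sketch of the pattern (assumed, not taken from the repository; it only requires scikit-learn and pandas to be installed):

    # scikit-learn 0.18 moved train_test_split to sklearn.model_selection;
    # 0.17 and earlier expose it from sklearn.cross_validation.
    try:
        from sklearn.model_selection import train_test_split
    except ImportError:  # older scikit-learn
        from sklearn.cross_validation import train_test_split

    import pandas as pd

    df = pd.DataFrame({'a': range(10), 'b': range(10)})
    tr, te = train_test_split(df, random_state=3)
    print(len(tr), len(te))  # train/test row counts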

docs/source/feature_union.rst

Lines changed: 10 additions & 10 deletions
@@ -64,11 +64,11 @@ The output using this, however, discards the meaning of the columns:
 >>> trn.fit_transform(iris[features], iris['class'])
         pca              selectkbest
      comp_0    comp_1    petal length (cm)
-0 -2.684207  0.326607                  1.4
-1 -2.715391 -0.169557                  1.4
-2 -2.889820 -0.137346                  1.3
-3 -2.746437 -0.311124                  1.5
-4 -2.728593  0.333925                  1.4
+0 -2.684207 ...0.326607                  1.4
+1 -2.715391 ...0.169557                  1.4
+2 -2.889820 ...0.137346                  1.3
+3 -2.746437 ...0.311124                  1.5
+4 -2.728593 ...0.333925                  1.4
 ...

 A better way would be to combine this with :func:`ibex.trans`:
@@ -79,11 +79,11 @@ A better way would be to combine this with :func:`ibex.trans`:
 >>> trn.fit_transform(iris[features], iris['class'])
   functiontransformer_0            functiontransformer_1
         pc1       pc2                             best
-0 -2.684207  0.326607                              1.4
-1 -2.715391 -0.169557                              1.4
-2 -2.889820 -0.137346                              1.3
-3 -2.746437 -0.311124                              1.5
-4 -2.728593  0.333925                              1.4
+0 -2.684207 ...0.326607                              1.4
+1 -2.715391 ...0.169557                              1.4
+2 -2.889820 ...0.137346                              1.3
+3 -2.746437 ...0.311124                              1.5
+4 -2.728593 ...0.333925                              1.4
 ...



examples/iris_feature_importance.ipynb

Lines changed: 6 additions & 6 deletions
Large diffs are not rendered by default.

ibex/_adapter.py

Lines changed: 6 additions & 11 deletions
@@ -42,18 +42,16 @@ def _from_pickle(
         est,
         params,
         extra_methods,
-        extra_attribs,
-        post_op):
-    cls = frame_ex(est, extra_methods, extra_attribs, post_op)
+        extra_attribs):
+    cls = frame_ex(est, extra_methods, extra_attribs)
     est = cls(**params)
     return est


 def make_adapter(
         est,
         extra_methods,
-        extra_attribs,
-        post_op):
+        extra_attribs):
     from ._base import FrameMixin

     extra_attribs_d = {fn.__name__: fn for fn in extra_attribs}
@@ -322,15 +320,12 @@ def __getattribute__(self, name):
         def __reduce__(self):
             if not self.__module__.startswith('ibex'):
                 raise TypeError('Cannot serialize a subclass of this type; please use composition instead')
-            return (_from_pickle, (est, self.get_params(deep=True), extra_methods, extra_attribs, post_op))
-
-    if post_op is not None:
-        post_op(_Adapter)
+            return (_from_pickle, (est, self.get_params(deep=True), extra_methods, extra_attribs))

     return _Adapter


-def frame_ex(est, extra_methods=(), extra_attribs=(), post_op=None):
+def frame_ex(est, extra_methods=(), extra_attribs=()):
     from ._base import FrameMixin

     if isinstance(est, FrameMixin):
@@ -341,7 +336,7 @@ def frame_ex(est, extra_methods=(), extra_attribs=(), post_op=None):
         f = frame(type(est))(**params)
         return f

-    _Adapter = make_adapter(est, extra_methods, extra_attribs, post_op)
+    _Adapter = make_adapter(est, extra_methods, extra_attribs)

     update_class_wrapper(_Adapter, est)

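The __reduce__/_from_pickle pair that this change simplifies is the usual recipe for pickling instances of classes generated at runtime: __reduce__ hands pickle a module-level callable plus the arguments needed to regenerate the class and rebuild the instance. A stripped-down sketch of the pattern, with hypothetical names rather than ibex's actual code:

    import pickle


    def _rebuild(base, params):
        # Module-level reconstructor: regenerate the dynamic class, then the instance.
        return make_wrapper(base)(**params)


    def make_wrapper(base):
        # The generated subclass has no importable name, so its instances
        # describe how to rebuild themselves via __reduce__.
        class _Wrapper(base):
            def __reduce__(self):
                return (_rebuild, (base, self.get_params()))
        return _Wrapper


    class Estimator(object):
        def __init__(self, alpha=1.0):
            self.alpha = alpha

        def get_params(self):
            return {'alpha': self.alpha}


    obj = make_wrapper(Estimator)(alpha=0.5)
    restored = pickle.loads(pickle.dumps(obj))
    print(restored.alpha)  # 0.5

Dropping post_op simply shrinks the argument tuple that gets pickled; the reconstruction path itself is unchanged.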

ibex/sklearn/_cross_val_predict.py

Lines changed: 7 additions & 4 deletions
@@ -83,17 +83,20 @@ def cross_val_predict(
     Example:

         >>> import pandas as pd
-        >>> from ibex.sklearn import model_selection as pd_model_selection
-        >>> from ibex.sklearn import linear_model as pd_linear_model
+        >>> from ibex.sklearn.linear_model import LinearRegression
+        >>> try:
+        ...     from ibex.sklearn.model_selection import cross_val_predict
+        ... except: # Older sklearn versions
+        ...     from ibex.sklearn.cross_val_predict import cross_val_predict

         >>> df = pd.DataFrame({
         ...         'x': range(100),
         ...         'y': range(100),
         ...     },
         ...     index=['i%d' % i for i in range(100)])

-        >>> pd_model_selection.cross_val_predict(
-        ...     pd_linear_model.LinearRegression(),
+        >>> cross_val_predict(
+        ...     LinearRegression(),
         ...     df[['x']],
         ...     df['y'])
         i0    ...

ibex/sklearn/_decomposition.py

Lines changed: 5 additions & 5 deletions
@@ -41,11 +41,11 @@

     >>> PdPCA(n_components=2).fit(iris[features], iris['class']).transform(iris[features])
          comp_0    comp_1
-    0 -2.684207  0.326607
-    1 -2.715391 -0.169557
-    2 -2.889820 -0.137346
-    3 -2.746437 -0.311124
-    4 -2.728593  0.333925
+    0 -2.684207 ...0.326607
+    1 -2.715391 ...0.169557
+    2 -2.889820 ...0.137346
+    3 -2.746437 ...0.311124
+    4 -2.728593 ...0.333925
     ...

 """
