
ENH: add CovDetMCD and det for regression #9227

Merged (15 commits, May 25, 2024)
935 changes: 928 additions & 7 deletions statsmodels/robust/covariance.py

Large diffs are not rendered by default.

114 changes: 96 additions & 18 deletions statsmodels/robust/norms.py
@@ -1,6 +1,6 @@
import numpy as np

# TODO: add plots to weighting functions for online docs.
from . import tools as rtools

Check notice (Code scanning / CodeQL): import of module statsmodels.robust.tools begins an import cycle.

def _cabs(x):
@@ -41,6 +41,9 @@

continuous = 1

def __repr__(self):
return self.__class__.__name__

def rho(self, z):
"""
The robust criterion estimator function.
@@ -199,12 +202,16 @@
def __init__(self, t=1.345):
self.t = t

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.t = c
if inplace:
self.t = c
return self
else:
return self.__class__(t=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L214 was not covered by tests.

def max_rho(self):
return np.inf
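
For context, a minimal usage sketch of the new copy-versus-inplace semantics introduced above (`_set_tuning_param` is a private helper; behavior inferred from the signatures in this diff):

    from statsmodels.robust import norms

    norm = norms.HuberT(t=1.345)
    norm2 = norm._set_tuning_param(2.0)                 # default: returns a new HuberT(t=2.0)
    assert norm.t == 1.345                              # the original instance is unchanged
    norm3 = norm._set_tuning_param(2.0, inplace=True)   # mutates and returns the same object
    assert norm3 is norm and norm.t == 2.0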
@@ -322,6 +329,18 @@
def __init__(self, a=.3):
self.a = a

def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
if inplace:
self.a = c
return self

Check warning (Codecov / codecov/patch): added lines statsmodels/robust/norms.py#L339-L340 were not covered by tests.
else:
return self.__class__(a=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L342 was not covered by tests.

def max_rho(self):
return np.inf

@@ -419,12 +438,16 @@
def __init__(self, a=1.339):
self.a = a

def _set_tuning_param(self, a):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.a = a
if inplace:
self.a = c
return self
else:
return self.__class__(a=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L450 was not covered by tests.

def max_rho(self):
return 2 * self.a**2
@@ -553,12 +576,16 @@
def __init__(self, c=2.):
self.c = c

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L588 was not covered by tests.

def max_rho(self):
return self.rho(self.c)
@@ -674,14 +701,20 @@
self.b = b
self.c = c

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
self.a = c / 4
self.b = c / 2
a = c / 4
b = c / 2
if inplace:
self.c = c
self.a = a
self.b = b
return self
else:
return self.__class__(a=a, b=b, c=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L717 was not covered by tests.

def max_rho(self):
return self.rho(self.c)
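
A quick sketch of the Hampel rescaling above: changing the tuning constant c keeps the fixed a:b:c = 1:2:4 ratio (defaults assumed to be a=2, b=4, c=8):

    from statsmodels.robust import norms

    norm = norms.Hampel(a=2., b=4., c=8.)
    norm2 = norm._set_tuning_param(4.)   # copy with c=4.0, a=c/4=1.0, b=c/2=2.0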
@@ -862,12 +895,49 @@
def __init__(self, c=4.685):
self.c = c

def _set_tuning_param(self, c):
def __repr__(self):
return f"{self.__class__.__name__}(c={self.c})"

@classmethod
def get_tuning(cls, bp=None, eff=None):
"""Tuning parameter for given breakdown point or efficiency.

This currently only returns values from a table.

Parameters
----------
bp : float in [0.05, 0.5] or None
Required breakdown point
Either bp or eff has to be specified, but not both.
eff : float or None
Required asymptotic efficiency.
Either bp or eff has to be specified, but not both.

Returns
-------
float : tuning parameter.

"""
if ((bp is None and eff is None) or
(bp is not None and eff is not None)):
raise ValueError("exactly one of bp and eff needs to be provided")

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L923 was not covered by tests.

if bp is not None:
return rtools.tukeybiweight_bp[bp]

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L926 was not covered by tests.
elif eff is not None:
return rtools.tukeybiweight_eff[eff]

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L928 was not covered by tests.

def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L940 was not covered by tests.

def max_rho(self):
return self.rho(self.c)
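
A short sketch of the new `get_tuning` lookup and `__repr__` (assuming 0.95 and 0.5 are keys in the `rtools.tukeybiweight_eff` and `rtools.tukeybiweight_bp` tables):

    from statsmodels.robust import norms

    c_eff = norms.TukeyBiweight.get_tuning(eff=0.95)   # ~4.685, the class default
    c_bp = norms.TukeyBiweight.get_tuning(bp=0.5)      # high-breakdown tuning, ~1.55
    norm = norms.TukeyBiweight(c=c_bp)
    print(repr(norm))                                  # "TukeyBiweight(c=...)" via the new __repr__
    # calling get_tuning() with neither or both arguments raises ValueError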
@@ -891,7 +961,7 @@
Returns
-------
rho : ndarray
rho(z) = -(1 - (z/c)**2)**3 * c**2/6. for \|z\| <= R
rho(z) = -(1 - (z/c)**2)**3 * c**2/6 + c**2/6 for \|z\| <= R

rho(z) = c**2/6 for \|z\| > R
"""
@@ -983,12 +1053,16 @@
self.c = c
self.k = k

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c, k=self.k)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L1065 was not covered by tests.

def max_rho(self):
return self.rho(self.c)
@@ -1121,12 +1195,16 @@
self.c = c
self.df = df

def _set_tuning_param(self, c):
def _set_tuning_param(self, c, inplace=False):
"""Set and change the tuning parameter of the Norm.

Warning: this needs to wipe cached attributes that depend on the param.
"""
self.c = c
if inplace:
self.c = c
return self
else:
return self.__class__(c=c, df=self.df)

Check warning (Codecov / codecov/patch): added line statsmodels/robust/norms.py#L1207 was not covered by tests.

def max_rho(self):
return np.inf