Skip to content

Commit

Permalink
[Fix] expired scipy deprecations, sp.sqrt (NeuralEnsemble#616)
Browse files Browse the repository at this point in the history
* replace sp.sqrt with np.sqrt

* replace: 'scipy.integrate.simps' with 'scipy.integrate.simpson'

* Change from 'scipy.signal.gaussian' to 'scipy.signal.windows.gaussian'

* replace 'scipy.integrate.cumtrapz' with 'scipy.integrate.cumulative_trapezoid'

* replace 'scipy.integrate.cumtrapz' with 'scipy.integrate.cumulative_trapezoid'

* replace 'scipy.integrate.cumtrapz' with 'scipy.integrate.cumulative_trapezoid'

* replace 'scipy.integrate.trapz' with 'scipy.integrate.trapezoid'

* update requirement for scipy to 1.11.0, since scipy.integrate.simpson was introduced with this version

* use keyword arguments with scipy.integrate.simpson

* change scipy to >=1.10.0
  • Loading branch information
Moritz-Alexander-Kern committed Mar 20, 2024
1 parent dcd57e5 commit dd3146c
Show file tree
Hide file tree
Showing 9 changed files with 24 additions and 25 deletions.
14 changes: 7 additions & 7 deletions elephant/current_source_density.py
Expand Up @@ -38,7 +38,7 @@
import neo
import numpy as np
import quantities as pq
from scipy.integrate import simps
from scipy.integrate import simpson

import elephant.current_source_density_src.utility_functions as utils
from elephant.current_source_density_src import KCSD, icsd
Expand Down Expand Up @@ -281,7 +281,7 @@ def generate_lfp(csd_profile, x_positions, y_positions=None, z_positions=None,
def integrate_1D(x0, csd_x, csd, h):
m = np.sqrt((csd_x - x0) ** 2 + h ** 2) - abs(csd_x - x0)
y = csd * m
I = simps(y, csd_x)
I = simpson(y, x=csd_x)
return I

def integrate_2D(x, y, xlin, ylin, csd, h, X, Y):
Expand All @@ -293,17 +293,17 @@ def integrate_2D(x, y, xlin, ylin, csd, h, X, Y):
m = np.sqrt((x - X) ** 2 + (y - Y) ** 2)
np.clip(m, a_min=0.0000001, a_max=None, out=m)
y = np.arcsinh(2 * h / m) * csd
I = simps(y.T, ylin)
F = simps(I, xlin)
I = simpson(y.T, x=ylin)
F = simpson(I, x=xlin)
return F

def integrate_3D(x, y, z, csd, xlin, ylin, zlin, X, Y, Z):
m = np.sqrt((x - X) ** 2 + (y - Y) ** 2 + (z - Z) ** 2)
np.clip(m, a_min=0.0000001, a_max=None, out=m)
z = csd / m
Iy = simps(np.transpose(z, (1, 0, 2)), zlin)
Iy = simps(Iy, ylin)
F = simps(Iy, xlin)
Iy = simpson(np.transpose(z, (1, 0, 2)), x=zlin)
Iy = simpson(Iy, x=ylin)
F = simpson(Iy, x=xlin)
return F

dim = 1
Expand Down
12 changes: 6 additions & 6 deletions elephant/current_source_density_src/icsd.py
Expand Up @@ -95,26 +95,26 @@ def filter_csd(self, csd, filterfunction='convolve'):
raise ae('filter order f_order must be a tuple of length 2')
else:
try:
assert(self.f_order > 0 and isinstance(self.f_order, int))
assert (self.f_order > 0 and isinstance(self.f_order, int))
except AssertionError as ae:
raise ae('Filter order must be int > 0!')
try:
assert(filterfunction in ['filtfilt', 'convolve'])
assert (filterfunction in ['filtfilt', 'convolve'])
except AssertionError as ae:
raise ae("{} not equal to 'filtfilt' or \
'convolve'".format(filterfunction))

if self.f_type == 'boxcar':
num = ss.boxcar(self.f_order)
num = ss.windows.boxcar(self.f_order)
denom = np.array([num.sum()])
elif self.f_type == 'hamming':
num = ss.hamming(self.f_order)
num = ss.windows.hamming(self.f_order)
denom = np.array([num.sum()])
elif self.f_type == 'triangular':
num = ss.triang(self.f_order)
num = ss.windows.triang(self.f_order)
denom = np.array([num.sum()])
elif self.f_type == 'gaussian':
num = ss.gaussian(self.f_order[0], self.f_order[1])
num = ss.windows.gaussian(self.f_order[0], self.f_order[1])
denom = np.array([num.sum()])
elif self.f_type == 'identity':
num = np.array([1.])
Expand Down
2 changes: 1 addition & 1 deletion elephant/spike_train_correlation.py
Expand Up @@ -1073,5 +1073,5 @@ def spike_train_timescale(binned_spiketrain, max_tau):

# Calculate the timescale using trapezoidal integration
integr = (corrfct / corrfct[0]) ** 2
timescale = 2 * integrate.trapz(integr, dx=bin_size)
timescale = 2 * integrate.trapezoid(integr, dx=bin_size)
return pq.Quantity(timescale, units=binned_spiketrain.units, copy=False)
3 changes: 1 addition & 2 deletions elephant/spike_train_dissimilarity.py
Expand Up @@ -24,7 +24,6 @@

import numpy as np
import quantities as pq
import scipy as sp
from neo.core import SpikeTrain

import elephant.kernels as kernels
Expand Down Expand Up @@ -364,7 +363,7 @@ def van_rossum_distance(spiketrains, time_constant=1.0 * pq.s, sort=True):
for i, j in np.ndindex(k_dist.shape):
vr_dist[i, j] = (
k_dist[i, i] + k_dist[j, j] - k_dist[i, j] - k_dist[j, i])
return sp.sqrt(vr_dist)
return np.sqrt(vr_dist)


def _summed_dist_matrix(spiketrains, tau, presorted=False):
Expand Down
8 changes: 4 additions & 4 deletions elephant/test/test_kernels.py
Expand Up @@ -81,7 +81,7 @@ def test_kernels_normalization(self):
restric_defdomain = np.linspace(
-b, b, num=n_points) * sigma.units
kern = kernel(restric_defdomain)
norm = spint.cumtrapz(y=kern.magnitude,
norm = spint.cumulative_trapezoid(y=kern.magnitude,
x=restric_defdomain.magnitude)[-1]
self.assertAlmostEqual(norm, 1, delta=0.003)

Expand All @@ -104,11 +104,11 @@ def test_kernels_stddev(self):
-b, b, num=n_points) * sigma.units
kern = kernel(restric_defdomain)
av_integr = kern * restric_defdomain
average = spint.cumtrapz(
average = spint.cumulative_trapezoid(
y=av_integr.magnitude,
x=restric_defdomain.magnitude)[-1] * sigma.units
var_integr = (restric_defdomain - average) ** 2 * kern
variance = spint.cumtrapz(
variance = spint.cumulative_trapezoid(
y=var_integr.magnitude,
x=restric_defdomain.magnitude)[-1] * sigma.units ** 2
stddev = np.sqrt(variance)
Expand All @@ -132,7 +132,7 @@ def test_kernel_boundary_enclosing(self):
restric_defdomain = np.linspace(
-b, b, num=n_points) * sigma.units
kern = kernel(restric_defdomain)
frac = spint.cumtrapz(y=kern.magnitude,
frac = spint.cumulative_trapezoid(y=kern.magnitude,
x=restric_defdomain.magnitude)[-1]
self.assertAlmostEqual(frac, fraction, delta=0.002)

Expand Down
4 changes: 2 additions & 2 deletions elephant/test/test_spike_train_dissimilarity.py
Expand Up @@ -491,7 +491,7 @@ def test_van_rossum_distance(self):
-((self.t - self.st08[1]) / self.tau3).simplified) -
(self.t > self.st09[0]) * np.exp(
-((self.t - self.st09[0]) / self.tau3).simplified)) ** 2
distance = np.sqrt(2.0 * spint.cumtrapz(
distance = np.sqrt(2.0 * spint.cumulative_trapezoid(
y=f_minus_g_squared, x=self.t.magnitude)[-1] /
self.tau3.rescale(self.t.units).magnitude)
self.assertAlmostEqual(stds.van_rossum_distance(
Expand Down Expand Up @@ -573,7 +573,7 @@ def test_van_rossum_distance(self):
-((self.t - self.st34[0]) / self.tau3).simplified) -
(self.t > self.st34[1]) * np.exp(
-((self.t - self.st34[1]) / self.tau3).simplified)) ** 2
distance = np.sqrt(2.0 * spint.cumtrapz(
distance = np.sqrt(2.0 * spint.cumulative_trapezoid(
y=f_minus_g_squared, x=self.t.magnitude)[-1] /
self.tau3.rescale(self.t.units).magnitude)
self.assertAlmostEqual(stds.van_rossum_distance([self.st31, self.st34],
Expand Down
2 changes: 1 addition & 1 deletion elephant/test/test_statistics.py
Expand Up @@ -616,7 +616,7 @@ def test_instantaneous_rate_rate_estimation_consistency(self):
border_correction=border_correction
)
num_spikes = len(self.spike_train)
area_under_curve = spint.cumtrapz(
area_under_curve = spint.cumulative_trapezoid(
y=rate_estimate.magnitude[:, 0],
x=rate_estimate.times.rescale('s').magnitude)[-1]
self.assertAlmostEqual(num_spikes, area_under_curve,
Expand Down
2 changes: 1 addition & 1 deletion requirements/environment.yml
Expand Up @@ -7,7 +7,7 @@ dependencies:
- python>=3.8
- mpi4py
- numpy>=1.19.5
- scipy
- scipy>=1.10.0
- tqdm
- scikit-learn
- statsmodels
Expand Down
2 changes: 1 addition & 1 deletion requirements/requirements.txt
@@ -1,6 +1,6 @@
neo>=0.10.0
numpy>=1.19.5
quantities>=0.14.1
scipy>=1.5.4
scipy>=1.10.0
six>=1.10.0
tqdm

0 comments on commit dd3146c

Please sign in to comment.