Commit

Merge branch 'OperationChanges'
benfulcher committed Jul 11, 2019
2 parents 5725535 + 3cdc045 commit 109b9a6
Showing 7 changed files with 173 additions and 137 deletions.
198 changes: 110 additions & 88 deletions Database/INP_ops.txt

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion Operations/DN_RemovePoints.m
@@ -53,7 +53,7 @@
%% Preliminaries
% ------------------------------------------------------------------------------
N = length(y); % time-series length
doPlot = 0; % plot output
doPlot = false; % plot output

% ------------------------------------------------------------------------------
%% Check inputs
@@ -108,6 +108,8 @@
%% Compute output statistics
% ------------------------------------------------------------------------------
out.fzcacrat = CO_FirstZero(y_trim,'ac')/CO_FirstZero(y,'ac');
out.ac1rat = acf_y_trim(1)/acf_y(1); % includes the sign
out.ac1diff = abs(acf_y_trim(1)-acf_y(1));
out.ac2rat = acf_y_trim(2)/acf_y(2); % includes the sign
out.ac2diff = abs(acf_y_trim(2)-acf_y(2));
out.ac3rat = acf_y_trim(3)/acf_y(3); % includes the sign
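
This hunk adds lag-1 ratio and difference statistics alongside the existing lag-2 and lag-3 outputs. Below is a minimal standalone sketch, not the hctsa implementation, of how such statistics can be formed; the inline estimator and the stand-in series y and y_trim are assumptions in place of however DN_RemovePoints actually computes acf_y and acf_y_trim (via CO_AutoCorr in the real code).

% Hedged sketch: sign-preserving ratio and absolute-difference statistics
% between the autocorrelations of a series and a point-removed copy of it.
y = randn(500,1);                  % stand-in time series
y_trim = y(1:400);                 % stand-in for the point-removed series
acf = @(x,tau) sum((x(1:end-tau)-mean(x)).*(x(1+tau:end)-mean(x)))/sum((x-mean(x)).^2);
acf_y      = arrayfun(@(tau) acf(y,tau), 1:3);      % lags 1-3, original series
acf_y_trim = arrayfun(@(tau) acf(y_trim,tau), 1:3); % lags 1-3, trimmed series
out.ac1rat  = acf_y_trim(1)/acf_y(1);        % ratio keeps the sign of both estimates
out.ac1diff = abs(acf_y_trim(1) - acf_y(1)); % magnitude of the change at lag 1
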
2 changes: 1 addition & 1 deletion Operations/MF_CompareTestSets.m
@@ -236,7 +236,7 @@

% Get statistics on output time series
meandiffs(i) = abs(mean(yp.y) - mean(yTest.y));
stdrats(i) = abs(std(yp.y)/std(yTest.y));
stdrats(i) = std(yp.y)/std(yTest.y);

% % 1) Get statistics on residuals
% residout = MF_ResidualAnalysis(mresiduals);
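
The change above drops a redundant abs(): standard deviations are non-negative, so their ratio already is. A small hedged illustration, with yPred and yTest used as assumed stand-ins for the predicted and held-out segments handled inside MF_CompareTestSets:

yTest = randn(200,1);                      % stand-in held-out test segment
yPred = 0.8*yTest + 0.1*randn(200,1);      % stand-in model prediction
meandiff = abs(mean(yPred) - mean(yTest)); % location mismatch (abs is needed here)
stdrat   = std(yPred)/std(yTest);          % scale mismatch (already non-negative)
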
5 changes: 3 additions & 2 deletions Operations/NW_VisibilityGraph.m
@@ -138,6 +138,7 @@
out.mink = min(k); % minimum degree
out.rangek = range(k); % range of degree distribution
out.iqrk = iqr(k); % interquartile range of degree distribution
out.skewnessk = skewness(k); % skewness of degree distribution
out.maxonmedian = max(k)/median(k); % max on median (indicator of outlier)
out.ol90 = mean(k(k>=quantile(k,0.05) & k<=quantile(k,0.95)))/mean(k);
out.olu90 = (mean(k(k>=quantile(k,0.95)))-mean(k))/std(k); % top 5% of points are
@@ -227,8 +228,8 @@

% Extreme Value Distribution
paramhat = evfit(k);
out.evparm1 = paramhat(1);
out.evparm2 = paramhat(2);
out.evparam1 = paramhat(1);
out.evparam2 = paramhat(2);
out.evnlogL = evlike(paramhat,k);

% ------------------------------------------------------------------------------
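
For context on the outputs touched here, a hedged sketch of the degree-distribution statistics, assuming k is the vector of node degrees of the visibility graph; the synthetic degree sequence is a stand-in, and skewness, poissrnd, evfit, and evlike come from the Statistics and Machine Learning Toolbox:

k = poissrnd(5,1000,1) + 1;       % stand-in degree sequence
out.skewnessk = skewness(k);      % asymmetry of the degree distribution (newly added output)
paramhat = evfit(k);              % extreme value distribution fit: [mu, sigma]
out.evparam1 = paramhat(1);       % location parameter (renamed from evparm1)
out.evparam2 = paramhat(2);       % scale parameter (renamed from evparm2)
out.evnlogL = evlike(paramhat,k); % negative log-likelihood of the fitted distribution
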
17 changes: 9 additions & 8 deletions Operations/ST_LocalExtrema.m
@@ -63,7 +63,7 @@
end
end

doPlot = 0; % plot outputs to a figure
doPlot = false; % plot outputs to a figure

N = length(y); % length of time series

@@ -92,20 +92,21 @@
%% Buffer the time series
% ------------------------------------------------------------------------------
y_buff = buffer(y,wl); % no overlap
% each *column* is a window of samples
% Each *column* is a window of samples:
if y_buff(end) == 0
y_buff = y_buff(:,1:end-1); % remove last window if zero-padded
end
nw = size(y_buff,2); % number of windows
numWindows = size(y_buff,2); % number of windows

% ------------------------------------------------------------------------------
%% Find local extrema
% ------------------------------------------------------------------------------
locmax = max(y_buff); % summary of local maxima
locmin = min(y_buff); % summary of local minima
abslocmin = abs(locmin); % absoluate value of local minima
exti = find(abslocmin>locmax);
locext = locmax; locext(exti) = locmin(exti); % local extrema (furthest from mean; either maxs or mins)
abslocmin = abs(locmin); % absolute value of local minima
exti = find(abslocmin > locmax);
locext = locmax;
locext(exti) = locmin(exti); % local extrema (furthest from mean; either maxs or mins)
abslocext = abs(locext); % the magnitude of the most extreme events in each window

if doPlot
@@ -136,8 +137,8 @@
out.zcext = ST_SimpleStats(locext,'zcross');
out.meanabsext = mean(abslocext);
out.medianabsext = median(abslocext);
out.diffmaxabsmin = sum(abs(locmax-abslocmin))/nw;
out.uord = sum(sign(locext))/nw; % whether extreme events are more up or down
out.diffmaxabsmin = sum(abs(locmax-abslocmin))/numWindows;
out.uord = sum(sign(locext))/numWindows; % whether extreme events are more up or down
out.maxmaxmed = max(locmax)/median(locmax);
out.minminmed = min(locmin)/median(locmin);
out.maxabsext = max(abslocext)/median(abslocext);
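
A hedged, standalone sketch of the windowing logic this hunk tidies up: buffer (Signal Processing Toolbox, already used by ST_LocalExtrema) zero-pads the final frame when the series length is not a multiple of the window length, which is why the last column is dropped when its final sample is zero. The series and window length below are illustrative assumptions.

y = randn(250,1);               % stand-in series (length not a multiple of wl)
wl = 100;                       % window length
y_buff = buffer(y,wl);          % non-overlapping windows, one per column
if y_buff(end) == 0
    y_buff = y_buff(:,1:end-1); % drop the zero-padded final window
end
numWindows = size(y_buff,2);    % here: 2
locmax = max(y_buff);           % per-window maxima
abslocmin = abs(min(y_buff));   % per-window |minima|
out.diffmaxabsmin = sum(abs(locmax - abslocmin))/numWindows;
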
35 changes: 21 additions & 14 deletions PeripheryFunctions/TS_local_clear_remove.m
@@ -1,10 +1,12 @@
function TS_local_clear_remove(tsOrOps,idRange,doRemove,whatData)
function TS_local_clear_remove(whatData,tsOrOps,idRange,doRemove)
% TS_local_clear_remove Clear or remove data from an hctsa dataset
%
% 'Clear' means clearing any calculations performed about a given time series
% or operation, but keeping it in the dataset.
% 'Remove' means removing the time series or operation from the dataset completely.
% The result is saved back to the hctsa .mat datafile provided.
% 'clear' (doRemove = false) means clearing any calculations performed about a
% given time series or operation, but keeping it in the dataset.
% 'remove' (doRemove = true) means removing the time series or operation from
% the dataset completely.
%
% The result is saved back to the hctsa-formatted .mat data file provided as whatData.
%
%---INPUTS:
% tsOrOps -- either 'ts' or 'ops' for whether to work with either time series
@@ -17,10 +19,15 @@ function TS_local_clear_remove(tsOrOps,idRange,doRemove,whatData)
%---EXAMPLE USAGE:
% This clears the data about the time series with IDs 1,2,3,4, and 5 from the hctsa dataset
% stored in HCTSA.mat:
% >> TS_local_clear_remove('ts',1:5,0,'HCTSA.mat');
% >> TS_local_clear_remove('HCTSA.mat','ts',1:5,false);
%
% This *removes* the time series with IDs from 1:5 from the dataset completely:
% >> TS_local_clear_remove('ts',1:5,1,'HCTSA.mat');
% >> TS_local_clear_remove('HCTSA.mat','ts',1:5,true);
%
% IDs for a given keyword can be retrieved using TS_getIDs. This example removes
% all time series from HCTSA.mat that have the keyword 'noise':
% >> noiseIDs = TS_getIDs('noise','HCTSA.mat','ts');
% >> TS_local_clear_remove('HCTSA.mat','ts',noiseIDs,true);

% ------------------------------------------------------------------------------
% Copyright (C) 2018, Ben D. Fulcher <ben.d.fulcher@gmail.com>,
@@ -48,7 +55,11 @@ function TS_local_clear_remove(tsOrOps,idRange,doRemove,whatData)
%% Preliminaries and input checking
%-------------------------------------------------------------------------------

if nargin < 1
if nargin < 1 || isempty(whatData)
whatData = 'raw'; % normally want to clear data from the local store
end

if nargin < 2
tsOrOps = 'ts';
end
switch tsOrOps
@@ -63,18 +74,14 @@ function TS_local_clear_remove(tsOrOps,idRange,doRemove,whatData)
end

% Must specify a set of time series
if nargin < 2 || min(size(idRange)) ~= 1
if nargin < 3 || min(size(idRange)) ~= 1
error('Specify a range of IDs');
end

if nargin < 3 % doRemove
if nargin < 4 % doRemove
error('You must specify whether to remove the %s or just clear their data results',theWhat)
end

if nargin < 4
whatData = 'raw'; % normally want to clear data from the local store
end

% ------------------------------------------------------------------------------
%% Load data
% ------------------------------------------------------------------------------
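
A usage sketch of the reordered interface (the data file now comes first), mirroring the examples in the updated header; it assumes an HCTSA.mat file exists and contains time series tagged with the keyword 'noise':

% Clear computed results for time series with IDs 1-5, keeping them in the dataset:
TS_local_clear_remove('HCTSA.mat','ts',1:5,false);
% Remove all time series tagged 'noise' from the dataset entirely:
noiseIDs = TS_getIDs('noise','HCTSA.mat','ts');
TS_local_clear_remove('HCTSA.mat','ts',noiseIDs,true);
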
49 changes: 26 additions & 23 deletions Toolboxes/Michael_Small/MS_complexity.m
@@ -1,9 +1,8 @@
% cmp = MS_complexity(x,n);
%
% calculate the Lempel-Ziv complexity of the n-bit encoding of x.
function cmp = MS_complexity(x,n,binHow)
% Calculate the Lempel-Ziv complexity of the n-bit encoding of x.
%
% cmp is the normalised complexity, that is the number of distinct
% symbol sequences in x, divided by the expected number of distinct
% symbol sequences in x, divided by the expected number of distinct
% symbols for a noise sequence.
%
% Algorithm is implemented in complexitybs.c
@@ -16,45 +15,49 @@
% Series A, vol. 52. World Scientific, 2005. (ISBN 981-256-117-X) and the
% references therein.

function cmp = MS_complexity(x,n);

if nargin < 2
n = 2;
end
if nargin < 3
binHow = 'equiprobable';
end
%-------------------------------------------------------------------------------

if length(n) > 1
for ni = 1:length(n)
cmp(ni) = MS_complexity(x,n(ni));
end
else,

if 1,
%do the binning, with equi-probably bins
% Can run with multiple values of the number of symbols, n:
for ni = 1:length(n)
cmp(ni) = MS_complexity(x,n(ni),binHow);
end
else

% do the binning, with equiprobable bins
switch binHow
case 'equiprobable'
x=x(:);
nx=length(x);
[xn,xi]=sort(x+eps*randn(size(x))); %introduce randomness for ties
y=zeros(nx,1);
y=1:nx;
y=floor(y.*(n/(nx+1)));
x(xi)=y;
else,
%do binning with equal width bins
case 'equiwidth'
% else,
% %do binning with equal width bins
x=x(:);
nx=length(x);
minx=min(x);
maxx=max(x);
stepx=(maxx-minx)/n;
y=zeros(nx,1);
while minx<maxx,
while minx<maxx
minx=minx+stepx;
y=y+double(x<minx);
end;
end
x=floor(y);
end;

%compute complexity with complexitybs
cmp=MS_complexitybs(x);
end

end;
%compute complexity with complexitybs
cmp = MS_complexitybs(x);
end

end
end
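
A hedged sketch of the two symbolization schemes that the new binHow input selects between, written as a standalone snippet rather than the function's own code; the example data and the use of discretize for the equal-width case are assumptions for illustration.

x = randn(1000,1);
n = 2;                                      % number of symbols
% 'equiprobable': rank the samples and split the ranks into n equally populated bins
[~,xi] = sort(x + eps*randn(size(x)));      % tiny jitter breaks ties
symEquiprob = zeros(size(x));
symEquiprob(xi) = floor((1:length(x))'*(n/(length(x)+1)));
% 'equiwidth': split the data range into n bins of equal width
edges = linspace(min(x),max(x),n+1);
symEquiwidth = discretize(x,edges) - 1;     % symbols 0..n-1
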
