added anomaly stuff, fixed examples
technocreep committed Jul 20, 2023
1 parent e484785 commit c670e71
Showing 7 changed files with 801 additions and 423 deletions.
800 changes: 406 additions & 394 deletions examples/anomaly_detection/anomaly_detection_using_classification.ipynb

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions examples/time_series_classification/ts_classification.ipynb
@@ -727,8 +727,8 @@
 }
 ],
 "source": [
-"model = industrial.fit(train_features=train_data[0],\n",
-" train_target=train_data[1])"
+"model = industrial.fit(features=train_data[0],\n",
+" target=train_data[1])"
 ]
 },
 {
@@ -808,7 +808,7 @@
 }
 ],
 "source": [
-"labels = industrial.predict(test_features=test_data[0])\n",
+"labels = industrial.predict(features=test_data[0])\n",
 "\n",
 "print(labels)"
 ]
@@ -839,7 +839,7 @@
 }
 ],
 "source": [
-"probs = industrial.predict_proba(test_features=test_data[0])\n",
+"probs = industrial.predict_proba(features=test_data[0])\n",
 "\n",
 "print(probs)"
 ]
@@ -146,8 +146,8 @@ def predict(self, features: np.ndarray, **kwargs) -> dict:
                                                            mode='labels')
         return self.prediction_label

-    def predict_proba(self, test_features: np.ndarray, **kwargs) -> dict:
-        self.prediction_proba = self.__predict_abstraction(test_features=test_features,
+    def predict_proba(self, features: np.ndarray, **kwargs) -> dict:
+        self.prediction_proba = self.__predict_abstraction(test_features=features,
                                                             mode='probs', )
         return self.prediction_proba
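Taken together, the notebook cells and the hunk above switch the public keyword arguments from train_features / train_target / test_features to features / target. A minimal sketch of the updated calling convention, assuming `industrial` is the experiment object created earlier in the notebook and that `train_data` / `test_data` are (features, target) tuples as used there:

    # Sketch only: `industrial`, `train_data` and `test_data` come from earlier
    # notebook cells and are not defined here.
    model = industrial.fit(features=train_data[0],
                           target=train_data[1])

    labels = industrial.predict(features=test_data[0])       # predicted class labels
    probs = industrial.predict_proba(features=test_data[0])  # class probabilities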
13 changes: 8 additions & 5 deletions fedot_ind/core/models/statistical/StatsExtractor.py
@@ -3,7 +3,6 @@

 import numpy as np
 import pandas as pd
-
 from fedot.core.data.data import InputData
 from fedot.core.operations.operation_parameters import OperationParameters
 from pandas import Index
@@ -52,10 +51,14 @@ def _transform(self, input_data: InputData) -> np.array:
         stat_features = v[0].columns
         n_components = v[0].shape[0]
         predict = self._clean_predict(np.array(v))
-        predict = self.drop_features(predict, stat_features, n_components)
-        return predict.values
-
-    def drop_features(self, predict: pd.DataFrame, columns: Index, n_components: int):
+        # predict = self.drop_features(predict=predict,
+        #                              target_values=input_data.target,
+        #                              columns=stat_features,
+        #                              n_components=n_components)
+        return predict
+        # return predict.values
+
+    def drop_features(self, predict: pd.DataFrame, target_values, columns: Index, n_components: int):
         """
         Method for dropping features with low variance
         """
36 changes: 18 additions & 18 deletions fedot_ind/core/tuning/search_space.py
@@ -13,25 +13,25 @@
 {'spectrum': (hp.choice, [['smoothed']]),
 'threshold': (hp.uniformint, [10000, 50000])},

-# 'quantile_extractor':
-#     {'window_mode': (hp.choice, [[True, True]]),
-# {'window_mode': (hp.choice, [[True, False]]),
-#  'window_size': (hp.choice, [[x for x in range(1, 50, 3)]]),
-#  'var_threshold': (hp.choice, [[_ for _ in np.linspace(0, 0.02, 35)]])},
+'quantile_extractor':
+{'window_mode': (hp.choice, [[True, True]]),
+# {'window_mode': (hp.choice, [[True, False]]),
+'window_size': (hp.choice, [[x for x in range(1, 50, 3)]]),
+'var_threshold': (hp.choice, [[_ for _ in np.linspace(0, 0.02, 35)]])},

-'quantile_extractor': {'nested_space': (hp.choice, [[
-{
-'window_mode': True,
-'window_size': hp.choice('window_size_true', list(range(1, 50, 3))),
-'var_threshold': hp.uniform('threshold_true', 0, 0.02)
-},
-{
-'window_mode': False,
-'window_size': None,
-'var_threshold': hp.uniform('threshold_false', 0, 0.02)
-}
-
-]])},
+# 'quantile_extractor': {'nested_space': (hp.choice, [[
+#     {
+#         'window_mode': True,
+#         'window_size': hp.choice('window_size_true', list(range(1, 50, 3))),
+#         'var_threshold': hp.uniform('threshold_true', 0, 0.02)
+#     },
+#     {
+#         'window_mode': False,
+#         'window_size': None,
+#         'var_threshold': hp.uniform('threshold_false', 0, 0.02)
+#     }
+#
+# ]])},

 'recurrence_extractor':
 {'win_mode': (hp.choice, [[True, False]]),
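The dictionary above stores each hyperparameter as a (hyperopt function, arguments) pair rather than a ready-made hyperopt expression. A minimal sketch of how the flat quantile_extractor entry could be turned into a sampleable space; the build_space helper below is hypothetical, for illustration only, and is not the tuner code used by the repository:

    import numpy as np
    from hyperopt import hp
    from hyperopt.pyll import stochastic

    quantile_entry = {
        'window_mode': (hp.choice, [[True, True]]),
        'window_size': (hp.choice, [[x for x in range(1, 50, 3)]]),
        'var_threshold': (hp.choice, [[_ for _ in np.linspace(0, 0.02, 35)]]),
    }

    def build_space(operation: str, entry: dict) -> dict:
        # hp.* functions expect a unique label first, then the stored arguments.
        return {name: func(f'{operation} | {name}', *args)
                for name, (func, args) in entry.items()}

    space = build_space('quantile_extractor', quantile_entry)
    print(stochastic.sample(space))  # e.g. {'var_threshold': 0.007..., 'window_mode': True, 'window_size': 13}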
126 changes: 126 additions & 0 deletions fedot_ind/synth_anomalies/anomalies.py
@@ -0,0 +1,126 @@
import numpy as np


class Anomaly:
    def __init__(self, params: dict):
        self.level = params.get('level', 10)
        self.anomaly_type = self.__class__.__name__

    def get(self, ts: np.ndarray, interval: tuple):
        raise NotImplementedError()


class ShiftTrendUP(Anomaly):
    def __init__(self, params):
        super().__init__(params)

    def get(self, ts: np.ndarray, interval: tuple):
        shift = np.zeros(ts.size)
        shift_value = np.mean(ts[interval[0]:interval[1] + 1]) * (self.level / 100)
        shift_value = abs(shift_value)
        shift[interval[0]:interval[1] + 1] = shift_value
        return self.apply_shift(ts, shift)

    def apply_shift(self, ts: np.ndarray, shift: np.ndarray):
        return ts + shift


class ShiftTrendDOWN(ShiftTrendUP):
    def __init__(self, params):
        super().__init__(params)

    def apply_shift(self, ts: np.ndarray, shift: np.ndarray):
        return ts - shift


class DecreaseDispersion(Anomaly):

    def __init__(self, params):
        super().__init__(params)

    def get(self, ts: np.ndarray, interval: tuple):
        new_ts = ts.copy()
        sector_values = new_ts[interval[0]:interval[1] + 1]
        mean = float(np.mean(sector_values))
        new_sector_values = [self.shrink(mean, x) for x in sector_values]
        new_ts[interval[0]:interval[1] + 1] = new_sector_values
        return new_ts

    def shrink(self, mean_value: float, i: float):
        diff = mean_value - i
        new_diff = diff - diff * (self.level / 100)
        new_i = mean_value - new_diff
        return new_i


class IncreaseDispersion(DecreaseDispersion):

    def __init__(self, params):
        super().__init__(params)

    def shrink(self, mean_value: float, i: float):
        diff = mean_value - i
        new_diff = diff + diff * (self.level / 100)
        new_i = mean_value - new_diff
        return new_i


class AddNoise(Anomaly):
    def __init__(self, params):
        super().__init__(params)
        self.noise_type = params.get('noise_type', np.random.choice(['gaussian',
                                                                      'uniform',
                                                                      'laplace']))

    def get(self, ts: np.ndarray, interval: tuple):
        ts_ = ts.copy()
        sector = ts_[interval[0]: interval[1]+1]

        noise_std = np.std(sector) * self.level / 100

        if self.noise_type == 'gaussian':
            noise = np.random.normal(0, noise_std, len(sector))
        elif self.noise_type == 'uniform':
            noise = np.random.uniform(-noise_std, noise_std, len(sector))
        elif self.noise_type == 'laplace':
            noise = np.random.laplace(0, noise_std, len(sector))
        else:
            raise ValueError("Invalid noise_type. Please choose 'gaussian', 'uniform', or 'laplace'.")

        noisy_sector = sector + noise
        ts_[interval[0]:interval[1]+1] = noisy_sector

        return ts_


class Peak(Anomaly):
    def __init__(self, params):
        super().__init__(params)

    def get(self, ts: np.ndarray, interval: tuple):
        ts_ = ts.copy()
        shift = np.zeros(ts.size)
        sector = ts_[interval[0]: interval[1]+1]
        peak_value = abs(np.mean(sector) * (self.level / 100))
        center_point = int((interval[1]+1 + interval[0]) / 2)
        shift[center_point] = peak_value
        return self.apply_shift(ts_, shift)

    def apply_shift(self, ts_, shift):
        return ts_ + shift


class Dip(Peak):
    def __init__(self, params):
        super().__init__(params)

    def apply_shift(self, ts_, shift):
        return ts_ - shift


class ChangeTrend(Anomaly):
    def __init__(self, params):
        super().__init__(params)

    def get(self, ts: np.ndarray, interval: tuple):
        pass
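A minimal usage sketch for the new anomaly classes; the sine series, the level values, and the chosen intervals below are illustrative and not taken from the repository:

    import numpy as np
    from fedot_ind.synth_anomalies.anomalies import ShiftTrendUP, AddNoise, Peak  # path taken from the new file above

    ts = 5 + np.sin(np.linspace(0, 20, 500))  # illustrative base series

    shifted = ShiftTrendUP({'level': 20}).get(ts, interval=(100, 200))   # +20 % level shift on [100, 200]
    noisy = AddNoise({'level': 50, 'noise_type': 'gaussian'}).get(shifted, interval=(300, 350))
    spiked = Peak({'level': 150}).get(noisy, interval=(400, 420))        # single spike at the interval centre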