
Commit

Merge remote-tracking branch 'origin/Fedot_ver_0.3' into Fedot_ver_0.3
# Conflicts:
#	fedot_ind/core/architecture/experiment/TimeSeriesClassifierPreset.py
#	fedot_ind/core/operation/transformation/basis/data_driven.py
technocreep committed Aug 23, 2023
2 parents 359f2f1 + e7548b0 commit bc08cee
Showing 3 changed files with 7 additions and 19 deletions.
4 changes: 2 additions & 2 deletions fedot_ind/core/architecture/experiment/TimeSeriesClassifierPreset.py

@@ -231,9 +231,9 @@ def predict(self, features: pd.DataFrame, target: np.array) -> dict:
 
         test_data = self._init_input_data(features, target)
         test_data_preprocessed = self.preprocessing_pipeline.root_node.predict(test_data)
-        data_cacher = DataCacher()
+        # data_cacher = DataCacher()
         # get unique hash of input data
-        test_predict_hash = data_cacher.hash_info(data=features)
+        # test_predict_hash = data_cacher.hash_info(data=features)
         # compare it to existed hash
         # if self.test_predict_hash != test_predict_hash:
         # test_data = self._init_input_data(features, target)
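The two disabled lines belong to a hash-based prediction cache: hash the incoming features and only rebuild the input data when the hash differs from the one remembered by a previous call. Below is a minimal, self-contained sketch of that idea; only DataCacher.hash_info appears in the diff, so the class name PredictCache, the method get_or_build, and the md5-based hashing are assumptions, not FEDOT code.

import hashlib
import numpy as np
import pandas as pd

class PredictCache:
    """Toy stand-in for the DataCacher-based check the commit disables."""

    def __init__(self):
        self._hash = None
        self._input_data = None

    def hash_info(self, data: pd.DataFrame) -> str:
        # stand-in for DataCacher.hash_info: stable digest of the feature table
        return hashlib.md5(pd.util.hash_pandas_object(data).values.tobytes()).hexdigest()

    def get_or_build(self, features: pd.DataFrame, target: np.ndarray, builder):
        new_hash = self.hash_info(features)
        if self._hash != new_hash:  # hash changed -> rebuild and remember
            self._input_data = builder(features, target)
            self._hash = new_hash
        return self._input_data

cache = PredictCache()
features = pd.DataFrame(np.random.rand(5, 3))
target = np.array([0, 1, 0, 1, 1])
data = cache.get_or_build(features, target, builder=lambda f, t: (f.values, t))
data_again = cache.get_or_build(features, target, builder=lambda f, t: (f.values, t))
print(data is data_again)  # True: the second call reuses the cached object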
2 changes: 1 addition & 1 deletion fedot_ind/core/models/recurrence/RecurrenceExtractor.py

@@ -66,7 +66,7 @@ def generate_vector_from_ts(self, ts_frame: pd.DataFrame) -> pd.DataFrame:
         """
 
         parallel = Parallel(n_jobs=self.n_processes, verbose=0, pre_dispatch="2*n_jobs")
-        components_and_vectors = parallel(delayed(self.generate_features_from_ts)(sample) for sample in ts_frame.value)
+        components_and_vectors = parallel(delayed(self._ts_chunk_function)(component) for component in ts_frame)
         if self.image_mode:
             components_and_vectors = np.asarray(components_and_vectors)
             components_and_vectors = components_and_vectors[:, np.newaxis, :, :]
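The replaced line is the usual joblib Parallel/delayed pattern: build a generator of delayed calls, one per series, and let the executor spread them over n_jobs workers. A standalone sketch with a toy per-series function follows; extract_features is a stand-in for self._ts_chunk_function and the shapes are arbitrary, so treat it as an illustration of the dispatch pattern only.

import numpy as np
from joblib import Parallel, delayed

def extract_features(series: np.ndarray) -> np.ndarray:
    # toy per-series feature vector: a few summary statistics
    return np.array([series.mean(), series.std(), series.max(), series.min()])

ts_frame = np.random.rand(10, 100)  # 10 series of length 100
parallel = Parallel(n_jobs=2, verbose=0, pre_dispatch="2*n_jobs")
features = parallel(delayed(extract_features)(component) for component in ts_frame)
features = np.asarray(features)
print(features.shape)  # (10, 4)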
20 changes: 4 additions & 16 deletions fedot_ind/core/operation/transformation/basis/data_driven.py

@@ -94,33 +94,21 @@ def _transform(self, input_data: InputData) -> np.array:
         # if predict[0].shape[0] == new_shape or predict[0].shape[0] == 1:
         # reduce_dimension = False
         # new_shape = predict[0].shape[0]
-        #predict = self._clean_predict(np.array(v))
+        # predict = self._clean_predict(np.array(v))
         return predict
 
     def get_threshold(self, data, selector: str):
 
-        selectors = {'median': np.median,
-                     'mode': self.mode}
-        # 'mode': stats.mode}
+        selectors = {'median': stats.mode,
+                     'mode': stats.mode}
 
         svd_numbers = []
         with tqdm(total=len(data), desc='SVD estimation') as pbar:
             for signal in data:
                 svd_numbers.append(self._transform_one_sample(signal, svd_flag=True))
                 pbar.update(1)
-        return int(selectors[selector](svd_numbers))
 
-    def mode(self, arr):
-        frequency_dict = {}
-        for num in arr:
-            if num in frequency_dict:
-                frequency_dict[num] += 1
-            else:
-                frequency_dict[num] = 1
-        max_frequency = max(frequency_dict.values())
-        modes = [num for num, frequency in frequency_dict.items() if frequency == max_frequency]
-        return modes[0]
-
+        return selectors[selector](svd_numbers).mode[0]
 
     def _transform_one_sample(self, series: np.array, svd_flag: bool = False):
         trajectory_transformer = HankelMatrix(time_series=series, window_size=self.window_size)
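The rewritten get_threshold drops the hand-rolled mode() helper, collects a per-sample SVD rank estimate, and takes the most frequent value with scipy.stats.mode, unpacking it via .mode[0]. A self-contained sketch of that aggregation is below; the rank-estimation step is faked with random integers (a stand-in for self._transform_one_sample(..., svd_flag=True)), and keepdims=True, available from SciPy 1.9, keeps the array result so the .mode[0] indexing also works on current SciPy versions.

import numpy as np
from scipy import stats

# Fake per-sample rank estimates in place of the SVD-based estimation loop.
rng = np.random.default_rng(0)
svd_numbers = rng.integers(low=2, high=6, size=30)

# Most frequent estimated rank across the dataset.
threshold = stats.mode(svd_numbers, keepdims=True).mode[0]
print(int(threshold))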
