From 29a914d553e099e3554a0242c60f6a583bcf7cb3 Mon Sep 17 00:00:00 2001
From: anton
Date: Sat, 30 Sep 2023 10:27:30 +0200
Subject: [PATCH] Issue #67: Black linter refactoring

---
 .github/workflows/linter.yml              |  4 ++-
 analyst/__init__.py                       |  2 +-
 analyst/analyst_service.py                |  4 +--
 logs/clear_logs.py                        |  4 +--
 predictor/model_testing/testing.py        | 27 ++++++++++---------
 predictor/models/data_processing.py       | 12 ++++-----
 predictor/models/fortune-nn-configs       |  2 +-
 predictor/models/model_handler.py         |  4 +--
 predictor/models/sf1/snowfall_model.py    |  7 +++--
 predictor/models/sf1/snowfall_training.py | 15 ++++++-----
 predictor/predictor_service.py            |  4 +--
 project.py                                | 10 +++----
 src/__init__.py                           |  3 +--
 src/api_binance.py                        | 32 +++++++++++------------
 src/config_parser.py                      |  8 +++---
 src/custom_types/__init__.py              | 32 ++++++++++++++++++-----
 src/custom_types/noop_queue.py            |  6 ++---
 src/discord_bot.py                        |  4 +--
 src/generate_config.py                    | 18 ++++++-------
 src/log_setup.py                          |  5 ++--
 src/parse_arguments.py                    |  9 +++----
 trader/__init__.py                        |  1 -
 trader/demo_account.py                    |  6 ++---
 trader/trader_service.py                  | 19 +++++++-------
 train_testing.py                          | 18 ++++++++-----
 unit_tests/config_parser_test.py          | 23 +++++++---------
 unit_tests/demo_account_test.py           |  2 +-
 unit_tests/generate_config_test.py        |  9 +++----
 28 files changed, 151 insertions(+), 139 deletions(-)

diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index 4114ba4..579cf51 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -21,4 +21,6 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           DEFAULT_BRANCH: develop
           # DISABLE_ERRORS: true # avoid crashing PR
-          # VALIDATE_ALL_CODEBASE: false # check only new changes
\ No newline at end of file
+          # VALIDATE_ALL_CODEBASE: false # check only new changes
+          VALIDATE_PYTHON_BLACK: true
+          VALIDATE_PYTHON_BLACK_ARGS: "--line-length 100"
diff --git a/analyst/__init__.py b/analyst/__init__.py
index 5cc11a6..2a9360a 100644
--- a/analyst/__init__.py
+++ b/analyst/__init__.py
@@ -3,4 +3,4 @@
 Analyse all predictions and make decision whether to make purchase or not.
 
 Get information from Prediction Module and give issues commands to Trading Module.
-"""
\ No newline at end of file
+"""
diff --git a/analyst/analyst_service.py b/analyst/analyst_service.py
index 50d4919..f6523a1 100644
--- a/analyst/analyst_service.py
+++ b/analyst/analyst_service.py
@@ -18,7 +18,7 @@ def analyse_threshold(self, vector):
         return vector >= self.threshold
 
     def make_trading_decision(self, vector):
-        """ Decide if purchasing cryptocurrency is profitable based on the probability of price growth.
+        """Decide if purchasing cryptocurrency is profitable based on the probability of price growth.
 
        :arg vector: probability of cryptocurrency price growth.
        :return: True if it is profitable to buy/keep crypto, False otherwise (to sell).
@@ -26,5 +26,3 @@ def make_trading_decision(self, vector):
         decision = self.analyse_threshold(vector)
         self.logger.info(f"Vector: {vector}, Decision: {decision}")
         return decision
-
-
diff --git a/logs/clear_logs.py b/logs/clear_logs.py
index f190536..9d7a943 100644
--- a/logs/clear_logs.py
+++ b/logs/clear_logs.py
@@ -3,11 +3,11 @@
 import os
 
 
-def clear_all_logs(log_dir='./'):
+def clear_all_logs(log_dir="./"):
     files = os.listdir(log_dir)
 
     for file in files:
-        if file.endswith('.log'):
+        if file.endswith(".log"):
             os.remove(os.path.join(log_dir, file))
 
 
diff --git a/predictor/model_testing/testing.py b/predictor/model_testing/testing.py
index c9f53d8..683e4bf 100644
--- a/predictor/model_testing/testing.py
+++ b/predictor/model_testing/testing.py
@@ -7,35 +7,36 @@ class ModelTesting:
     def __init__(self, data_set, train_coefficient, output):
         self.data_set = data_set
-        self.testing_data = data_set[int(len(data_set) * train_coefficient):]
+        self.testing_data = data_set[int(len(data_set) * train_coefficient) :]
         self.train_coefficient = train_coefficient
         self.output_column = output
         self.lines = []
-        self._add_line(self.data_set[self.output_column], 'real_data')
+        self._add_line(self.data_set[self.output_column], "real_data")
 
     def _add_line(self, data_set, name: str):
-        line1, = plt.plot(data_set, label=name)
+        (line1,) = plt.plot(data_set, label=name)
         self.lines.append(line1)
 
     def add_model(self, model, input_layers: list):
-        test_x, test_labels = reconstruct_data(np.array(self.testing_data[input_layers]),
-                                               np.array(self.testing_data[self.output_column]),
-                                               model.NUM_OF_PREV_ITEMS)
-
+        test_x, test_labels = reconstruct_data(
+            np.array(self.testing_data[input_layers]),
+            np.array(self.testing_data[self.output_column]),
+            model.NUM_OF_PREV_ITEMS,
+        )
+
         test_predict = model.make_prediction(test_x)
 
         test_score = mean_squared_error(test_labels, test_predict)
-        print(f'[{model.__class__.__name__}|{model.model_name}] Score on test set: {test_score} MSE')
+        print(f"[{model.__class__.__name__}|{model.model_name}] Score on test set: {test_score} MSE")
 
         number_of_unlabeled_data = int(len(self.data_set) * self.train_coefficient) + 1 + model.NUM_OF_PREV_ITEMS
-        predict_plot = np.array([[np.nan]] * number_of_unlabeled_data).astype('float32')
-        predict_plot = np.concatenate((predict_plot.astype('float32'),
-                                       test_predict.astype('float32')))
-
+        predict_plot = np.array([[np.nan]] * number_of_unlabeled_data).astype("float32")
+        predict_plot = np.concatenate((predict_plot.astype("float32"), test_predict.astype("float32")))
+
         self._add_line(predict_plot, model.model_name)
 
     def show_graph(self):
         plt.legend(handles=self.lines)
-        plt.show() # TODO show date on axis
+        plt.show()  # TODO show date on axis
diff --git a/predictor/models/data_processing.py b/predictor/models/data_processing.py
index cb478d9..138434c 100644
--- a/predictor/models/data_processing.py
+++ b/predictor/models/data_processing.py
@@ -1,9 +1,9 @@
 import numpy as np
 
 
-def reconstruct_data(input_data, output_data, n: int) -> (np.array, np.array): 
+def reconstruct_data(input_data, output_data, n: int) -> (np.array, np.array):
     """
-    Match each element in output_data (except the first n element) 
+    Match each element in output_data (except the first n element)
     to list of previous n elements of input_data. The data will be reshaped
     so network could see what output should be to the given input data.
 
@@ -12,12 +12,12 @@ def reconstruct_data(input_data, output_data, n: int) -> (np.array, np.array):
     the length of input_data should be = length of output_data.
     Input and output data could be the same or different.
     Input data could contain multiple or single features.
-
+
     For example - input_data = output_data = [[1], [2], [3], [4], [5], [6], [7], [8]], n=3
     The result - [1, 2, 3] -> 4; [2, 3, 4] -> [5] ...
     The result data will be returned in form:
-    (array([[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7]]), 
+    (array([[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7]]),
     array([4, 5, 6, 7, 8]))
 
     :param input_data: array
@@ -29,7 +29,7 @@ def reconstruct_data(input_data, output_data, n: int) -> (np.array, np.array):
     x, y = [], []
 
     for i in range(len(input_data) - n):
-        a = input_data[i:(i + n)]
+        a = input_data[i : (i + n)]
         x.append(a)
         y.append(output_data[i + n])
@@ -37,6 +37,6 @@ def reconstruct_data(input_data, output_data, n: int) -> (np.array, np.array):
 
 
 def train_test_split(data, train_cof):
-    train, test = data[0:int(len(data) * train_cof), :], data[int(len(data) * train_cof):len(data), :]
+    train, test = data[0 : int(len(data) * train_cof), :], data[int(len(data) * train_cof) : len(data), :]
 
     return train, test
diff --git a/predictor/models/fortune-nn-configs b/predictor/models/fortune-nn-configs
index 377b4a1..dc8df92 160000
--- a/predictor/models/fortune-nn-configs
+++ b/predictor/models/fortune-nn-configs
@@ -1 +1 @@
-Subproject commit 377b4a14d6b8b9b7c974acb1b2fe3454cb808d51
+Subproject commit dc8df923098aea11f0686248fb052ccf97a43474
diff --git a/predictor/models/model_handler.py b/predictor/models/model_handler.py
index 4bd17e3..b506466 100644
--- a/predictor/models/model_handler.py
+++ b/predictor/models/model_handler.py
@@ -4,7 +4,7 @@
 
 
 class ModelHandler(ABC):
-    PATH_TO_CONF = os.path.join(os.path.dirname(__file__), 'fortune-nn-configs')
+    PATH_TO_CONF = os.path.join(os.path.dirname(__file__), "fortune-nn-configs")
 
     def __init__(self):
         super().__init__()
@@ -19,7 +19,7 @@ def make_prediction(self, data_set: list) -> list:
 
 
 class ModelTrainer(ABC):
-    PATH_TO_CONF = os.path.join(os.path.dirname(__file__), 'fortune-nn-configs')
+    PATH_TO_CONF = os.path.join(os.path.dirname(__file__), "fortune-nn-configs")
 
     def __init__(self, input_shape: tuple):
         super().__init__()
diff --git a/predictor/models/sf1/snowfall_model.py b/predictor/models/sf1/snowfall_model.py
index 1fa7d74..220fd27 100755
--- a/predictor/models/sf1/snowfall_model.py
+++ b/predictor/models/sf1/snowfall_model.py
@@ -7,17 +7,17 @@
 
 
 class Snowfall(ModelHandler):
-    def __init__(self, model_name='model_15m_50:1_c-c'):
+    def __init__(self, model_name="model_15m_50:1_c-c"):
         super().__init__()
 
         self.model_name = model_name
         path_to_model = os.path.join(self.PATH_TO_CONF, model_name)
         self.model = keras.models.load_model(path_to_model)
 
-        with open(os.path.join(ModelHandler.PATH_TO_CONF, model_name, 'input_scaler'),'rb') as f:
+        with open(os.path.join(ModelHandler.PATH_TO_CONF, model_name, "input_scaler"), "rb") as f:
             self.input_scaler = pickle.load(f)
 
-        with open(os.path.join(ModelHandler.PATH_TO_CONF, model_name, 'output_scaler'),'rb') as f:
+        with open(os.path.join(ModelHandler.PATH_TO_CONF, model_name, "output_scaler"), "rb") as f:
             self.output_scaler = pickle.load(f)
 
         self.input_shape = self.model.layers[0].input_shape[1:]
@@ -25,7 +25,6 @@ def __init__(self, model_name='model_15m_50:1_c-c'):
         self.NUM_OF_PREV_ITEMS = self.model.layers[0].input_shape[1]
 
     def make_prediction(self, data_set):
-
         n_data_set = np.reshape(data_set, (-1, self.input_shape[1]))
 
         # min-max normalization (inverse to (0, 1) range)
diff --git a/predictor/models/sf1/snowfall_training.py b/predictor/models/sf1/snowfall_training.py
index 34f67e1..47a74a4 100755
--- a/predictor/models/sf1/snowfall_training.py
+++ b/predictor/models/sf1/snowfall_training.py
@@ -12,6 +12,7 @@
 from predictor.models.data_processing import reconstruct_data
 from predictor.models.model_handler import ModelTrainer
 
+
 class SnowfallTestTrain(ModelTrainer):
     def __init__(self, shape):
         super().__init__(shape)
@@ -43,16 +44,16 @@ def train(self, train_data, label_data):
 
         train_x, train_y = reconstruct_data(data_transformed, label_transformed, self.NUM_OF_PREV_ITEMS)
 
-        self.model.compile(loss='mean_squared_error', optimizer='adam')
-        self.model.fit(train_x, train_y, epochs=self.epochs, batch_size=self.batch_size , verbose=self.verbose)
+        self.model.compile(loss="mean_squared_error", optimizer="adam")
+        self.model.fit(train_x, train_y, epochs=self.epochs, batch_size=self.batch_size, verbose=self.verbose)
 
     def save_model(self, path):
         self.model.save(path)
 
-        with open(os.path.join(path, 'input_scaler'), 'wb') as save_file:
-            pickle.dump(self.input_scaler, save_file)
+        with open(os.path.join(path, "input_scaler"), "wb") as save_file:
+            pickle.dump(self.input_scaler, save_file)
+
+        with open(os.path.join(path, "output_scaler"), "wb") as save_file:
+            pickle.dump(self.output_scaler, save_file)
 
-        with open(os.path.join(path, 'output_scaler'), 'wb') as save_file:
-            pickle.dump(self.output_scaler, save_file)
-
         print(f"Model 'sf' saved to - {path}")
diff --git a/predictor/predictor_service.py b/predictor/predictor_service.py
index a04d19d..f912617 100644
--- a/predictor/predictor_service.py
+++ b/predictor/predictor_service.py
@@ -14,7 +14,7 @@ def predict(self, data: list) -> int:
         """
         Make predictions using a list of data.
 
-        This method uses the `model_handler` to predict the next number based on the input `data`. 
+        This method uses the `model_handler` to predict the next number based on the input `data`.
         The input `data` should be of the correct size, which can be accessed using `self.model_handler.NUM_OF_PREV_ITEMS`.
         :param data: list of numbers of correct size
@@ -23,6 +23,6 @@ def predict(self, data: list) -> int:
         if np.shape(data)[0] == self.model_handler.NUM_OF_PREV_ITEMS:
             return self.model_handler.predict_next(data)
         else:
-            massage = f"The number of previous items should be {self.model_handler.NUM_OF_PREV_ITEMS}, but not - {np.shape(data)[0]}" 
+            massage = f"The number of previous items should be {self.model_handler.NUM_OF_PREV_ITEMS}, but not - {np.shape(data)[0]}"
             self.logger.error(massage)
             return False
diff --git a/project.py b/project.py
index 97dad73..fe4cf5a 100644
--- a/project.py
+++ b/project.py
@@ -17,13 +17,13 @@
 
 
 class Fortune:
-    CONFIG_PATH = './src/config/'
+    CONFIG_PATH = "./src/config/"
 
     def __init__(self, **kwargs):
-        log_setup.configurate_logs(self.CONFIG_PATH + 'log_config.yml')
+        log_setup.configurate_logs(self.CONFIG_PATH + "log_config.yml")
         self.logger = logging.getLogger(__class__.__name__)
         self.exit_flag = asyncio.Event()
-        self.run_pigamma = kwargs['pigamma']
+        self.run_pigamma = kwargs["pigamma"]
 
         self.client = api_binance.configure_binance_api(self.CONFIG_PATH + api_binance.API.CLIENT_CONFIG_FILE)
         self.trader = trader_service.Trader()
@@ -35,9 +35,7 @@ def __init__(self, **kwargs):
 
     def configure_pigamma_wrapper(self):
         discord_bot.configure_pigamma(
-            self.CONFIG_PATH + discord_bot.PiGamma.CONFIG_FILE,
-            self.stats_queue,
-            self.exit_flag
+            self.CONFIG_PATH + discord_bot.PiGamma.CONFIG_FILE, self.stats_queue, self.exit_flag
         )
 
     def process_iteration(self):
diff --git a/src/__init__.py b/src/__init__.py
index 0990139..cba8a8e 100644
--- a/src/__init__.py
+++ b/src/__init__.py
@@ -10,5 +10,4 @@
 import src.generate_config
 import src.api_binance
 
-print("*** ARA Development | Fortune ***\n"
-      "All rights reserved!")
+print("*** ARA Development | Fortune ***\n" "All rights reserved!")
diff --git a/src/api_binance.py b/src/api_binance.py
index b96079b..b6ab5a5 100644
--- a/src/api_binance.py
+++ b/src/api_binance.py
@@ -18,14 +18,14 @@
 
 def convert_timestamp_to_str(timestamp):
     dt_object = datetime.date.fromtimestamp(timestamp / 1000)
-    return dt_object.strftime('%-d %b %Y')
+    return dt_object.strftime("%-d %b %Y")
 
 
 class API(Client):
-    """ Binance API """
+    """Binance API"""
 
-    CLIENT_CONFIG_FILE = 'api_config.json'
-    BTCUSDT = 'BTCUSDT'
+    CLIENT_CONFIG_FILE = "api_config.json"
+    BTCUSDT = "BTCUSDT"
 
     def __init__(self, api_key, api_secret):
         """
@@ -47,10 +47,10 @@ def _get_new_price(self, symbol):
             self.logger.error(f"Unable to retrieve the latest price!\n{exception}")
             return None
         else:
-            return response_['price']
+            return response_["price"]
 
     def _update_price(self, symbol, interval):
-        """ Retrieves the latest price for a given trading symbol and puts it into the price_queue.
+        """Retrieves the latest price for a given trading symbol and puts it into the price_queue.
 
         :param symbol: The trading pair symbol (e.g., 'BTCUSDT') for which to retrieve the price.
         :param interval: The interval length in minutes for fetching the klines data.
@@ -67,7 +67,7 @@ def _update_price(self, symbol, interval):
             time.sleep(interval_in_seconds)
 
     def launch_price_update_subprocess(self, symbol, interval):
-        """ Launches a subprocess to update the price for the given symbol at the specified interval.
+        """Launches a subprocess to update the price for the given symbol at the specified interval.
 
         This function creates a new subprocess to continuously update the price for the specified
         trading symbol at the given interval. The subprocess runs the `_update_price` method internally.
@@ -86,7 +86,7 @@ def launch_price_update_subprocess(self, symbol, interval):
         self.logger.info("Price update subprocess was launched")
 
     def terminate_price_update_subprocess(self):
-        """ Terminates the subprocess responsible for updating the price.
+        """Terminates the subprocess responsible for updating the price.
 
         This function terminates the subprocess that is responsible for updating the price of a trading
         symbol. If no subprocess is currently running, the function returns without taking any action.
@@ -107,7 +107,7 @@ def terminate_price_update_subprocess(self):
         self.logger.info("Price update subprocess was terminated.")
 
     def await_price_update(self):
-        """ Waits for a price update by retrieving the latest price from the price queue.
+        """Waits for a price update by retrieving the latest price from the price queue.
 
         If the price queue is not empty, this function retrieves and discards all existing prices
         until the queue becomes empty. If the price queue is empty, the function waits and blocks until
@@ -127,7 +127,7 @@ def await_price_update(self):
         return float(price)
 
     def load_price_history(self, symbol, interval):
-        """ Loads historical klines data for a specified symbol and time interval.
+        """Loads historical klines data for a specified symbol and time interval.
 
         This function retrieves historical klines data for the specified trading symbol with the provided
         time interval. The data is fetched starting from '1 Jan 2000' up to the current date and time.
@@ -141,7 +141,7 @@ def load_price_history(self, symbol, interval):
         """
 
         all_data = []
-        end_date = datetime.datetime.now().strftime('%-d %b %Y')
+        end_date = datetime.datetime.now().strftime("%-d %b %Y")
         limit = 1000
 
         while True:
@@ -173,7 +173,7 @@ def _check_column_for_duplicates(self, data, column_index):
         self.logger.info("No data duplication was detected")
 
     def save_price_history_csv(self, symbol, interval, file_path):
-        """ Save price history data to CSV
+        """Save price history data to CSV
 
         :param symbol: The trading pair symbol (e.g., 'BTCUSDT') for which to load historical data.
         :param interval: The time interval for the klines data (e.g., Client.KLINE_INTERVAL_15MINUTE).
@@ -184,7 +184,7 @@ def save_price_history_csv(self, symbol, interval, file_path):
         data = self.load_price_history(symbol, interval)
         self._check_column_for_duplicates(data, 0)
 
-        with open(file_path, 'w', newline='') as csv_file:
+        with open(file_path, "w", newline="") as csv_file:
             csv_writer = csv.writer(csv_file)
             csv_writer.writerow(kline_metric)
             csv_writer.writerows(data)
@@ -201,7 +201,7 @@ def load_last_prices(self, symbol, interval, limit=5):
 
 
 def configure_binance_api(config_file):
-    """ Configures the Binance API client.
+    """Configures the Binance API client.
 
     This function reads the API configuration data from the specified file, creates a Binance API
     client instance, and returns it.
@@ -217,8 +217,8 @@ def configure_binance_api(config_file):
     return client
 
 
-if __name__ == '__main__':
-    api = configure_binance_api('./config/api_config.json')
+if __name__ == "__main__":
+    api = configure_binance_api("./config/api_config.json")
     prices = api.load_price_history(api.BTCUSDT, Client.KLINE_INTERVAL_15MINUTE)
     api.save_price_history_csv(api.BTCUSDT, api.KLINE_INTERVAL_15MINUTE, "./tmp.csv")
     print(len(prices))
diff --git a/src/config_parser.py b/src/config_parser.py
index 5890e33..d4cea93 100644
--- a/src/config_parser.py
+++ b/src/config_parser.py
@@ -5,7 +5,7 @@
 
 
 def parse_json(file_path):
-    """ Reads and parses JSON data from the specified file.
+    """Reads and parses JSON data from the specified file.
 
     :param file_path: The path to the JSON file to be parsed.
     :return: dict or list or None
@@ -25,7 +25,7 @@
 
 
 def get_api_data(file):
-    """ This function reads an API key and API secret from a specified configuration file.
+    """This function reads an API key and API secret from a specified configuration file.
 
     :param file: The path and name of the configuration file.
     :return: A tuple containing the API key and API secret read from the configuration file,
@@ -37,11 +37,11 @@ def get_api_data(file):
         generate_config.generate_api_json(file)
         return -1
 
-    return data['api-key'], data['api-secret']
+    return data["api-key"], data["api-secret"]
 
 
 def get_pigamma_data(file):
-    """ This function reads data from a specified configuration file for the Pigamma service.
+    """This function reads data from a specified configuration file for the Pigamma service.
 
     :param file: The path and name of the configuration file.
     :return: A dictionary containing data read from the configuration file,
diff --git a/src/custom_types/__init__.py b/src/custom_types/__init__.py
index 22e44c0..0a7efc7 100644
--- a/src/custom_types/__init__.py
+++ b/src/custom_types/__init__.py
@@ -1,6 +1,26 @@
-kline_metric = ["open_time", "open", "high", "low", "close", "volume", "close_time",
-                "quote_asset_volume", "number_of_trades", "taker_buy_base_asset_volume",
-                "taker_buy_quote_asset_volume", "ignore"]
-shorten_kline_metric = ["open", "high", "low", "close", "volume",
-                        "quote_asset_volume", "number_of_trades", "taker_buy_base_asset_volume",
-                        "taker_buy_quote_asset_volume", "ignore"]
+kline_metric = [
+    "open_time",
+    "open",
+    "high",
+    "low",
+    "close",
+    "volume",
+    "close_time",
+    "quote_asset_volume",
+    "number_of_trades",
+    "taker_buy_base_asset_volume",
+    "taker_buy_quote_asset_volume",
+    "ignore",
+]
+shorten_kline_metric = [
+    "open",
+    "high",
+    "low",
+    "close",
+    "volume",
+    "quote_asset_volume",
+    "number_of_trades",
+    "taker_buy_base_asset_volume",
+    "taker_buy_quote_asset_volume",
+    "ignore",
+]
diff --git a/src/custom_types/noop_queue.py b/src/custom_types/noop_queue.py
index 92a505b..a739c84 100644
--- a/src/custom_types/noop_queue.py
+++ b/src/custom_types/noop_queue.py
@@ -4,7 +4,7 @@
 
 
 class NoopQueue(Queue):
-    """ A no-operation (no-op) implementation of the Queue class.
+    """A no-operation (no-op) implementation of the Queue class.
 
     This class inherits from the Queue class and overrides the `put` and `get` methods
     to provide a behavior where items are not actually added or retrieved from the queue. Instead, any items added are
@@ -15,9 +15,9 @@ class NoopQueue(Queue):
     """
 
     def put(self, *args):
-        """ Discard the provided items without adding them to the queue."""
+        """Discard the provided items without adding them to the queue."""
         return
 
     def get(self, *args):
-        """ Return None without retrieving any items from the queue."""
+        """Return None without retrieving any items from the queue."""
         return None
diff --git a/src/discord_bot.py b/src/discord_bot.py
index 9c05359..08fdb62 100644
--- a/src/discord_bot.py
+++ b/src/discord_bot.py
@@ -11,7 +11,7 @@
 
 
 class PiGamma(discord.Client):
-    CONFIG_FILE = 'pigamma_config.json'
+    CONFIG_FILE = "pigamma_config.json"
 
     def __init__(self, token, channel_id: int, stats_queue: Queue, exit_flag: asyncio.Event):
         super().__init__(intents=discord.Intents.default())
@@ -74,4 +74,4 @@ async def on_message(self, message):
 
 def configure_pigamma(file, stats_queue, exit_flag):
     data = config_parser.get_pigamma_data(file)
-    PiGamma(data['TOKEN'], int(data['CHANNEL_ID']), stats_queue, exit_flag)
+    PiGamma(data["TOKEN"], int(data["CHANNEL_ID"]), stats_queue, exit_flag)
diff --git a/src/generate_config.py b/src/generate_config.py
index 3dd46e5..e642d1a 100644
--- a/src/generate_config.py
+++ b/src/generate_config.py
@@ -2,19 +2,19 @@
 
 
 def generate_json(file: str, data: dict):
-    """ Generates a JSON file with the provided data dictionary.
+    """Generates a JSON file with the provided data dictionary.
 
     :param file: The path and name of the JSON file to be created.
     :param data: The dictionary containing data to be written to the JSON file.
     """
     json_string = json.dumps(data)
-    json_file = open(file, 'w')
+    json_file = open(file, "w")
     json_file.write(json_string)
     json_file.close()
 
 
 def generate_api_json(file: str):
-    """ Creates an API JSON configuration file at the specified path.
+    """Creates an API JSON configuration file at the specified path.
 
     This function generates a new JSON configuration file for API settings at the provided file path.
     The file will contain keys for "api-key" and "api-secret," initially set to None to be filled with actual API credentials later.
@@ -23,12 +23,11 @@ def generate_api_json(file: str):
     """
     conf_dict = {"api-key": None, "api-secret": None}
     generate_json(file, conf_dict)
-    print(f"[Warning] API config file {file} was created.\n"
-          "Please, complete your details in it.")
+    print(f"[Warning] API config file {file} was created.\n" "Please, complete your details in it.")
 
 
 def generate_pigamma_json(file: str):
-    """ Creates a PiGamma JSON configuration file at the specified path.
+    """Creates a PiGamma JSON configuration file at the specified path.
 
     This function generates a new JSON configuration file for PiGamma settings at the provided file path.
     The file will contain keys for "TOKEN" and "CHANNEL_ID" initially set to None, to be filled with the required values later.
@@ -37,9 +36,8 @@ def generate_pigamma_json(file: str):
     """
     conf_dict = {"TOKEN": None, "CHANNEL_ID": None}
     generate_json(file, conf_dict)
-    print(f"[Warning] PiGamma config file {file} was created.\n"
-          "Please, complete your details in it.")
+    print(f"[Warning] PiGamma config file {file} was created.\n" "Please, complete your details in it.")
 
 
-if __name__ == '__main__':
-    generate_api_json('test')
+if __name__ == "__main__":
+    generate_api_json("test")
diff --git a/src/log_setup.py b/src/log_setup.py
index e81e695..60b0a44 100644
--- a/src/log_setup.py
+++ b/src/log_setup.py
@@ -14,13 +14,12 @@ def configurate_logs(file):
         logging.error("[FileNotFoundError] Logs were not configured properly!")
 
     else:
-        timestamp = time.strftime('%Y-%m-%d_(%H-%M-%S)', time.localtime())
+        timestamp = time.strftime("%Y-%m-%d_(%H-%M-%S)", time.localtime())
         config["handlers"]["file_handler"]["filename"] = f"./logs/{timestamp}.log"
 
         logging.config.dictConfig(config)
         logging.info("Logs were configured successfully.")
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     configurate_logs("./config/log_config.yml")
-
diff --git a/src/parse_arguments.py b/src/parse_arguments.py
index d19c5bc..51d3c5f 100644
--- a/src/parse_arguments.py
+++ b/src/parse_arguments.py
@@ -2,11 +2,10 @@
 
 
 def parse_arguments():
-    parser = argparse.ArgumentParser(description='Run Fortune')
-    parser.add_argument('-p', '--pigamma', action='store_true', help='enables Pi Gamma')
-    parser.add_argument('--test', action='store_true', help='start only testing')
-    parser.add_argument('--train', action='store_true', help='start only training')
-
+    parser = argparse.ArgumentParser(description="Run Fortune")
+    parser.add_argument("-p", "--pigamma", action="store_true", help="enables Pi Gamma")
+    parser.add_argument("--test", action="store_true", help="start only testing")
+    parser.add_argument("--train", action="store_true", help="start only training")
     args = parser.parse_args()
 
     return vars(args)
diff --git a/trader/__init__.py b/trader/__init__.py
index b0ee16f..1767256 100644
--- a/trader/__init__.py
+++ b/trader/__init__.py
@@ -5,4 +5,3 @@
 
 Get commands from Analysing Module.
 """
-
diff --git a/trader/demo_account.py b/trader/demo_account.py
index ad0dc5c..7da93ca 100644
--- a/trader/demo_account.py
+++ b/trader/demo_account.py
@@ -22,7 +22,7 @@ def _has_sufficient_funds(self, amount):
         return True
 
     def deposit(self, amount):
-        """ Deposit the specified amount into the account.
+        """Deposit the specified amount into the account.
         :param amount: The amount to be deposited.
         """
         if not self._is_valid_amount(amount):
@@ -32,7 +32,7 @@ def deposit(self, amount):
         self.logger.info(f"Deposited {amount} {self.currency}. New balance: {self.balance} {self.currency}.")
 
     def withdraw(self, amount):
-        """ Withdraw the specified amount from the account.
+        """Withdraw the specified amount from the account.
 
         :param amount: The amount to be withdrawn.
         :return: True if the withdrawal was successful, False otherwise.
@@ -45,7 +45,7 @@ def withdraw(self, amount):
         return True
 
     def transfer(self, to_account, amount=None, exchange_rate=1):
-        """ Transfer funds to another account.
+        """Transfer funds to another account.
 
         :param to_account: The target account to transfer funds to.
         :type to_account: DemoAccount
diff --git a/trader/trader_service.py b/trader/trader_service.py
index 8260fd3..31ac981 100644
--- a/trader/trader_service.py
+++ b/trader/trader_service.py
@@ -15,7 +15,7 @@ def __init__(self):
         self.logger.info("Trader was configured successfully")
 
     def demo_trade(self, profitable, exchange_rate):
-        """ Performs a demo trade based on the profitability and exchange rate.
+        """Performs a demo trade based on the profitability and exchange rate.
 
         :param profitable: A boolean indicating whether the trade is profitable (True) or not (False).
         :param exchange_rate: The exchange rate to be used for the transfer of assets between accounts.
@@ -28,7 +28,7 @@ def demo_trade(self, profitable, exchange_rate):
             self.bitcoin_account.transfer(self.dollars_account, exchange_rate=exchange_rate)
 
     def generate_stats(self, exchange_rate=None):
-        """ Generates statistics related to the account balances.
+        """Generates statistics related to the account balances.
 
         :param exchange_rate: The exchange rate. If not provided, the total value in dollars will not be calculated.
         :return: A Statistics named tuple containing the account statistics and the total value in dollars.
@@ -36,18 +36,17 @@ def generate_stats(self, exchange_rate=None):
         total_in_usd = None
 
         if exchange_rate is not None:
-            total_in_usd = AccountStats(currency=self.dollars_account.currency,
-                                        balance=self.dollars_account.balance + (
-                                                self.bitcoin_account.balance * exchange_rate))
+            total_in_usd = AccountStats(
+                currency=self.dollars_account.currency,
+                balance=self.dollars_account.balance + (self.bitcoin_account.balance * exchange_rate),
+            )
 
         stats = Statistics(
             accounts=[
-                AccountStats(currency=self.dollars_account.currency,
-                             balance=self.dollars_account.balance),
-                AccountStats(currency=self.bitcoin_account.currency,
-                             balance=self.bitcoin_account.balance)
+                AccountStats(currency=self.dollars_account.currency, balance=self.dollars_account.balance),
+                AccountStats(currency=self.bitcoin_account.currency, balance=self.bitcoin_account.balance),
             ],
-            total=total_in_usd
+            total=total_in_usd,
         )
 
         return stats
diff --git a/train_testing.py b/train_testing.py
index e44b109..7289d03 100644
--- a/train_testing.py
+++ b/train_testing.py
@@ -7,30 +7,34 @@
 from src.parse_arguments import parse_arguments
 from predictor.models.sf1.snowfall_model import Snowfall
 
+
 def test(data: list):
-    output_column = ['close']
+    output_column = ["close"]
 
     test = ModelTesting(data, 0.7, output_column)
-    test.add_model(Snowfall('model_15m_50:10_all-c'), shorten_kline_metric)
-    test.add_model(Snowfall(), ['close'])
+    test.add_model(Snowfall("model_15m_50:10_all-c"), shorten_kline_metric)
+    test.add_model(Snowfall(), ["close"])
     test.show_graph()
 
+
 def train_main(data):
-    output_data = data[['close']]
+    output_data = data[["close"]]
     input_data = data[shorten_kline_metric]
 
     model = SnowfallTestTrain((50, 10))
-    train(model, input_data, output_data, 'model_15m_50:10_all-c')
+    train(model, input_data, output_data, "model_15m_50:10_all-c")
 
+
 def train(model, input_data, output_data, save_file):
     model.train(input_data, output_data)
     model.save_model(os.path.join(model.PATH_TO_CONF, save_file))
 
+
 if __name__ == "__main__":
     args_dict = parse_arguments()
 
-    data = read_csv('predictor/dataset/btc_data.csv')
-    data = data.astype('float32')
+    data = read_csv("predictor/dataset/btc_data.csv")
+    data = data.astype("float32")
 
     if args_dict["train"] or not args_dict["test"]:
         train_main(data)
diff --git a/unit_tests/config_parser_test.py b/unit_tests/config_parser_test.py
index c5e2417..5476e69 100644
--- a/unit_tests/config_parser_test.py
+++ b/unit_tests/config_parser_test.py
@@ -11,17 +11,14 @@
 
 def create_temp_file(data):
     temp_file = tempfile.NamedTemporaryFile(delete=False)
-    with open(temp_file.name, 'w') as f:
+    with open(temp_file.name, "w") as f:
         json.dump(data, f)
     return temp_file.name
 
 
 class TestFileParsing(unittest.TestCase):
     def setUp(self):
-        self.config_data = {
-            'api-key': 'your-api-key',
-            'api-secret': 'your-api-secret'
-        }
+        self.config_data = {"api-key": "your-api-key", "api-secret": "your-api-secret"}
 
     def tearDown(self):
         pass
@@ -29,27 +26,27 @@ def tearDown(self):
     def test_get_api_data_valid_file(self):
         file_path = create_temp_file(self.config_data)
         api_key, api_secret = get_api_data(file_path)
-        self.assertEqual(api_key, self.config_data['api-key'])
-        self.assertEqual(api_secret, self.config_data['api-secret'])
+        self.assertEqual(api_key, self.config_data["api-key"])
+        self.assertEqual(api_secret, self.config_data["api-secret"])
 
     def test_get_api_data_invalid_file(self):
-        file_path = 'non_existent_file.json'
-        with patch('src.config_parser.generate_config.generate_api_json'):
+        file_path = "non_existent_file.json"
+        with patch("src.config_parser.generate_config.generate_api_json"):
             result = get_api_data(file_path)
             self.assertEqual(result, -1)
 
     def test_get_pigamma_data_valid_file(self):
-        pigamma_data = {'some_key': 'some_value'}
+        pigamma_data = {"some_key": "some_value"}
         file_path = create_temp_file(pigamma_data)
         data = get_pigamma_data(file_path)
         self.assertEqual(data, pigamma_data)
 
     def test_get_pigamma_data_invalid_file(self):
-        file_path = 'non_existent_file.json'
-        with patch('src.config_parser.generate_config.generate_pigamma_json'):
+        file_path = "non_existent_file.json"
+        with patch("src.config_parser.generate_config.generate_pigamma_json"):
             result = get_pigamma_data(file_path)
             self.assertEqual(result, None)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/unit_tests/demo_account_test.py b/unit_tests/demo_account_test.py
index 8e7a896..4f3db51 100644
--- a/unit_tests/demo_account_test.py
+++ b/unit_tests/demo_account_test.py
@@ -69,5 +69,5 @@ def test_transfer_insufficient_funds(self):
         self.assertEqual(to_account.balance, 500)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/unit_tests/generate_config_test.py b/unit_tests/generate_config_test.py
index 8070830..34b5132 100644
--- a/unit_tests/generate_config_test.py
+++ b/unit_tests/generate_config_test.py
@@ -5,7 +5,6 @@
 
 
 class TestGenerateJsonFunctions(unittest.TestCase):
-
     def setUp(self):
         self.test_data = {"name": "John", "age": 30}
         self.test_file = "test_file.json"
@@ -18,7 +17,7 @@ def test_generate_json(self):
         generate_json(self.test_file, self.test_data)
         self.assertTrue(os.path.exists(self.test_file))
 
-        with open(self.test_file, 'r') as file:
+        with open(self.test_file, "r") as file:
             data_from_file = json.load(file)
             self.assertEqual(data_from_file, self.test_data)
 
@@ -27,7 +26,7 @@ def test_generate_api_json(self):
         self.assertTrue(os.path.exists(self.test_file))
 
         expected_data = {"api-key": None, "api-secret": None}
-        with open(self.test_file, 'r') as file:
+        with open(self.test_file, "r") as file:
             data_from_file = json.load(file)
             self.assertEqual(data_from_file, expected_data)
 
@@ -36,10 +35,10 @@ def test_generate_pigamma_json(self):
         self.assertTrue(os.path.exists(self.test_file))
 
         expected_data = {"TOKEN": None, "CHANNEL_ID": None}
-        with open(self.test_file, 'r') as file:
+        with open(self.test_file, "r") as file:
             data_from_file = json.load(file)
             self.assertEqual(data_from_file, expected_data)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
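
To reproduce the CI check locally before pushing, here is a minimal sketch (it assumes
the `black` package is installed; super-linter may pin a different Black version, so
results can differ slightly):

    pip install black
    black --check --diff --line-length 100 .   # report what Black would reformat
    black --line-length 100 .                  # rewrite files in place

The `--line-length 100` flag mirrors VALIDATE_PYTHON_BLACK_ARGS in
.github/workflows/linter.yml, so a clean local run should also pass the workflow check.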