[Community] init gpt history
GuillaumeDSM committed Oct 25, 2023
1 parent 4464a4f commit 6895fda
Showing 6 changed files with 228 additions and 84 deletions.
2 changes: 2 additions & 0 deletions octobot/api/__init__.py
@@ -34,6 +34,7 @@
initialize_and_run_independent_backtesting,
join_independent_backtesting,
initialize_independent_backtesting_config,
clear_backtesting_fetched_data,
stop_independent_backtesting,
join_independent_backtesting_stop,
get_independent_backtesting_report,
@@ -86,6 +87,7 @@
"initialize_and_run_independent_backtesting",
"join_independent_backtesting",
"initialize_independent_backtesting_config",
"clear_backtesting_fetched_data",
"stop_independent_backtesting",
"join_independent_backtesting_stop",
"get_independent_backtesting_report",
43 changes: 25 additions & 18 deletions octobot/api/backtesting.py
@@ -17,23 +17,25 @@
import octobot_backtesting.constants as constants


def create_independent_backtesting(config,
tentacles_setup_config,
data_files,
data_file_path=constants.BACKTESTING_FILE_PATH,
join_backtesting_timeout=constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
run_on_common_part_only=True,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
stop_when_finished=False,
name=None,
enforce_total_databases_max_size_after_run=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
config_by_tentacle=None) \
-> backtesting.IndependentBacktesting:
def create_independent_backtesting(
config,
tentacles_setup_config,
data_files,
data_file_path=constants.BACKTESTING_FILE_PATH,
join_backtesting_timeout=constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
run_on_common_part_only=True,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
stop_when_finished=False,
name=None,
enforce_total_databases_max_size_after_run=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
config_by_tentacle=None,
services_config=None
) -> backtesting.IndependentBacktesting:
return backtesting.IndependentBacktesting(
config, tentacles_setup_config, data_files,
data_file_path,
@@ -48,7 +50,8 @@ def create_independent_backtesting(config,
enable_storage=enable_storage,
run_on_all_available_time_frames=run_on_all_available_time_frames,
backtesting_data=backtesting_data,
config_by_tentacle=config_by_tentacle
config_by_tentacle=config_by_tentacle,
services_config=services_config,
)


@@ -64,6 +67,10 @@ async def initialize_independent_backtesting_config(independent_backtesting) ->
return await independent_backtesting.initialize_config()


async def clear_backtesting_fetched_data(independent_backtesting):
await independent_backtesting.clear_fetched_data()


async def stop_independent_backtesting(independent_backtesting, memory_check=False, should_raise=False) -> None:
await independent_backtesting.stop(memory_check=memory_check, should_raise=should_raise)
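
Taken together, the new services_config parameter and clear_backtesting_fetched_data() helper suggest a call pattern roughly like the sketch below. This is a hypothetical driver, not code from this commit: the config, tentacles_config, data_files and services_config objects are placeholders supplied by the caller's own setup, and only the awaitables shown elsewhere in this commit are used.

import octobot.api as octobot_api

async def run_backtesting_with_gpt_history(config, tentacles_config, data_files, services_config):
    # config, tentacles_config, data_files and services_config are assumed to come
    # from the caller's own configuration loading
    independent_backtesting = octobot_api.create_independent_backtesting(
        config,
        tentacles_config,
        data_files,
        services_config=services_config,  # new parameter: forwarded down to the GPT service lookup
    )
    await independent_backtesting.initialize_and_run()
    # wait for the time updater to reach the end of the backtesting window
    await independent_backtesting.join_backtesting_updater(timeout=None)
    # new helper: release any GPT signal history fetched for this run
    await octobot_api.clear_backtesting_fetched_data(independent_backtesting)
    await octobot_api.stop_independent_backtesting(independent_backtesting)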

68 changes: 39 additions & 29 deletions octobot/backtesting/independent_backtesting.py
@@ -43,22 +43,26 @@


class IndependentBacktesting:
def __init__(self, config,
tentacles_setup_config,
backtesting_files,
data_file_path=backtesting_constants.BACKTESTING_FILE_PATH,
run_on_common_part_only=True,
join_backtesting_timeout=backtesting_constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
stop_when_finished=False,
name=None,
enforce_total_databases_max_size_after_run=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
config_by_tentacle=None):
def __init__(
self,
config,
tentacles_setup_config,
backtesting_files,
data_file_path=backtesting_constants.BACKTESTING_FILE_PATH,
run_on_common_part_only=True,
join_backtesting_timeout=backtesting_constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
stop_when_finished=False,
name=None,
enforce_total_databases_max_size_after_run=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
config_by_tentacle=None,
services_config=None,
):
self.octobot_origin_config = config
self.tentacles_setup_config = tentacles_setup_config
self.backtesting_config = {}
@@ -83,19 +87,22 @@ def __init__(self, config,
self.previous_handlers_log_level = commons_logging.get_logger_level_per_handler()
self.enforce_total_databases_max_size_after_run = enforce_total_databases_max_size_after_run
self.backtesting_data = backtesting_data
self.octobot_backtesting = backtesting.OctoBotBacktesting(self.backtesting_config,
self.tentacles_setup_config,
self.symbols_to_create_exchange_classes,
self.backtesting_files,
run_on_common_part_only,
start_timestamp=start_timestamp,
end_timestamp=end_timestamp,
enable_logs=self.enable_logs,
enable_storage=enable_storage,
run_on_all_available_time_frames=run_on_all_available_time_frames,
backtesting_data=self.backtesting_data,
name=name,
config_by_tentacle=config_by_tentacle)
self.octobot_backtesting = backtesting.OctoBotBacktesting(
self.backtesting_config,
self.tentacles_setup_config,
self.symbols_to_create_exchange_classes,
self.backtesting_files,
run_on_common_part_only,
start_timestamp=start_timestamp,
end_timestamp=end_timestamp,
enable_logs=self.enable_logs,
enable_storage=enable_storage,
run_on_all_available_time_frames=run_on_all_available_time_frames,
backtesting_data=self.backtesting_data,
name=name,
config_by_tentacle=config_by_tentacle,
services_config=services_config,
)

async def initialize_and_run(self, log_errors=True):
try:
@@ -134,6 +141,9 @@ async def join_backtesting_updater(self, timeout=None):
if self.octobot_backtesting.backtesting is not None:
await asyncio.wait_for(self.octobot_backtesting.backtesting.time_updater.finished_event.wait(), timeout)

async def clear_fetched_data(self):
await self.octobot_backtesting.clear_fetched_data()

async def stop(self, memory_check=False, should_raise=False):
try:
if not self.stopped:
100 changes: 78 additions & 22 deletions octobot/backtesting/octobot_backtesting.py
@@ -24,6 +24,8 @@
import octobot_commons.configuration as commons_configuration
import octobot_commons.databases as commons_databases
import octobot_commons.constants as commons_constants
import octobot_commons.enums as commons_enums
import octobot_commons.list_util as list_util
import octobot_commons.asyncio_tools as asyncio_tools

import octobot_backtesting.api as backtesting_api
@@ -48,20 +50,23 @@


class OctoBotBacktesting:

def __init__(self, backtesting_config,
tentacles_setup_config,
symbols_to_create_exchange_classes,
backtesting_files,
run_on_common_part_only,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
name=None,
config_by_tentacle=None):
def __init__(
self,
backtesting_config,
tentacles_setup_config,
symbols_to_create_exchange_classes,
backtesting_files,
run_on_common_part_only,
start_timestamp=None,
end_timestamp=None,
enable_logs=True,
enable_storage=True,
run_on_all_available_time_frames=False,
backtesting_data=None,
name=None,
config_by_tentacle=None,
services_config=None,
):
self.logger = commons_logging.get_logger(self.__class__.__name__)
self.backtesting_config = backtesting_config
self.tentacles_setup_config = tentacles_setup_config
@@ -91,6 +96,8 @@ def __init__(self, backtesting_config,
self.enable_storage = enable_storage
self.run_on_all_available_time_frames = run_on_all_available_time_frames
self._has_started = False
self.has_fetched_data = False
self.services_config = services_config

async def initialize_and_run(self):
if not constants.ENABLE_BACKTESTING:
@@ -110,9 +117,13 @@ async def initialize_and_run(self):
await self._init_backtesting()
await self._init_evaluators()
await self._init_service_feeds()
min_timestamp, max_timestamp = await self._configure_backtesting_time_window()
await self._init_exchanges()
self._ensure_limits()
await self._create_evaluators()
await self._fetch_backtesting_extra_data_if_any(
min_timestamp, max_timestamp
)
await self._create_service_feeds()
await backtesting_api.start_backtesting(self.backtesting)
if logger.BOT_CHANNEL_LOGGER is not None and self.enable_logs:
@@ -275,7 +286,9 @@ async def _init_matrix(self):
self.matrix_id = evaluator_api.create_matrix()

async def _init_evaluators(self):
await evaluator_api.initialize_evaluators(self.backtesting_config, self.tentacles_setup_config)
await evaluator_api.initialize_evaluators(
self.backtesting_config, self.tentacles_setup_config, config_by_evaluator=self.config_by_tentacle
)
if (not self.backtesting_config[commons_constants.CONFIG_TIME_FRAME]) and \
evaluator_constants.CONFIG_FORCED_TIME_FRAME in self.backtesting_config:
self.backtesting_config[commons_constants.CONFIG_TIME_FRAME] = self.backtesting_config[
@@ -341,16 +354,59 @@ async def _init_backtesting(self):
for tf in self.backtesting.importers[0].time_frames
]

async def _init_exchanges(self):
async def _configure_backtesting_time_window(self):
# modify_backtesting_channels before creating exchanges as they require the current backtesting time to
# initialize
await backtesting_api.adapt_backtesting_channels(self.backtesting,
self.backtesting_config,
importers.ExchangeDataImporter,
run_on_common_part_only=self.run_on_common_part_only,
start_timestamp=self.start_timestamp,
end_timestamp=self.end_timestamp)
min_timestamp, max_timestamp = await backtesting_api.adapt_backtesting_channels(
self.backtesting,
self.backtesting_config,
importers.ExchangeDataImporter,
run_on_common_part_only=self.run_on_common_part_only,
start_timestamp=self.start_timestamp,
end_timestamp=self.end_timestamp
)
return min_timestamp, max_timestamp

async def _fetch_backtesting_extra_data_if_any(
self, min_timestamp: float, max_timestamp: float
):
handled_classes = set()
coros = []
for evaluator in list_util.flatten_list(self.evaluators):
if evaluator and evaluator.get_name() not in handled_classes:
if evaluator.get_signals_history_type() == commons_enums.SignalHistoryTypes.GPT:
coros.append(self._fetch_gpt_history(evaluator, min_timestamp, max_timestamp))
handled_classes.add(evaluator.get_name())
if coros:
self.has_fetched_data = True
await asyncio.gather(*coros)

async def _fetch_gpt_history(self, evaluator, min_timestamp: float, max_timestamp: float):
# prevent circular import
import tentacles.Services.Services_bases.gpt_service as gpt_service
service = await service_api.get_service(gpt_service.GPTService, True, self.services_config)
version = evaluator.get_version()
for exchange_id in self.exchange_manager_ids:
exchange_configuration = trading_api.get_exchange_configuration_from_exchange_id(exchange_id)
exchange_name = trading_api.get_exchange_name(
trading_api.get_exchange_manager_from_exchange_id(exchange_id)
)
await service.fetch_gpt_history(
exchange_name,
[str(symbol) for symbol in self.symbols_to_create_exchange_classes.get(exchange_name, [])],
exchange_configuration.available_required_time_frames,
version,
min_timestamp,
max_timestamp
)

async def clear_fetched_data(self):
if self.has_fetched_data:
# prevent circular import
import tentacles.Services.Services_bases.gpt_service as gpt_service
(await service_api.get_service(gpt_service.GPTService, True, self.services_config)).clear_signal_history()

async def _init_exchanges(self):
for exchange_class_string in self.symbols_to_create_exchange_classes.keys():
is_future = self.exchange_type_by_exchange[exchange_class_string] == \
commons_constants.CONFIG_EXCHANGE_FUTURE
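
The pre-fetch step added above dispatches on each evaluator's declared signal-history type: evaluators reporting SignalHistoryTypes.GPT get their GPT signal history fetched once per class for the backtesting window (per exchange, symbol and available time frame), and has_fetched_data lets clear_fetched_data() drop it again after the run. A GPT-backed evaluator would therefore be expected to expose something along the lines of the sketch below; the base-class import, the use of classmethods and the version string are assumptions for illustration, not part of this commit.

import octobot_commons.enums as commons_enums
import octobot_evaluators.evaluators as evaluators  # assumed base-class location

class ExampleGPTEvaluator(evaluators.TAEvaluator):
    @classmethod
    def get_signals_history_type(cls):
        # picked up by OctoBotBacktesting._fetch_backtesting_extra_data_if_any to
        # pre-fetch GPT signal history before the backtesting run starts
        return commons_enums.SignalHistoryTypes.GPT

    @classmethod
    def get_version(cls):
        # identifies which stored signal version _fetch_gpt_history should request
        return "example-version"  # placeholder value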
13 changes: 13 additions & 0 deletions octobot/community/authentication.py
@@ -134,6 +134,19 @@ async def get_deployment_url(self):
)
return self.user_account.get_bot_deployment_url(deployment_url_data)

async def get_gpt_signal(
self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, candle_open_time: float, version: str
) -> str:
return await self.supabase_client.fetch_gpt_signal(exchange, symbol, time_frame, candle_open_time, version)

async def get_gpt_signals_history(
self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames,
first_open_time: float, last_open_time: float, version: str
) -> dict:
return await self.supabase_client.fetch_gpt_signals_history(
exchange, symbol, time_frame, first_open_time, last_open_time, version
)

def get_is_signal_receiver(self):
if self._community_feed is None:
return False
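
On the community side, these two accessors expose the Supabase-backed GPT signal store to any code holding an initialized authenticator, roughly as in the sketch below. The exchange, symbol, timestamps and version string are placeholder values, not values from this commit.

import octobot_commons.enums as commons_enums

async def fetch_example_gpt_history(authenticator):
    # authenticator is an already-initialized community authentication instance
    return await authenticator.get_gpt_signals_history(
        "binance",                          # placeholder exchange
        "BTC/USDT",                         # placeholder symbol
        commons_enums.TimeFrames.ONE_HOUR,
        1698192000,                         # placeholder first candle open time (seconds)
        1698278400,                         # placeholder last candle open time (seconds)
        "example-version",                  # placeholder signal version
    )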