From 788b32d02fa313bdc827fffc61be654ee157fdec Mon Sep 17 00:00:00 2001
From: Guillaume De Saint Martin
Date: Thu, 26 Oct 2023 00:24:15 +0200
Subject: [PATCH 1/3] [Community] init gpt history

---
 octobot/api/__init__.py                        |   2 +
 octobot/api/backtesting.py                     |  43 ++++----
 .../backtesting/independent_backtesting.py     |  68 +++++++-----
 octobot/backtesting/octobot_backtesting.py     | 102 ++++++++++++++----
 octobot/community/authentication.py            |  13 +++
 .../community_supabase_client.py               |  86 ++++++++++++---
 6 files changed, 230 insertions(+), 84 deletions(-)

diff --git a/octobot/api/__init__.py b/octobot/api/__init__.py
index 63b79e180..e8996efd5 100644
--- a/octobot/api/__init__.py
+++ b/octobot/api/__init__.py
@@ -34,6 +34,7 @@
     initialize_and_run_independent_backtesting,
     join_independent_backtesting,
     initialize_independent_backtesting_config,
+    clear_backtesting_fetched_data,
     stop_independent_backtesting,
     join_independent_backtesting_stop,
     get_independent_backtesting_report,
@@ -86,6 +87,7 @@
     "initialize_and_run_independent_backtesting",
     "join_independent_backtesting",
     "initialize_independent_backtesting_config",
+    "clear_backtesting_fetched_data",
     "stop_independent_backtesting",
     "join_independent_backtesting_stop",
     "get_independent_backtesting_report",
diff --git a/octobot/api/backtesting.py b/octobot/api/backtesting.py
index 2a99271b1..5dba18105 100644
--- a/octobot/api/backtesting.py
+++ b/octobot/api/backtesting.py
@@ -17,23 +17,25 @@
 import octobot_backtesting.constants as constants


-def create_independent_backtesting(config,
-                                   tentacles_setup_config,
-                                   data_files,
-                                   data_file_path=constants.BACKTESTING_FILE_PATH,
-                                   join_backtesting_timeout=constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
-                                   run_on_common_part_only=True,
-                                   start_timestamp=None,
-                                   end_timestamp=None,
-                                   enable_logs=True,
-                                   stop_when_finished=False,
-                                   name=None,
-                                   enforce_total_databases_max_size_after_run=True,
-                                   enable_storage=True,
-                                   run_on_all_available_time_frames=False,
-                                   backtesting_data=None,
-                                   config_by_tentacle=None) \
-        -> backtesting.IndependentBacktesting:
+def create_independent_backtesting(
+    config,
+    tentacles_setup_config,
+    data_files,
+    data_file_path=constants.BACKTESTING_FILE_PATH,
+    join_backtesting_timeout=constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
+    run_on_common_part_only=True,
+    start_timestamp=None,
+    end_timestamp=None,
+    enable_logs=True,
+    stop_when_finished=False,
+    name=None,
+    enforce_total_databases_max_size_after_run=True,
+    enable_storage=True,
+    run_on_all_available_time_frames=False,
+    backtesting_data=None,
+    config_by_tentacle=None,
+    services_config=None
+) -> backtesting.IndependentBacktesting:
     return backtesting.IndependentBacktesting(
         config, tentacles_setup_config, data_files,
         data_file_path,
@@ -48,7 +50,8 @@ def create_independent_backtesting(config,
         enable_storage=enable_storage,
         run_on_all_available_time_frames=run_on_all_available_time_frames,
         backtesting_data=backtesting_data,
-        config_by_tentacle=config_by_tentacle
+        config_by_tentacle=config_by_tentacle,
+        services_config=services_config,
     )
@@ -64,6 +67,10 @@ async def initialize_independent_backtesting_config(independent_backtesting) ->
     return await independent_backtesting.initialize_config()


+async def clear_backtesting_fetched_data(independent_backtesting):
+    await independent_backtesting.clear_fetched_data()
+
+
 async def stop_independent_backtesting(independent_backtesting, memory_check=False, should_raise=False) -> None:
     await independent_backtesting.stop(memory_check=memory_check, should_raise=should_raise)
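A sketch of how the patched entry points compose. The octobot.api module path and the pre-built config, tentacles_setup_config and data_files objects are assumptions; only the function names and the new services_config parameter come from the diff above:

    import octobot.api as api

    async def run_backtest(config, tentacles_setup_config, data_files, services_config):
        independent_backtesting = api.create_independent_backtesting(
            config, tentacles_setup_config, data_files,
            services_config=services_config,  # new parameter introduced by this patch
        )
        await api.initialize_and_run_independent_backtesting(independent_backtesting)
        await api.join_independent_backtesting(independent_backtesting)
        # new API: drop any signal history fetched for this run (e.g. GPT signals)
        await api.clear_backtesting_fetched_data(independent_backtesting)
        await api.stop_independent_backtesting(independent_backtesting)
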
diff --git a/octobot/backtesting/independent_backtesting.py b/octobot/backtesting/independent_backtesting.py
index 03dfa3ba7..e018f1475 100644
--- a/octobot/backtesting/independent_backtesting.py
+++ b/octobot/backtesting/independent_backtesting.py
@@ -43,22 +43,26 @@
 class IndependentBacktesting:
-    def __init__(self, config,
-                 tentacles_setup_config,
-                 backtesting_files,
-                 data_file_path=backtesting_constants.BACKTESTING_FILE_PATH,
-                 run_on_common_part_only=True,
-                 join_backtesting_timeout=backtesting_constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
-                 start_timestamp=None,
-                 end_timestamp=None,
-                 enable_logs=True,
-                 stop_when_finished=False,
-                 name=None,
-                 enforce_total_databases_max_size_after_run=True,
-                 enable_storage=True,
-                 run_on_all_available_time_frames=False,
-                 backtesting_data=None,
-                 config_by_tentacle=None):
+    def __init__(
+        self,
+        config,
+        tentacles_setup_config,
+        backtesting_files,
+        data_file_path=backtesting_constants.BACKTESTING_FILE_PATH,
+        run_on_common_part_only=True,
+        join_backtesting_timeout=backtesting_constants.BACKTESTING_DEFAULT_JOIN_TIMEOUT,
+        start_timestamp=None,
+        end_timestamp=None,
+        enable_logs=True,
+        stop_when_finished=False,
+        name=None,
+        enforce_total_databases_max_size_after_run=True,
+        enable_storage=True,
+        run_on_all_available_time_frames=False,
+        backtesting_data=None,
+        config_by_tentacle=None,
+        services_config=None,
+    ):
         self.octobot_origin_config = config
         self.tentacles_setup_config = tentacles_setup_config
         self.backtesting_config = {}
@@ -83,19 +87,22 @@ def __init__(self, config,
         self.previous_handlers_log_level = commons_logging.get_logger_level_per_handler()
         self.enforce_total_databases_max_size_after_run = enforce_total_databases_max_size_after_run
         self.backtesting_data = backtesting_data
-        self.octobot_backtesting = backtesting.OctoBotBacktesting(self.backtesting_config,
-                                                                  self.tentacles_setup_config,
-                                                                  self.symbols_to_create_exchange_classes,
-                                                                  self.backtesting_files,
-                                                                  run_on_common_part_only,
-                                                                  start_timestamp=start_timestamp,
-                                                                  end_timestamp=end_timestamp,
-                                                                  enable_logs=self.enable_logs,
-                                                                  enable_storage=enable_storage,
-                                                                  run_on_all_available_time_frames=run_on_all_available_time_frames,
-                                                                  backtesting_data=self.backtesting_data,
-                                                                  name=name,
-                                                                  config_by_tentacle=config_by_tentacle)
+        self.octobot_backtesting = backtesting.OctoBotBacktesting(
+            self.backtesting_config,
+            self.tentacles_setup_config,
+            self.symbols_to_create_exchange_classes,
+            self.backtesting_files,
+            run_on_common_part_only,
+            start_timestamp=start_timestamp,
+            end_timestamp=end_timestamp,
+            enable_logs=self.enable_logs,
+            enable_storage=enable_storage,
+            run_on_all_available_time_frames=run_on_all_available_time_frames,
+            backtesting_data=self.backtesting_data,
+            name=name,
+            config_by_tentacle=config_by_tentacle,
+            services_config=services_config,
+        )

     async def initialize_and_run(self, log_errors=True):
         try:
@@ -134,6 +141,9 @@ async def join_backtesting_updater(self, timeout=None):
         if self.octobot_backtesting.backtesting is not None:
             await asyncio.wait_for(self.octobot_backtesting.backtesting.time_updater.finished_event.wait(), timeout)

+    async def clear_fetched_data(self):
+        await self.octobot_backtesting.clear_fetched_data()
+
     async def stop(self, memory_check=False, should_raise=False):
         try:
             if not self.stopped:
diff --git a/octobot/backtesting/octobot_backtesting.py b/octobot/backtesting/octobot_backtesting.py
index 343e2f35b..12eee546a 100644
--- a/octobot/backtesting/octobot_backtesting.py
+++ b/octobot/backtesting/octobot_backtesting.py
@@ -24,6 +24,8 @@
 import octobot_commons.configuration as commons_configuration
 import octobot_commons.databases as commons_databases
 import octobot_commons.constants as commons_constants
+import octobot_commons.enums as commons_enums
+import octobot_commons.list_util as list_util
 import octobot_commons.asyncio_tools as asyncio_tools

 import octobot_backtesting.api as backtesting_api
@@ -48,20 +50,23 @@
 class OctoBotBacktesting:
-
-    def __init__(self, backtesting_config,
-                 tentacles_setup_config,
-                 symbols_to_create_exchange_classes,
-                 backtesting_files,
-                 run_on_common_part_only,
-                 start_timestamp=None,
-                 end_timestamp=None,
-                 enable_logs=True,
-                 enable_storage=True,
-                 run_on_all_available_time_frames=False,
-                 backtesting_data=None,
-                 name=None,
-                 config_by_tentacle=None):
+    def __init__(
+        self,
+        backtesting_config,
+        tentacles_setup_config,
+        symbols_to_create_exchange_classes,
+        backtesting_files,
+        run_on_common_part_only,
+        start_timestamp=None,
+        end_timestamp=None,
+        enable_logs=True,
+        enable_storage=True,
+        run_on_all_available_time_frames=False,
+        backtesting_data=None,
+        name=None,
+        config_by_tentacle=None,
+        services_config=None,
+    ):
         self.logger = commons_logging.get_logger(self.__class__.__name__)
         self.backtesting_config = backtesting_config
         self.tentacles_setup_config = tentacles_setup_config
@@ -91,6 +96,8 @@ def __init__(self, backtesting_config,
         self.enable_storage = enable_storage
         self.run_on_all_available_time_frames = run_on_all_available_time_frames
         self._has_started = False
+        self.has_fetched_data = False
+        self.services_config = services_config

     async def initialize_and_run(self):
         if not constants.ENABLE_BACKTESTING:
@@ -110,9 +117,13 @@ async def initialize_and_run(self):
         await self._init_backtesting()
         await self._init_evaluators()
         await self._init_service_feeds()
+        min_timestamp, max_timestamp = await self._configure_backtesting_time_window()
         await self._init_exchanges()
         self._ensure_limits()
         await self._create_evaluators()
+        await self._fetch_backtesting_extra_data_if_any(
+            min_timestamp, max_timestamp
+        )
         await self._create_service_feeds()
         await backtesting_api.start_backtesting(self.backtesting)
         if logger.BOT_CHANNEL_LOGGER is not None and self.enable_logs:
@@ -275,7 +286,9 @@ async def _init_matrix(self):
         self.matrix_id = evaluator_api.create_matrix()

     async def _init_evaluators(self):
-        await evaluator_api.initialize_evaluators(self.backtesting_config, self.tentacles_setup_config)
+        await evaluator_api.initialize_evaluators(
+            self.backtesting_config, self.tentacles_setup_config, config_by_evaluator=self.config_by_tentacle
+        )
         if (not self.backtesting_config[commons_constants.CONFIG_TIME_FRAME]) and \
                 evaluator_constants.CONFIG_FORCED_TIME_FRAME in self.backtesting_config:
             self.backtesting_config[commons_constants.CONFIG_TIME_FRAME] = self.backtesting_config[
@@ -341,16 +354,61 @@ async def _init_backtesting(self):
                 for tf in self.backtesting.importers[0].time_frames
             ]

-    async def _init_exchanges(self):
+    async def _configure_backtesting_time_window(self):
         # modify_backtesting_channels before creating exchanges as they require the current backtesting time to
         # initialize
-        await backtesting_api.adapt_backtesting_channels(self.backtesting,
-                                                         self.backtesting_config,
-                                                         importers.ExchangeDataImporter,
-                                                         run_on_common_part_only=self.run_on_common_part_only,
-                                                         start_timestamp=self.start_timestamp,
-                                                         end_timestamp=self.end_timestamp)
+        min_timestamp, max_timestamp = await backtesting_api.adapt_backtesting_channels(
+            self.backtesting,
+            self.backtesting_config,
+            importers.ExchangeDataImporter,
+            run_on_common_part_only=self.run_on_common_part_only,
+            start_timestamp=self.start_timestamp,
+            end_timestamp=self.end_timestamp
+        )
+        return min_timestamp, max_timestamp
+
+    async def _fetch_backtesting_extra_data_if_any(
+        self, min_timestamp: float, max_timestamp: float
+    ):
+        if not self.evaluators:
+            return
+        handled_classes = set()
+        coros = []
+        for evaluator in list_util.flatten_list(self.evaluators):
+            if evaluator and evaluator.get_name() not in handled_classes:
+                if evaluator.get_signals_history_type() == commons_enums.SignalHistoryTypes.GPT:
+                    coros.append(self._fetch_gpt_history(evaluator, min_timestamp, max_timestamp))
+                handled_classes.add(evaluator.get_name())
+        if coros:
+            self.has_fetched_data = True
+            await asyncio.gather(*coros)
+
+    async def _fetch_gpt_history(self, evaluator, min_timestamp: float, max_timestamp: float):
+        # prevent circular import
+        import tentacles.Services.Services_bases.gpt_service as gpt_service
+        service = await service_api.get_service(gpt_service.GPTService, True, self.services_config)
+        version = evaluator.get_version()
+        for exchange_id in self.exchange_manager_ids:
+            exchange_configuration = trading_api.get_exchange_configuration_from_exchange_id(exchange_id)
+            exchange_name = trading_api.get_exchange_name(
+                trading_api.get_exchange_manager_from_exchange_id(exchange_id)
+            )
+            await service.fetch_gpt_history(
+                exchange_name,
+                [str(symbol) for symbol in self.symbols_to_create_exchange_classes.get(exchange_name, [])],
+                exchange_configuration.available_required_time_frames,
+                version,
+                min_timestamp,
+                max_timestamp
+            )
+
+    async def clear_fetched_data(self):
+        if self.has_fetched_data:
+            # prevent circular import
+            import tentacles.Services.Services_bases.gpt_service as gpt_service
+            (await service_api.get_service(gpt_service.GPTService, True, self.services_config)).clear_signal_history()
+
+    async def _init_exchanges(self):
         for exchange_class_string in self.symbols_to_create_exchange_classes.keys():
             is_future = self.exchange_type_by_exchange[exchange_class_string] == \
                 commons_constants.CONFIG_EXCHANGE_FUTURE
diff --git a/octobot/community/authentication.py b/octobot/community/authentication.py
index 5c20eaaba..d3029536a 100644
--- a/octobot/community/authentication.py
+++ b/octobot/community/authentication.py
@@ -134,6 +134,19 @@ async def get_deployment_url(self):
         )
         return self.user_account.get_bot_deployment_url(deployment_url_data)

+    async def get_gpt_signal(
+        self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, candle_open_time: float, version: str
+    ) -> str:
+        return await self.supabase_client.fetch_gpt_signal(exchange, symbol, time_frame, candle_open_time, version)
+
+    async def get_gpt_signals_history(
+        self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames,
+        first_open_time: float, last_open_time: float, version: str
+    ) -> dict:
+        return await self.supabase_client.fetch_gpt_signals_history(
+            exchange, symbol, time_frame, first_open_time, last_open_time, version
+        )
+
     def get_is_signal_receiver(self):
         if self._community_feed is None:
             return False
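On the community side, a possible call site for the new helpers, assuming an already authenticated CommunityAuthentication instance; the exchange, pair and version values are placeholders:

    import octobot_commons.enums as commons_enums

    async def preload_gpt_signals(auth, first_open_time, last_open_time):
        # auth is a CommunityAuthentication instance; returns the dict built by
        # fetch_gpt_signals_history (candle open time mapped to signal content)
        return await auth.get_gpt_signals_history(
            "binance", "BTC/USDT", commons_enums.TimeFrames.ONE_HOUR,
            first_open_time, last_open_time, "gpt-evaluator-version",
        )
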
diff --git a/octobot/community/supabase_backend/community_supabase_client.py b/octobot/community/supabase_backend/community_supabase_client.py
index 006d67bc4..9799b3b33 100644
--- a/octobot/community/supabase_backend/community_supabase_client.py
+++ b/octobot/community/supabase_backend/community_supabase_client.py
@@ -346,22 +346,71 @@ async def fetch_candles_history(
         self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, first_open_time: float, last_open_time: float
     ) -> list:
-        total_candles_count = (last_open_time - first_open_time) // (
-            commons_enums.TimeFramesMinutes[time_frame] * commons_constants.MINUTE_TO_SECONDS
+        historical_candles = await self._fetch_paginated_signals_history(
+            "temp_ohlcv_history",
+            "timestamp, open, high, low, close, volume",
+            {
+                "exchange_internal_name": exchange,
+                "symbol": symbol,
+                "time_frame": time_frame.value,
+            },
+            commons_enums.TimeFramesMinutes[time_frame] * commons_constants.MINUTE_TO_SECONDS,
+            first_open_time,
+            last_open_time
         )
+        return self._format_ohlcvs(historical_candles)
+
+    async def fetch_gpt_signal(
+        self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, timestamp: float, version: str
+    ) -> str:
+        signals = (
+            await self.table("temp_chatgpt_signals").select("signal").match(
+                {
+                    "timestamp": timestamp,
+                    "exchange_internal_name": exchange,
+                    "symbol": symbol,
+                    "time_frame": time_frame.value,
+                    "metadata->>version": version,
+                },
+            ).execute()
+        ).data
+        if signals:
+            return signals[0]["content"]
+        return ""
+
+    async def fetch_gpt_signals_history(
+        self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames,
+        first_open_time: float, last_open_time: float, version: str
+    ) -> dict:
+        historical_signals = await self._fetch_paginated_signals_history(
+            "temp_chatgpt_signals",
+            "timestamp, signal",
+            {
+                "exchange_internal_name": exchange,
+                "symbol": symbol,
+                "time_frame": time_frame.value,
+                "metadata->>version": version,
+            },
+            commons_enums.TimeFramesMinutes[time_frame] * commons_constants.MINUTE_TO_SECONDS,
+            first_open_time,
+            last_open_time
+        )
+        return self._format_gpt_signals(historical_signals)
+
+    async def _fetch_paginated_signals_history(
+        self, table_name: str, select: str, matcher: dict,
+        time_interval: float, first_open_time: float, last_open_time: float
+    ) -> list:
+        total_elements_count = (last_open_time - first_open_time) // time_interval
         offset = 0
         max_size = 0
-        total_candles = []
+        total_elements = []
         max_requests_count = 100
         request_count = 0
         while request_count < max_requests_count:
             request = (
-                self.table("temp_ohlcv_history").select("*")
-                .match({
-                    "exchange_internal_name": exchange,
-                    "symbol": symbol,
-                    "time_frame": time_frame.value,
-                }).gte(
+                self.table(table_name).select(select)
+                .match(matcher).gte(
                     "timestamp", self.get_formatted_time(first_open_time)
                 ).lte(
                     "timestamp", self.get_formatted_time(last_open_time)
@@ -371,21 +420,28 @@ async def fetch_candles_history(
             )
             if offset:
                 request = request.range(offset, offset+max_size)
-            fetched_candles = (await request.execute()).data
-            total_candles += fetched_candles
-            if len(fetched_candles) < max_size or (max_size == 0 and len(fetched_candles) == total_candles_count):
+            fetched_elements = (await request.execute()).data
+            total_elements += fetched_elements
+            if len(fetched_elements) < max_size or (max_size == 0 and len(fetched_elements) == total_elements_count):
                 # fetched everything
                 break
-            offset += len(fetched_candles)
+            offset += len(fetched_elements)
             if max_size == 0:
                 max_size = offset
             request_count += 1
         if request_count == max_requests_count:
             commons_logging.get_logger(self.__class__.__name__).info(
-                f"OHLCV fetch error: too many requests ({request_count}), fetched: {len(total_candles)} candles"
+                f"paginated fetch error on {table_name} with matcher: {matcher}: "
+                f"too many requests ({request_count}), fetched: {len(total_elements)} elements"
             )
-        return self._format_ohlcvs(total_candles)
+        return total_elements
+
+    def _format_gpt_signals(self, signals: list):
+        return {
+            signal["timestamp"]: signal["signal"]["content"]
+            for signal in signals
+        }

     def _format_ohlcvs(self, ohlcvs: list):
         # uses PriceIndexes order
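The pagination introduced above is worth restating on its own: the first request is sent without an explicit range so that the backend page size can be observed, then follow-up requests page by offset until a short page (or an exactly complete first page) signals the end. A minimal standalone sketch of that loop, with fetch_page standing in for the PostgREST request:

    async def fetch_all_pages(fetch_page, expected_count, max_requests=100):
        elements, offset, page_size = [], 0, 0
        for _ in range(max_requests):
            page = await fetch_page(offset, page_size)
            elements += page
            if len(page) < page_size or (page_size == 0 and len(page) == expected_count):
                # short page, or the very first page already holds everything
                break
            offset += len(page)
            if page_size == 0:
                # the first response reveals the server-side page size
                page_size = offset
        return elements
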
From 8c0490cc07e552a29a5328855dc6ed32244fdc7d Mon Sep 17 00:00:00 2001
From: Guillaume De Saint Martin
Date: Fri, 27 Oct 2023 15:10:08 +0200
Subject: [PATCH 2/3] [Community] add get_production_anon_client

---
 octobot/community/authentication.py            | 11 ++--
 .../community_supabase_client.py               | 55 +++++++++++++------
 .../supabase_backend/supabase_client.py        | 38 +++++++++----
 octobot/constants.py                           |  7 ++-
 4 files changed, 75 insertions(+), 36 deletions(-)

diff --git a/octobot/community/authentication.py b/octobot/community/authentication.py
index d3029536a..816ff21fa 100644
--- a/octobot/community/authentication.py
+++ b/octobot/community/authentication.py
@@ -64,9 +64,11 @@ class CommunityAuthentication(authentication.Authenticator):
     SESSION_HEADER = "X-Session"
     GQL_AUTHORIZATION_HEADER = "Authorization"

-    def __init__(self, feed_url, config=None):
+    def __init__(self, feed_url, config=None, backend_url=None, backend_key=None):
         super().__init__()
         self.feed_url = feed_url
+        self.backend_url = backend_url or identifiers_provider.IdentifiersProvider.BACKEND_URL
+        self.backend_key = backend_key or identifiers_provider.IdentifiersProvider.BACKEND_KEY
         self.configuration_storage = supabase_backend.SyncConfigurationStorage(config)
         self.supabase_client = self._create_client()
         self.user_account = community_user_account.CommunityUserAccount()
@@ -80,10 +82,11 @@ def __init__(self, feed_url, config=None):
         self._fetch_account_task = None

     @staticmethod
-    def create(configuration: commons_configuration.Configuration):
+    def create(configuration: commons_configuration.Configuration, **kwargs):
         return CommunityAuthentication.instance(
             None,
             config=configuration,
+            **kwargs,
         )

     def update(self, configuration: commons_configuration.Configuration):
@@ -178,8 +181,8 @@ def _supports_mock():

     def _create_client(self):
         return supabase_backend.CommunitySupabaseClient(
-            identifiers_provider.IdentifiersProvider.BACKEND_URL,
-            identifiers_provider.IdentifiersProvider.BACKEND_KEY,
+            self.backend_url,
+            self.backend_key,
             self.configuration_storage
         )
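With these overrides, a client can be pointed at a non-default backend while keeping the usual singleton creation path. A sketch assuming an existing commons Configuration object; the URL and key values are placeholders, only the keyword names come from the diff:

    auth = CommunityAuthentication.create(
        configuration,
        backend_url="https://example-project.supabase.co",  # placeholder
        backend_key="anon-key-placeholder",  # placeholder
    )
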
diff --git a/octobot/community/supabase_backend/community_supabase_client.py b/octobot/community/supabase_backend/community_supabase_client.py
index 9799b3b33..d2024af69 100644
--- a/octobot/community/supabase_backend/community_supabase_client.py
+++ b/octobot/community/supabase_backend/community_supabase_client.py
@@ -59,6 +59,7 @@ def __init__(
         self.event_loop = None
         super().__init__(supabase_url, supabase_key, options=options)
         self.is_admin = False
+        self.production_anon_client = None

     async def sign_in(self, email: str, password: str) -> None:
         try:
@@ -346,7 +347,8 @@ async def fetch_candles_history(
         self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, first_open_time: float, last_open_time: float
     ) -> list:
-        historical_candles = await self._fetch_paginated_signals_history(
+        historical_candles = await self._fetch_paginated_history(
+            await self.get_production_anon_client(),
             "temp_ohlcv_history",
             "timestamp, open, high, low, close, volume",
             {
@@ -363,26 +365,25 @@ async def fetch_candles_history(
     async def fetch_gpt_signal(
         self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames, timestamp: float, version: str
     ) -> str:
-        signals = (
-            await self.table("temp_chatgpt_signals").select("signal").match(
-                {
-                    "timestamp": timestamp,
-                    "exchange_internal_name": exchange,
-                    "symbol": symbol,
-                    "time_frame": time_frame.value,
-                    "metadata->>version": version,
-                },
-            ).execute()
-        ).data
+        signals = (await (await self.get_production_anon_client()).table("temp_chatgpt_signals").select("signal").match(
+            {
+                "timestamp": self.get_formatted_time(timestamp),
+                "exchange_internal_name": exchange,
+                "symbol": symbol,
+                "time_frame": time_frame.value,
+                "metadata->>version": version,
+            },
+        ).execute()).data
         if signals:
-            return signals[0]["content"]
+            return signals[0]["signal"]["content"]
         return ""

     async def fetch_gpt_signals_history(
         self, exchange: str, symbol: str, time_frame: commons_enums.TimeFrames,
         first_open_time: float, last_open_time: float, version: str
     ) -> dict:
-        historical_signals = await self._fetch_paginated_signals_history(
+        historical_signals = await self._fetch_paginated_history(
+            await self.get_production_anon_client(),
             "temp_chatgpt_signals",
             "timestamp, signal",
             {
@@ -397,8 +398,16 @@ async def fetch_gpt_signals_history(
         )
         return self._format_gpt_signals(historical_signals)

-    async def _fetch_paginated_signals_history(
-        self, table_name: str, select: str, matcher: dict,
+    async def get_production_anon_client(self):
+        if self.production_anon_client is None:
+            self.production_anon_client = await self.init_other_postgrest_client(
+                supabase_url=constants.COMMUNITY_PRODUCTION_BACKEND_URL,
+                supabase_key=constants.COMMUNITY_PRODUCTION_BACKEND_KEY,
+            )
+        return self.production_anon_client
+
+    async def _fetch_paginated_history(
+        self, client, table_name: str, select: str, matcher: dict,
         time_interval: float, first_open_time: float, last_open_time: float
     ) -> list:
         total_elements_count = (last_open_time - first_open_time) // time_interval
@@ -409,7 +418,7 @@ async def _fetch_paginated_signals_history(
         request_count = 0
         while request_count < max_requests_count:
             request = (
-                self.table(table_name).select(select)
+                client.table(table_name).select(select)
                 .match(matcher).gte(
                     "timestamp", self.get_formatted_time(first_open_time)
                 ).lte(
                     "timestamp", self.get_formatted_time(last_open_time)
@@ -439,7 +448,7 @@ async def _fetch_paginated_signals_history(

     def _format_gpt_signals(self, signals: list):
         return {
-            signal["timestamp"]: signal["signal"]["content"]
+            self.get_parsed_time(signal["timestamp"]).timestamp(): signal["signal"]["content"]
             for signal in signals
         }
@@ -525,3 +534,13 @@ def get_parsed_time(str_time: str) -> datetime.datetime:

     async def _get_user(self) -> gotrue.User:
         return self.auth.get_user().user
+
+    async def close(self):
+        await super().close()
+        if self.production_anon_client is not None:
+            try:
+                await self.production_anon_client.aclose()
+            except RuntimeError:
+                # happens when the event loop is closed already
+                pass
+            self.production_anon_client = None
diff --git a/octobot/community/supabase_backend/supabase_client.py b/octobot/community/supabase_backend/supabase_client.py
index 777f48f0b..37a38ab3b 100644
--- a/octobot/community/supabase_backend/supabase_client.py
+++ b/octobot/community/supabase_backend/supabase_client.py
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU General Public
 # License along with OctoBot. If not, see <https://www.gnu.org/licenses/>.
 import contextlib
+import copy
 import typing
 import storage3
 import storage3.constants
@@ -61,8 +62,8 @@ def _init_postgrest_client(
         rest_url: str,
         supabase_key: str,
         headers: typing.Dict[str, str],
-        schema: str,
         timeout,  # skip typing to avoid httpx import
+        schema: str = "public",
     ) -> postgrest.AsyncPostgrestClient:
         """Private helper for creating an instance of the Postgrest client."""
         # Override to use postgrest.AsyncPostgrestClient and allow async requests
@@ -96,21 +97,33 @@ def table(self, table_name: str) -> postgrest.AsyncRequestBuilder:  # typing ove
         return self.from_(table_name)

     @contextlib.asynccontextmanager
-    async def other_postgres_client(self, schema):
-        other_postgres: postgrest.AsyncPostgrestClient = None
+    async def other_postgres_client(self, supabase_url: str = None, supabase_key: str = None, schema: str = "public"):
+        other_postgres = None
         try:
-            other_postgres = AuthenticatedAsyncSupabaseClient._init_postgrest_client(
-                rest_url=self.rest_url,
-                supabase_key=self.supabase_key,
-                headers=self.options.headers,
-                schema=schema,
-                timeout=self.options.postgrest_client_timeout,
+            other_postgres = await self.init_other_postgrest_client(
+                supabase_url=supabase_url, supabase_key=supabase_key, schema=schema
             )
             yield other_postgres
         finally:
             if other_postgres is not None:
                 await other_postgres.aclose()

+    async def init_other_postgrest_client(
+        self, supabase_url: str = None, supabase_key: str = None, schema: str = "public"
+    ) -> postgrest.AsyncPostgrestClient:
+        supabase_key = supabase_key or self.supabase_key
+        headers = self.options.headers
+        if supabase_key != self.supabase_key:
+            headers = copy.deepcopy(postgrest.constants.DEFAULT_POSTGREST_CLIENT_HEADERS)
+            headers.update(self._format_auth_headers(supabase_key, supabase_key))
+        return AuthenticatedAsyncSupabaseClient._init_postgrest_client(
+            rest_url=f"{supabase_url}/rest/v1" if supabase_url else self.rest_url,
+            supabase_key=supabase_key,
+            headers=headers,
+            timeout=self.options.postgrest_client_timeout,
+            schema=schema,
+        )
+
     async def close(self):
         # timer has to be stopped, there is no public stop api
         if self.auth._refresh_token_timer:
@@ -148,9 +161,12 @@ def _use_auth_session(self, event: gotrue.AuthChangeEvent, _):
     def _get_auth_headers(self):
         """Helper method to get auth headers."""
         # What's the corresponding method to get the token
+        return self._format_auth_headers(self.supabase_key, self._get_auth_key())
+
+    def _format_auth_headers(self, supabase_key, auth_token):
         return {
-            "apiKey": self.supabase_key,
-            "Authorization": f"Bearer {self._get_auth_key()}",
+            "apiKey": supabase_key,
+            "Authorization": f"Bearer {auth_token}",
         }

     def _get_auth_key(self):
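A possible use of the reworked context manager, assuming an existing AuthenticatedAsyncSupabaseClient instance; the URL, key and table name are placeholders:

    async def read_other_backend(client):
        async with client.other_postgres_client(
            supabase_url="https://example-project.supabase.co",  # placeholder
            supabase_key="anon-key-placeholder",  # placeholder
        ) as other_postgres:
            # table() is available on the postgrest client, as used by
            # _fetch_paginated_history above
            return (await other_postgres.table("some_table").select("*").execute()).data
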
diff --git a/octobot/constants.py b/octobot/constants.py
index 80516dcfc..491a79b02 100644
--- a/octobot/constants.py
+++ b/octobot/constants.py
@@ -63,11 +63,10 @@
 OCTOBOT_COMMUNITY_LANDING_URL = os.getenv("COMMUNITY_SERVER_URL", "https://octobot.cloud")
 OCTOBOT_COMMUNITY_URL = os.getenv("COMMUNITY_SERVER_URL", "https://app.octobot.cloud")
 OCTOBOT_COMMUNITY_RECOVER_PASSWORD_URL = OCTOBOT_COMMUNITY_URL
-# todo use real production db
+# default env
 COMMUNITY_BACKEND_URL = os.getenv("COMMUNITY_BACKEND_URL", "https://nwhpvrguwcihhizrnyoe.supabase.co")
 COMMUNITY_BACKEND_KEY = os.getenv("COMMUNITY_BACKEND_KEY", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im53aHB2cmd1d2NpaGhpenJueW9lIiwicm9sZSI6ImFub24iLCJpYXQiOjE2OTU2NDQxMDcsImV4cCI6MjAxMTIyMDEwN30.AILcgv0l6hl_0IUEPlWh1wiu9RIpgrkGZGERM5uXftE")
-
 # staging env SHOULD ONLY BE USED THROUGH CommunityIdentifiersProvider
 STAGING_OCTOBOT_COMMUNITY_LANDING_URL = os.getenv("COMMUNITY_SERVER_URL", "https://beta.octobot.cloud")
 STAGING_OCTOBOT_COMMUNITY_URL = os.getenv("COMMUNITY_SERVER_URL", "https://app-beta.octobot.cloud/")
@@ -75,7 +74,9 @@
 STAGING_COMMUNITY_BACKEND_URL = os.getenv("COMMUNITY_BACKEND_URL", "https://wmfkgvgzokyzhvxowbyg.supabase.co")
 STAGING_COMMUNITY_BACKEND_KEY = os.getenv("COMMUNITY_BACKEND_KEY", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6IndtZmtndmd6b2t5emh2eG93YnlnIiwicm9sZSI6ImFub24iLCJpYXQiOjE2OTE0NDA1MTEsImV4cCI6MjAwNzAxNjUxMX0.YZQl7LYgvnzO_Jizs0UKfPEaqPoV2EwhjunH8gime8o")
-
+# production env, ignored by CommunityIdentifiersProvider
+COMMUNITY_PRODUCTION_BACKEND_URL = os.getenv("COMMUNITY_PRODUCTION_BACKEND_URL", COMMUNITY_BACKEND_URL)
+COMMUNITY_PRODUCTION_BACKEND_KEY = os.getenv("COMMUNITY_PRODUCTION_BACKEND_KEY", COMMUNITY_BACKEND_KEY)

 CONFIG_COMMUNITY = "community"
 CONFIG_COMMUNITY_BOT_ID = "bot_id"

From 249c601895146d59d43fb6a3ffd9ac8553b1ed88 Mon Sep 17 00:00:00 2001
From: Guillaume De Saint Martin
Date: Fri, 27 Oct 2023 17:32:53 +0200
Subject: [PATCH 3/3] [Requirements] bump

---
 requirements.txt | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 48ddd2598..cf8199313 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,10 @@
 # Drakkar-Software requirements
-OctoBot-Commons==1.9.28
-OctoBot-Trading==2.4.36
-OctoBot-Evaluators==1.9.1
+OctoBot-Commons==1.9.29
+OctoBot-Trading==2.4.37
+OctoBot-Evaluators==1.9.2
 OctoBot-Tentacles-Manager==2.9.5
-OctoBot-Services==1.6.4
-OctoBot-Backtesting==1.9.4
+OctoBot-Services==1.6.5
+OctoBot-Backtesting==1.9.5
 Async-Channel==2.2.1
 trading-backend==1.2.9