diff --git a/Makefile b/Makefile index 6502cdcd8ed..c5e049adb5d 100644 --- a/Makefile +++ b/Makefile @@ -15,12 +15,9 @@ test: --exclude-dir="test/debug" \ --exclude-dir="test/mock" \ --exclude-dir="test/hummingbot/connector/gateway/amm" \ - --exclude-dir="test/hummingbot/connector/exchange/polkadex" \ --exclude-dir="test/hummingbot/connector/exchange/coinbase_pro" \ --exclude-dir="test/hummingbot/connector/exchange/kraken" \ --exclude-dir="test/hummingbot/connector/exchange/hitbtc" \ - --exclude-dir="test/hummingbot/connector/exchange/bitmart" \ - --exclude-dir="test/hummingbot/connector/exchange/ndax" \ --exclude-dir="test/hummingbot/connector/gateway/clob_spot/data_sources/dexalot" \ --exclude-dir="test/hummingbot/strategy/amm_arb" \ --exclude-dir="test/hummingbot/core/gateway" \ diff --git a/README.md b/README.md index 924b8d930df..8303247c20a 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,6 @@ Exchanges may be centralized (**CEX**), or decentralized (**DEX**), in which cas | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Bybit](https://docs.hummingbot.org/exchanges/bybit/) | SPOT CEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Bybit (perp)](https://docs.hummingbot.org/exchanges/bitmex-perpetual/) | PERP CEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Coinbase](https://docs.hummingbot.org/exchanges/coinbase/) | SPOT CEX | -| ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [DeFi Kingdoms](https://docs.hummingbot.org/exchanges/defikingdoms/) | AMM DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Defira](https://docs.hummingbot.org/exchanges/defira/) | AMM DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Dexalot](https://docs.hummingbot.org/exchanges/dexalot/) | CLOB DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [HitBTC](https://docs.hummingbot.org/exchanges/hitbtc/) | SPOT CEX | @@ -88,14 +87,13 @@ Exchanges may be centralized (**CEX**), or decentralized (**DEX**), in which cas | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Pancakeswap](https://docs.hummingbot.org/exchanges/pancakeswap/) | AMM DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Pangolin](https://docs.hummingbot.org/exchanges/pangolin/) | AMM DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Perpetual Protocol](https://docs.hummingbot.org/exchanges/perp/) | PERP DEX | -| ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Phemex Perpetual](https://docs.hummingbot.org/exchanges/perp/) | PERP DEX | +| ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Phemex Perpetual](https://docs.hummingbot.org/exchanges/perp/) | PERP CEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Polkadex](https://docs.hummingbot.org/exchanges/polkadex/) | SPOT DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Ref Finance](https://docs.hummingbot.org/exchanges/ref/) | SPOT DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Sushiswap](https://docs.hummingbot.org/exchanges/sushiswap/) | AMM DEX | | 
![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Tinyman](https://docs.hummingbot.org/exchanges/tinyman/) | SPOT DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [VVS Finance](https://docs.hummingbot.org/exchanges/vvs/) | AMM DEX | | ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [XSWAP](https://docs.hummingbot.org/exchanges/xswap/) | AMM DEX | -| ![](https://img.shields.io/static/v1?label=Hummingbot&message=BRONZE&color=green) | [Zigzag](https://docs.hummingbot.org/exchanges/zigzag/) | SPOT DEX | Quarterly [Polls](https://docs.hummingbot.org/governance/polls/) allow the Hummingbot community to vote using HBOT tokens to decide which exchanges should be certified GOLD or SILVER, which means that they are maintained and continually improved by Hummingbot Foundation. In addition, the codebase includes BRONZE exchange connectors that are maintained by community members. See the [Hummingbot documentation](https://docs.hummingbot.org/exchanges) for all exchanges supported. diff --git a/hummingbot/client/config/client_config_map.py b/hummingbot/client/config/client_config_map.py index 2f5d0e25e7c..2fe35eb8a9b 100644 --- a/hummingbot/client/config/client_config_map.py +++ b/hummingbot/client/config/client_config_map.py @@ -600,7 +600,7 @@ class Config: class GlobalTokenConfigMap(BaseClientModel): global_token_name: str = Field( - default="USD", + default="USDT", client_data=ClientFieldData( prompt=lambda cm: "What is your default display token? (e.g. USD,EUR,BTC)", diff --git a/hummingbot/connector/connector_status.py b/hummingbot/connector/connector_status.py index 6f7f3226b4e..566893d6460 100644 --- a/hummingbot/connector/connector_status.py +++ b/hummingbot/connector/connector_status.py @@ -36,25 +36,23 @@ 'perpetual_finance': 'bronze', 'uniswap': 'gold', 'uniswapLP': 'gold', - 'pancakeswap': 'bronze', + 'pancakeswap': 'silver', 'sushiswap': 'bronze', - 'traderjoe': 'silver', - 'quickswap': 'silver', + 'traderjoe': 'bronze', + 'quickswap': 'bronze', 'perp': 'bronze', 'openocean': 'bronze', 'pangolin': 'bronze', - 'defikingdoms': 'bronze', 'defira': 'bronze', 'mad_meerkat': 'bronze', 'vvs': 'bronze', 'ref': 'bronze', 'injective': 'bronze', 'xswap': 'bronze', - 'dexalot': 'bronze', + 'dexalot': 'silver', 'kucoin_perpetual': 'silver', 'kucoin_perpetual_testnet': 'silver', 'injective_perpetual': 'bronze', - 'zigzag': 'bronze', 'bit_com_perpetual': 'bronze', 'bit_com_perpetual_testnet': 'bronze', 'tinyman': 'bronze', diff --git a/hummingbot/connector/derivative/bitget_perpetual/bitget_perpetual_derivative.py b/hummingbot/connector/derivative/bitget_perpetual/bitget_perpetual_derivative.py index 7a4c5efd984..9e2015259cb 100644 --- a/hummingbot/connector/derivative/bitget_perpetual/bitget_perpetual_derivative.py +++ b/hummingbot/connector/derivative/bitget_perpetual/bitget_perpetual_derivative.py @@ -303,7 +303,7 @@ def _get_fee(self, trading_pair = combine_to_hb_trading_pair(base=base_currency, quote=quote_currency) if trading_pair in self._trading_fees: fee_schema: TradeFeeSchema = self._trading_fees[trading_pair] - fee_rate = fee_schema.maker_percent_fee_decimal if is_maker else fee_schema.maker_percent_fee_decimal + fee_rate = fee_schema.maker_percent_fee_decimal if is_maker else fee_schema.taker_percent_fee_decimal fee = TradeFeeBase.new_spot_fee( fee_schema=fee_schema, trade_type=order_side, diff --git a/hummingbot/connector/exchange/injective_v2/README.md 
b/hummingbot/connector/exchange/injective_v2/README.md new file mode 100644 index 00000000000..0f31810251f --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/README.md @@ -0,0 +1,17 @@ +## Injective v2 + +This is a spot connector created by **[Injective Labs](https://injectivelabs.org/)**. +The difference from the `injective` connector is that v2 is a pure Python connector, which means the user does not need to configure and run a Gateway instance to use it. +Also, `injective_v2` has been implemented to use delegated accounts, meaning the account used to send trading transactions to the chain is not the account holding the funds. +The user needs one portfolio account and at least one trading account, and permissions must be granted from the portfolio account to the trading account so that it can operate using the portfolio account's funds. + +### Trading permissions grant +To grant permissions from a portfolio account to a trading account so that it can operate using the portfolio account's funds, please refer to the script `account_delegation_script.py`. + +### Connector parameters +When configuring a new instance of the connector in Hummingbot, the following parameters are required: + +- **injective_private_key**: the private key of the trading account (grantee account) +- **injective_subaccount_index**: the index (decimal number) of the trading account subaccount that the connector will operate with +- **injective_granter_address**: the public key (Injective-format address) of the portfolio account +- **injective_granter_subaccount_index**: the index (decimal number) of the portfolio account subaccount (the subaccount holding the funds) \ No newline at end of file diff --git a/hummingbot/connector/exchange/injective_v2/__init__.py b/hummingbot/connector/exchange/injective_v2/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/hummingbot/connector/exchange/injective_v2/account_delegation_script.py b/hummingbot/connector/exchange/injective_v2/account_delegation_script.py new file mode 100644 index 00000000000..106fe6c08af --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/account_delegation_script.py @@ -0,0 +1,84 @@ +import asyncio + +from pyinjective.async_client import AsyncClient +from pyinjective.composer import Composer +from pyinjective.constant import Network +from pyinjective.transaction import Transaction +from pyinjective.wallet import PrivateKey + +# Values to be configured by the user +NETWORK = Network.testnet() # Select the correct network: mainnet, testnet, devnet, local or custom +GRANT_EXPIRATION_IN_DAYS = 365 +GRANTER_ACCOUNT_PRIVATE_KEY = "" +GRANTER_SUBACCOUNT_INDEX = 0 +GRANTEE_PUBLIC_INJECTIVE_ADDRESS = "" +MARKET_IDS = [] +# List of the ids of all the markets the grant will include, for example: +# MARKET_IDS = ["0x0511ddc4e6586f3bfe1acb2dd905f8b8a82c97e1edaef654b12ca7e6031ca0fa"] # noqa: mock + +# Fixed values, do not change +SECONDS_PER_DAY = 60 * 60 * 24 + + +async def main() -> None: + composer = Composer(network=NETWORK.string()) + + # initialize grpc client + client = AsyncClient(NETWORK, insecure=False) + await client.sync_timeout_height() + + # load account + granter_private_key = PrivateKey.from_hex(GRANTER_ACCOUNT_PRIVATE_KEY) + granter_public_key = granter_private_key.to_public_key() + granter_address = granter_public_key.to_address() + account = await client.get_account(granter_address.to_acc_bech32()) # noqa: F841 + granter_subaccount_id =
granter_address.get_subaccount_id(index=0) + + msg = composer.MsgGrantTyped( + granter = granter_address.to_acc_bech32(), + grantee = GRANTEE_PUBLIC_INJECTIVE_ADDRESS, + msg_type = "BatchUpdateOrdersAuthz", + expire_in=GRANT_EXPIRATION_IN_DAYS * SECONDS_PER_DAY, + subaccount_id=granter_subaccount_id, + spot_markets=MARKET_IDS, + ) + + tx = ( + Transaction() + .with_messages(msg) + .with_sequence(client.get_sequence()) + .with_account_num(client.get_number()) + .with_chain_id(NETWORK.chain_id) + ) + sim_sign_doc = tx.get_sign_doc(granter_public_key) + sim_sig = granter_private_key.sign(sim_sign_doc.SerializeToString()) + sim_tx_raw_bytes = tx.get_tx_data(sim_sig, granter_public_key) + + # simulate tx + (sim_res, success) = await client.simulate_tx(sim_tx_raw_bytes) + if not success: + print(sim_res) + return + + # build tx + gas_price = 500000000 + gas_limit = sim_res.gas_info.gas_used + 20000 + gas_fee = "{:.18f}".format((gas_price * gas_limit) / pow(10, 18)).rstrip("0") + fee = [composer.Coin( + amount=gas_price * gas_limit, + denom=NETWORK.fee_denom, + )] + + tx = tx.with_gas(gas_limit).with_fee(fee).with_memo("").with_timeout_height(client.timeout_height) + sign_doc = tx.get_sign_doc(granter_public_key) + sig = granter_private_key.sign(sign_doc.SerializeToString()) + tx_raw_bytes = tx.get_tx_data(sig, granter_public_key) + + res = await client.send_tx_sync_mode(tx_raw_bytes) + print(res) + print("gas wanted: {}".format(gas_limit)) + print("gas fee: {} INJ".format(gas_fee)) + + +if __name__ == "__main__": + asyncio.get_event_loop().run_until_complete(main()) diff --git a/hummingbot/connector/exchange/injective_v2/injective_constants.py b/hummingbot/connector/exchange/injective_v2/injective_constants.py new file mode 100644 index 00000000000..b5ba662a220 --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_constants.py @@ -0,0 +1,52 @@ +import sys + +from hummingbot.core.api_throttler.data_types import RateLimit +from hummingbot.core.data_type.in_flight_order import OrderState + +EXCHANGE_NAME = "injective_v2" + +DEFAULT_DOMAIN = "" +TESTNET_DOMAIN = "testnet" + +DEFAULT_SUBACCOUNT_INDEX = 0 +EXTRA_TRANSACTION_GAS = 20000 +DEFAULT_GAS_PRICE = 500000000 + +EXPECTED_BLOCK_TIME = 1.5 +TRANSACTIONS_CHECK_INTERVAL = 3 * EXPECTED_BLOCK_TIME + +# Public limit ids +ORDERBOOK_LIMIT_ID = "OrderBookSnapshot" +GET_TRANSACTION_LIMIT_ID = "GetTransaction" +GET_CHAIN_TRANSACTION_LIMIT_ID = "GetChainTransaction" + +# Private limit ids +PORTFOLIO_BALANCES_LIMIT_ID = "AccountPortfolio" +SPOT_ORDERS_HISTORY_LIMIT_ID = "SpotOrdersHistory" +SPOT_TRADES_LIMIT_ID = "SpotTrades" +SIMULATE_TRANSACTION_LIMIT_ID = "SimulateTransaction" +SEND_TRANSACTION = "SendTransaction" + +NO_LIMIT = sys.maxsize +ONE_SECOND = 1 + +RATE_LIMITS = [ + RateLimit(limit_id=ORDERBOOK_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=GET_TRANSACTION_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=GET_CHAIN_TRANSACTION_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=PORTFOLIO_BALANCES_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=SPOT_ORDERS_HISTORY_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=SPOT_TRADES_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=SIMULATE_TRANSACTION_LIMIT_ID, limit=NO_LIMIT, time_interval=ONE_SECOND), + RateLimit(limit_id=SEND_TRANSACTION, limit=NO_LIMIT, time_interval=ONE_SECOND), +] + +ORDER_STATE_MAP = { + "booked": OrderState.OPEN, + 
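+    # "booked" is the indexer state for an order resting on the book, hence it maps to OrderState.OPEN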
"partial_filled": OrderState.PARTIALLY_FILLED, + "filled": OrderState.FILLED, + "canceled": OrderState.CANCELED, +} + +ORDER_NOT_FOUND_ERROR_MESSAGE = "order not found" +ACCOUNT_SEQUENCE_MISMATCH_ERROR = "account sequence mismatch" diff --git a/hummingbot/connector/exchange/injective_v2/injective_data_source.py b/hummingbot/connector/exchange/injective_v2/injective_data_source.py new file mode 100644 index 00000000000..c7178bb3c3b --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_data_source.py @@ -0,0 +1,1140 @@ +import asyncio +import base64 +import logging +import time +from abc import ABC, abstractmethod +from decimal import Decimal +from enum import Enum +from typing import Any, Dict, List, Mapping, Optional + +from bidict import bidict +from google.protobuf import any_pb2 +from pyinjective import Transaction +from pyinjective.async_client import AsyncClient +from pyinjective.composer import Composer +from pyinjective.constant import Network +from pyinjective.orderhash import OrderHashManager +from pyinjective.wallet import Address, PrivateKey + +from hummingbot.connector.exchange.injective_v2 import injective_constants as CONSTANTS +from hummingbot.connector.exchange.injective_v2.injective_market import InjectiveSpotMarket, InjectiveToken +from hummingbot.connector.exchange.injective_v2.injective_query_executor import PythonSDKInjectiveQueryExecutor +from hummingbot.connector.gateway.common_types import CancelOrderResult, PlaceOrderResult +from hummingbot.connector.gateway.gateway_in_flight_order import GatewayInFlightOrder +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler +from hummingbot.core.api_throttler.async_throttler_base import AsyncThrottlerBase +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import OrderState, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType +from hummingbot.core.data_type.trade_fee import TokenAmount, TradeFeeBase, TradeFeeSchema +from hummingbot.core.event.event_listener import EventListener +from hummingbot.core.event.events import AccountEvent, BalanceUpdateEvent, MarketEvent, OrderBookDataSourceEvent +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.pubsub import PubSub +from hummingbot.core.utils.async_utils import safe_gather +from hummingbot.logger import HummingbotLogger + + +class InjectiveDataSource(ABC): + _logger: Optional[HummingbotLogger] = None + + TRANSACTIONS_LOOKUP_TIMEOUT = CONSTANTS.EXPECTED_BLOCK_TIME * 3 + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(HummingbotLogger.logger_name_for_class(cls)) + return cls._logger + + @classmethod + def for_grantee( + cls, + private_key: str, + subaccount_index: int, + granter_address: str, + granter_subaccount_index: int, + domain: Optional[str] = CONSTANTS.DEFAULT_DOMAIN): + return InjectiveGranteeDataSource( + private_key=private_key, + subaccount_index=subaccount_index, + granter_address=granter_address, + granter_subaccount_index=granter_subaccount_index, + domain=domain, + ) + + @property + @abstractmethod + def publisher(self): + raise NotImplementedError + + @property + @abstractmethod + def query_executor(self): + raise NotImplementedError + + @property + @abstractmethod + def 
composer(self) -> Composer: + raise NotImplementedError + + @property + @abstractmethod + def order_creation_lock(self) -> asyncio.Lock: + raise NotImplementedError + + @property + @abstractmethod + def throttler(self): + raise NotImplementedError + + @property + @abstractmethod + def portfolio_account_injective_address(self) -> str: + raise NotImplementedError + + @property + @abstractmethod + def portfolio_account_subaccount_id(self) -> str: + raise NotImplementedError + + @property + @abstractmethod + def trading_account_injective_address(self) -> str: + raise NotImplementedError + + @property + @abstractmethod + def injective_chain_id(self) -> str: + raise NotImplementedError + + @property + @abstractmethod + def fee_denom(self) -> str: + raise NotImplementedError + + @abstractmethod + async def timeout_height(self) -> int: + raise NotImplementedError + + @abstractmethod + async def market_and_trading_pair_map(self): + raise NotImplementedError + + @abstractmethod + async def market_info_for_id(self, market_id: str): + raise NotImplementedError + + @abstractmethod + async def trading_pair_for_market(self, market_id: str): + raise NotImplementedError + + @abstractmethod + async def market_id_for_trading_pair(self, trading_pair: str) -> str: + raise NotImplementedError + + @abstractmethod + async def all_markets(self): + raise NotImplementedError + + @abstractmethod + async def token(self, denom: str) -> InjectiveToken: + raise NotImplementedError + + @abstractmethod + def events_listening_tasks(self) -> List[asyncio.Task]: + raise NotImplementedError + + @abstractmethod + def add_listening_task(self, task: asyncio.Task): + raise NotImplementedError + + @abstractmethod + def configure_throttler(self, throttler: AsyncThrottlerBase): + raise NotImplementedError + + @abstractmethod + async def trading_account_sequence(self) -> int: + raise NotImplementedError + + @abstractmethod + async def trading_account_number(self) -> int: + raise NotImplementedError + + @abstractmethod + async def initialize_trading_account(self): + raise NotImplementedError + + @abstractmethod + async def update_markets(self): + raise NotImplementedError + + @abstractmethod + async def transaction_result_data(self, transaction_hash: str) -> str: + raise NotImplementedError + + @abstractmethod + def real_tokens_trading_pair(self, unique_trading_pair: str) -> str: + raise NotImplementedError + + def is_started(self): + return len(self.events_listening_tasks()) > 0 + + async def check_network(self) -> NetworkStatus: + try: + await self.query_executor.ping() + status = NetworkStatus.CONNECTED + except asyncio.CancelledError: + raise + except Exception: + status = NetworkStatus.NOT_CONNECTED + return status + + async def start(self, market_ids: List[str]): + if not self.is_started(): + await self.initialize_trading_account() + if not self.is_started(): + self.add_listening_task(asyncio.create_task(self._listen_to_public_trades(market_ids=market_ids))) + self.add_listening_task(asyncio.create_task(self._listen_to_order_book_updates(market_ids=market_ids))) + self.add_listening_task(asyncio.create_task(self._listen_to_account_balance_updates())) + + for market_id in market_ids: + self.add_listening_task(asyncio.create_task( + self._listen_to_subaccount_order_updates(market_id=market_id)) + ) + await self._initialize_timeout_height() + + async def stop(self): + for task in self.events_listening_tasks(): + task.cancel() + + def add_listener(self, event_tag: Enum, listener: EventListener): + 
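+        # Listener subscriptions are delegated to the shared PubSub publisher that emits all connector events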
self.publisher.add_listener(event_tag=event_tag, listener=listener) + + def remove_listener(self, event_tag: Enum, listener: EventListener): + self.publisher.remove_listener(event_tag=event_tag, listener=listener) + + async def all_trading_rules(self) -> List[TradingRule]: + all_markets = await self.all_markets() + trading_rules = [] + + for market in all_markets: + try: + min_price_tick_size = market.min_price_tick_size() + min_quantity_tick_size = market.min_quantity_tick_size() + trading_rule = TradingRule( + trading_pair=market.trading_pair(), + min_order_size=min_quantity_tick_size, + min_price_increment=min_price_tick_size, + min_base_amount_increment=min_quantity_tick_size, + min_quote_amount_increment=min_price_tick_size, + ) + trading_rules.append(trading_rule) + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception(f"Error parsing the trading pair rule: {market.market_info}. Skipping...") + return trading_rules + + async def order_book_snapshot(self, market_id: str, trading_pair: str) -> OrderBookMessage: + async with self.throttler.execute_task(limit_id=CONSTANTS.ORDERBOOK_LIMIT_ID): + snapshot_data = await self.query_executor.get_spot_orderbook(market_id=market_id) + + market = await self.market_info_for_id(market_id=market_id) + bids = [(market.price_from_chain_format(chain_price=Decimal(price)), + market.quantity_from_chain_format(chain_quantity=Decimal(quantity))) + for price, quantity, _ in snapshot_data["buys"]] + asks = [(market.price_from_chain_format(chain_price=Decimal(price)), + market.quantity_from_chain_format(chain_quantity=Decimal(quantity))) + for price, quantity, _ in snapshot_data["sells"]] + snapshot_msg = OrderBookMessage( + message_type=OrderBookMessageType.SNAPSHOT, + content={ + "trading_pair": trading_pair, + "update_id": snapshot_data["sequence"], + "bids": bids, + "asks": asks, + }, + timestamp=snapshot_data["timestamp"] * 1e-3, + ) + return snapshot_msg + + async def last_traded_price(self, market_id: str) -> Decimal: + price = await self._last_traded_price(market_id=market_id) + return price + + async def all_account_balances(self) -> Dict[str, Dict[str, Decimal]]: + account_address = self.portfolio_account_injective_address + + async with self.throttler.execute_task(limit_id=CONSTANTS.PORTFOLIO_BALANCES_LIMIT_ID): + portfolio_response = await self.query_executor.account_portfolio(account_address=account_address) + + bank_balances = portfolio_response["bankBalances"] + sub_account_balances = portfolio_response.get("subaccounts", []) + + balances_dict: Dict[str, Dict[str, Decimal]] = {} + + if self._uses_default_portfolio_subaccount(): + for bank_entry in bank_balances: + token = await self.token(denom=bank_entry["denom"]) + if token is not None: + asset_name: str = token.unique_symbol + + available_balance = token.value_from_chain_format(chain_value=Decimal(bank_entry["amount"])) + total_balance = available_balance + balances_dict[asset_name] = { + "total_balance": total_balance, + "available_balance": available_balance, + } + + for entry in sub_account_balances: + if entry["subaccountId"] == self.portfolio_account_subaccount_id: + token = await self.token(denom=entry["denom"]) + if token is not None: + asset_name: str = token.unique_symbol + + total_balance = token.value_from_chain_format(chain_value=Decimal(entry["deposit"]["totalBalance"])) + available_balance = token.value_from_chain_format( + chain_value=Decimal(entry["deposit"]["availableBalance"])) + + balance_element = balances_dict.get( + asset_name, 
{"total_balance": Decimal("0"), "available_balance": Decimal("0")} + ) + balance_element["total_balance"] += total_balance + balance_element["available_balance"] += available_balance + balances_dict[asset_name] = balance_element + + return balances_dict + + async def create_orders(self, orders_to_create: List[GatewayInFlightOrder]) -> List[PlaceOrderResult]: + if self.order_creation_lock.locked(): + raise RuntimeError("It is not possible to create new orders because the hash manager is not synchronized") + async with self.order_creation_lock: + composer = self.composer + order_definitions = [] + results = [] + + for order in orders_to_create: + order_definition = await self._create_order_definition(order=order) + order_definitions.append(order_definition) + + order_hashes = self._calculate_order_hashes(orders=order_definitions) + + message = composer.MsgBatchUpdateOrders( + sender=self.portfolio_account_injective_address, + spot_orders_to_create=order_definitions, + ) + delegated_message = composer.MsgExec( + grantee=self.trading_account_injective_address, + msgs=[message] + ) + + try: + result = await self._send_in_transaction(message=delegated_message) + if result["rawLog"] != "[]" or result["txhash"] in [None, ""]: + raise ValueError(f"Error sending the order creation transaction ({result['rawLog']})") + else: + transaction_hash = result["txhash"] + results = [ + PlaceOrderResult( + update_timestamp=self._time(), + client_order_id=order.client_order_id, + exchange_order_id=order_hash, + trading_pair=order.trading_pair, + misc_updates={ + "creation_transaction_hash": transaction_hash, + }, + ) for order, order_hash in zip(orders_to_create, order_hashes) + ] + except asyncio.CancelledError: + raise + except Exception as ex: + results = [ + PlaceOrderResult( + update_timestamp=self._time(), + client_order_id=order.client_order_id, + exchange_order_id=order_hash, + trading_pair=order.trading_pair, + exception=ex, + ) for order, order_hash in zip(orders_to_create, order_hashes) + ] + + return results + + async def cancel_orders(self, orders_to_cancel: List[GatewayInFlightOrder]) -> List[CancelOrderResult]: + composer = self.composer + orders_with_hash = [] + orders_data = [] + results = [] + + for order in orders_to_cancel: + if order.exchange_order_id is None: + results.append(CancelOrderResult( + client_order_id=order.client_order_id, + trading_pair=order.trading_pair, + not_found=True, + )) + else: + market_id = await self.market_id_for_trading_pair(trading_pair=order.trading_pair) + order_data = composer.OrderData( + market_id=market_id, + subaccount_id=self.portfolio_account_subaccount_id, + order_hash=order.exchange_order_id, + ) + orders_data.append(order_data) + orders_with_hash.append(order) + + message = composer.MsgBatchUpdateOrders( + sender=self.portfolio_account_injective_address, + spot_orders_to_cancel=orders_data, + ) + delegated_message = composer.MsgExec( + grantee=self.trading_account_injective_address, + msgs=[message] + ) + + try: + result = await self._send_in_transaction(message=delegated_message) + if result["rawLog"] != "[]": + raise ValueError(f"Error sending the order cancel transaction ({result['rawLog']})") + else: + cancel_transaction_hash = result.get("txhash", "") + results.extend([ + CancelOrderResult( + client_order_id=order.client_order_id, + trading_pair=order.trading_pair, + misc_updates={"cancelation_transaction_hash": cancel_transaction_hash}, + ) for order in orders_with_hash + ]) + except asyncio.CancelledError: + raise + except Exception as ex: + 
results.extend([ + CancelOrderResult( + client_order_id=order.client_order_id, + trading_pair=order.trading_pair, + exception=ex, + ) for order in orders_with_hash + ]) + + return results + + async def spot_trade_updates(self, market_ids: List[str], start_time: float) -> List[TradeUpdate]: + done = False + skip = 0 + trade_entries = [] + + while not done: + async with self.throttler.execute_task(limit_id=CONSTANTS.SPOT_TRADES_LIMIT_ID): + trades_response = await self.query_executor.get_spot_trades( + market_ids=market_ids, + subaccount_id=self.portfolio_account_subaccount_id, + start_time=int(start_time * 1e3), + skip=skip, + ) + if "trades" in trades_response: + total = int(trades_response["paging"]["total"]) + entries = trades_response["trades"] + + trade_entries.extend(entries) + done = len(trade_entries) >= total + skip += len(entries) + else: + done = True + + trade_updates = [await self._parse_trade_entry(trade_info=trade_info) for trade_info in trade_entries] + + return trade_updates + + async def spot_order_updates(self, market_ids: List[str], start_time: float) -> List[OrderUpdate]: + done = False + skip = 0 + order_entries = [] + + while not done: + async with self.throttler.execute_task(limit_id=CONSTANTS.SPOT_ORDERS_HISTORY_LIMIT_ID): + orders_response = await self.query_executor.get_historical_spot_orders( + market_ids=market_ids, + subaccount_id=self.portfolio_account_subaccount_id, + start_time=int(start_time * 1e3), + skip=skip, + ) + if "orders" in orders_response: + total = int(orders_response["paging"]["total"]) + entries = orders_response["orders"] + + order_entries.extend(entries) + done = len(order_entries) >= total + skip += len(entries) + else: + done = True + + order_updates = [await self._parse_order_entry(order_info=order_info) for order_info in order_entries] + + return order_updates + + async def reset_order_hash_generator(self, active_orders: List[GatewayInFlightOrder]): + if not self.order_creation_lock.locked: + raise RuntimeError("The order creation lock should be acquired before resetting the order hash manager") + transactions_to_wait_before_reset = set() + for order in active_orders: + if order.creation_transaction_hash is not None and order.current_state == OrderState.PENDING_CREATE: + transactions_to_wait_before_reset.add(order.creation_transaction_hash) + transaction_wait_tasks = [ + asyncio.wait_for( + self._transaction_from_chain(tx_hash=transaction_hash, retries=2), + timeout=self.TRANSACTIONS_LOOKUP_TIMEOUT + ) + for transaction_hash in transactions_to_wait_before_reset + ] + await safe_gather(*transaction_wait_tasks, return_exceptions=True) + self._reset_order_hash_manager() + + async def get_trading_fees(self) -> Dict[str, TradeFeeSchema]: + markets = await self.all_markets() + fees = {} + for market in markets: + trading_pair = await self.trading_pair_for_market(market_id=market.market_id) + fees[trading_pair] = TradeFeeSchema( + percent_fee_token=market.quote_token.unique_symbol, + maker_percent_fee_decimal=market.maker_fee_rate(), + taker_percent_fee_decimal=market.taker_fee_rate(), + ) + + return fees + + @abstractmethod + async def _initialize_timeout_height(self): + raise NotImplementedError + + @abstractmethod + def _sign_and_encode(self, transaction: Transaction) -> bytes: + raise NotImplementedError + + @abstractmethod + def _uses_default_portfolio_subaccount(self) -> bool: + raise NotImplementedError + + @abstractmethod + def _order_book_updates_stream(self, market_ids: List[str]): + raise NotImplementedError + + @abstractmethod + def 
_public_trades_stream(self, market_ids: List[str]): + raise NotImplementedError + + @abstractmethod + def _subaccount_balance_stream(self): + raise NotImplementedError + + @abstractmethod + def _subaccount_orders_stream(self, market_id: str): + raise NotImplementedError + + @abstractmethod + async def _create_order_definition(self, order: GatewayInFlightOrder): + raise NotImplementedError + + @abstractmethod + def _calculate_order_hashes(self, orders: List[GatewayInFlightOrder]) -> List[str]: + raise NotImplementedError + + @abstractmethod + def _reset_order_hash_manager(self): + raise NotImplementedError + + @abstractmethod + async def _last_traded_price(self, market_id: str) -> Decimal: + raise NotImplementedError + + async def _transaction_from_chain(self, tx_hash: str, retries: int) -> int: + executed_tries = 0 + found = False + block_height = None + + while executed_tries < retries and not found: + executed_tries += 1 + try: + async with self.throttler.execute_task(limit_id=CONSTANTS.SPOT_ORDERS_HISTORY_LIMIT_ID): + block_height = await self.query_executor.get_tx_block_height(tx_hash=tx_hash) + found = True + except ValueError: + # No block found containing the transaction, continue the search + pass + if executed_tries < retries and not found: + await self._sleep(CONSTANTS.EXPECTED_BLOCK_TIME) + + if not found: + raise ValueError(f"The transaction {tx_hash} is not included in any mined block") + + return block_height + + async def _parse_trade_entry(self, trade_info: Dict[str, Any]) -> TradeUpdate: + exchange_order_id: str = trade_info["orderHash"] + market = await self.market_info_for_id(market_id=trade_info["marketId"]) + trading_pair = await self.trading_pair_for_market(market_id=trade_info["marketId"]) + trade_id: str = trade_info["tradeId"] + + price = market.price_from_chain_format(chain_price=Decimal(trade_info["price"]["price"])) + size = market.quantity_from_chain_format(chain_quantity=Decimal(trade_info["price"]["quantity"])) + trade_type = TradeType.BUY if trade_info["tradeDirection"] == "buy" else TradeType.SELL + is_taker: bool = trade_info["executionSide"] == "taker" + trade_time = int(trade_info["executedAt"]) * 1e-3 + + fee_amount = market.quote_token.value_from_chain_format(chain_value=Decimal(trade_info["fee"])) + fee = TradeFeeBase.new_spot_fee( + fee_schema=TradeFeeSchema(), + trade_type=trade_type, + percent_token=market.quote_token.symbol, + flat_fees=[TokenAmount(amount=fee_amount, token=market.quote_token.symbol)] + ) + + trade_update = TradeUpdate( + trade_id=trade_id, + client_order_id=None, + exchange_order_id=exchange_order_id, + trading_pair=trading_pair, + fill_timestamp=trade_time, + fill_price=price, + fill_base_amount=size, + fill_quote_amount=size * price, + fee=fee, + is_taker=is_taker, + ) + + return trade_update + + async def _parse_order_entry(self, order_info: Dict[str, Any]) -> OrderUpdate: + exchange_order_id: str = order_info["orderHash"] + trading_pair = await self.trading_pair_for_market(market_id=order_info["marketId"]) + + status_update = OrderUpdate( + trading_pair=trading_pair, + update_timestamp=int(order_info["updatedAt"]) * 1e-3, + new_state=CONSTANTS.ORDER_STATE_MAP[order_info["state"]], + client_order_id=None, + exchange_order_id=exchange_order_id, + ) + + return status_update + + async def _send_in_transaction(self, message: any_pb2.Any) -> Dict[str, Any]: + transaction = Transaction() + transaction.with_messages(message) + transaction.with_sequence(await self.trading_account_sequence()) + transaction.with_account_num(await 
self.trading_account_number()) + transaction.with_chain_id(self.injective_chain_id) + + signed_transaction_data = self._sign_and_encode(transaction=transaction) + + async with self.throttler.execute_task(limit_id=CONSTANTS.SIMULATE_TRANSACTION_LIMIT_ID): + try: + simulation_result = await self.query_executor.simulate_tx(tx_byte=signed_transaction_data) + except RuntimeError as simulation_ex: + if CONSTANTS.ACCOUNT_SEQUENCE_MISMATCH_ERROR in str(simulation_ex): + await self.initialize_trading_account() + raise + + gas_limit = int(simulation_result["gasInfo"]["gasUsed"]) + CONSTANTS.EXTRA_TRANSACTION_GAS + fee = [self.composer.Coin( + amount=gas_limit * CONSTANTS.DEFAULT_GAS_PRICE, + denom=self.fee_denom, + )] + + transaction.with_gas(gas_limit) + transaction.with_fee(fee) + transaction.with_memo('') + transaction.with_timeout_height(await self.timeout_height()) + + signed_transaction_data = self._sign_and_encode(transaction=transaction) + + async with self.throttler.execute_task(limit_id=CONSTANTS.SEND_TRANSACTION): + result = await self.query_executor.send_tx_sync_mode(tx_byte=signed_transaction_data) + + if CONSTANTS.ACCOUNT_SEQUENCE_MISMATCH_ERROR in result.get("rawLog", ""): + await self.initialize_trading_account() + + return result + + async def _listen_to_order_book_updates(self, market_ids: List[str]): + while True: + try: + updates_stream = self._order_book_updates_stream(market_ids=market_ids) + async for update in updates_stream: + try: + await self._process_order_book_update(order_book_update=update) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().warning(f"Invalid orderbook diff event format ({ex})\n{update}") + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().error(f"Error while listening to order book updates, reconnecting ... ({ex})") + + async def _listen_to_public_trades(self, market_ids: List[str]): + while True: + try: + public_trades_stream = self._public_trades_stream(market_ids=market_ids) + async for trade in public_trades_stream: + try: + await self._process_public_trade_update(trade_update=trade) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().warning(f"Invalid public trade event format ({ex})\n{trade}") + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().error(f"Error while listening to public trades, reconnecting ... ({ex})") + + async def _listen_to_account_balance_updates(self): + while True: + try: + balance_stream = self._subaccount_balance_stream() + async for balance_event in balance_stream: + try: + await self._process_subaccount_balance_update(balance_event=balance_event) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().warning(f"Invalid balance event format ({ex})\n{balance_event}") + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().error(f"Error while listening to balance updates, reconnecting ... 
({ex})") + + async def _listen_to_subaccount_order_updates(self, market_id: str): + while True: + try: + orders_stream = self._subaccount_orders_stream(market_id=market_id) + async for order_event in orders_stream: + try: + await self._process_subaccount_order_update(order_event=order_event) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().warning(f"Invalid order event format ({ex})\n{order_event}") + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().error(f"Error while listening to subaccount orders updates, reconnecting ... ({ex})") + + async def _process_order_book_update(self, order_book_update: Dict[str, Any]): + market_id = order_book_update["marketId"] + market_info = await self.market_info_for_id(market_id=market_id) + + trading_pair = await self.trading_pair_for_market(market_id=market_id) + bids = [(market_info.price_from_chain_format(chain_price=Decimal(bid["price"])), + market_info.quantity_from_chain_format(chain_quantity=Decimal(bid["quantity"]))) + for bid in order_book_update.get("buys", [])] + asks = [(market_info.price_from_chain_format(chain_price=Decimal(ask["price"])), + market_info.quantity_from_chain_format(chain_quantity=Decimal(ask["quantity"]))) + for ask in order_book_update.get("sells", [])] + + order_book_message_content = { + "trading_pair": trading_pair, + "update_id": int(order_book_update["sequence"]), + "bids": bids, + "asks": asks, + } + diff_message = OrderBookMessage( + message_type=OrderBookMessageType.DIFF, + content=order_book_message_content, + timestamp=int(order_book_update["updatedAt"]) * 1e-3, + ) + self.publisher.trigger_event( + event_tag=OrderBookDataSourceEvent.DIFF_EVENT, message=diff_message + ) + + async def _process_public_trade_update(self, trade_update: Dict[str, Any]): + market_id = trade_update["marketId"] + market_info = await self.market_info_for_id(market_id=market_id) + + trading_pair = await self.trading_pair_for_market(market_id=market_id) + timestamp = int(trade_update["executedAt"]) * 1e-3 + trade_type = float(TradeType.BUY.value) if trade_update["tradeDirection"] == "buy" else float( + TradeType.SELL.value) + message_content = { + "trade_id": trade_update["tradeId"], + "trading_pair": trading_pair, + "trade_type": trade_type, + "amount": market_info.quantity_from_chain_format( + chain_quantity=Decimal(str(trade_update["price"]["quantity"]))), + "price": market_info.price_from_chain_format(chain_price=Decimal(str(trade_update["price"]["price"]))), + } + trade_message = OrderBookMessage( + message_type=OrderBookMessageType.TRADE, + content=message_content, + timestamp=timestamp, + ) + self.publisher.trigger_event( + event_tag=OrderBookDataSourceEvent.TRADE_EVENT, message=trade_message + ) + + update = await self._parse_trade_entry(trade_info=trade_update) + self.publisher.trigger_event(event_tag=MarketEvent.TradeUpdate, message=update) + + async def _process_subaccount_balance_update(self, balance_event: Dict[str, Any]): + updated_token = await self.token(denom=balance_event["balance"]["denom"]) + if updated_token is not None: + if self._uses_default_portfolio_subaccount(): + token_balances = await self.all_account_balances() + total_balance = token_balances[updated_token.unique_symbol]["total_balance"] + available_balance = token_balances[updated_token.unique_symbol]["available_balance"] + else: + updated_total = balance_event["balance"]["deposit"].get("totalBalance") + total_balance = (updated_token.value_from_chain_format(chain_value=Decimal(updated_total)) + 
if updated_total is not None + else None) + updated_available = balance_event["balance"]["deposit"].get("availableBalance") + available_balance = (updated_token.value_from_chain_format(chain_value=Decimal(updated_available)) + if updated_available is not None + else None) + + balance_msg = BalanceUpdateEvent( + timestamp=int(balance_event["timestamp"]) * 1e3, + asset_name=updated_token.unique_symbol, + total_balance=total_balance, + available_balance=available_balance, + ) + self.publisher.trigger_event(event_tag=AccountEvent.BalanceEvent, message=balance_msg) + + async def _process_subaccount_order_update(self, order_event: Dict[str, Any]): + order_update = await self._parse_order_entry(order_info=order_event) + self.publisher.trigger_event(event_tag=MarketEvent.OrderUpdate, message=order_update) + + def _time(self): + return time.time() + + async def _sleep(self, delay: float): + """ + Method created to enable tests to prevent processes from sleeping + """ + await asyncio.sleep(delay) + + +class InjectiveGranteeDataSource(InjectiveDataSource): + _logger: Optional[HummingbotLogger] = None + + def __init__( + self, + private_key: str, + subaccount_index: int, + granter_address: str, + granter_subaccount_index: int, + domain: Optional[str] = CONSTANTS.DEFAULT_DOMAIN): + self._network = Network.testnet() if domain == CONSTANTS.TESTNET_DOMAIN else Network.mainnet() + self._client = AsyncClient(network=self._network, insecure=False) + self._composer = Composer(network=self._network.string()) + self._query_executor = PythonSDKInjectiveQueryExecutor(sdk_client=self._client) + + self._private_key = None + self._public_key = None + self._grantee_address = "" + self._grantee_subaccount_index = subaccount_index + self._granter_subaccount_id = "" + if private_key: + self._private_key = PrivateKey.from_hex(private_key) + self._public_key = self._private_key.to_public_key() + self._grantee_address = self._public_key.to_address() + self._grantee_subaccount_id = self._grantee_address.get_subaccount_id(index=subaccount_index) + + self._granter_address = None + self._granter_subaccount_id = "" + self._granter_subaccount_index = granter_subaccount_index + if granter_address: + self._granter_address = Address.from_acc_bech32(granter_address) + self._granter_subaccount_id = self._granter_address.get_subaccount_id(index=granter_subaccount_index) + + self._order_hash_manager: Optional[OrderHashManager] = None + self._publisher = PubSub() + self._last_received_message_time = 0 + self._order_creation_lock = asyncio.Lock() + # We create a throttler instance here just to have a fully valid instance from the first moment. + # The connector using this data source should replace the throttler with the one used by the connector. 
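+        # Note: the default RATE_LIMITS defined in injective_constants are effectively unlimited placeholders (sys.maxsize per second)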
+ self._throttler = AsyncThrottler(rate_limits=CONSTANTS.RATE_LIMITS) + + self._is_timeout_height_initialized = False + self._is_trading_account_initialized = False + self._markets_initialization_lock = asyncio.Lock() + self._market_info_map: Optional[Dict[str, InjectiveSpotMarket]] = None + self._market_and_trading_pair_map: Optional[Mapping[str, str]] = None + self._tokens_map: Optional[Dict[str, InjectiveToken]] = None + self._token_symbol_symbol_and_denom_map: Optional[Mapping[str, str]] = None + + self._events_listening_tasks: List[asyncio.Task] = [] + + @property + def publisher(self): + return self._publisher + + @property + def query_executor(self): + return self._query_executor + + @property + def composer(self) -> Composer: + return self._composer + + @property + def order_creation_lock(self) -> asyncio.Lock: + return self._order_creation_lock + + @property + def throttler(self): + return self._throttler + + @property + def portfolio_account_injective_address(self) -> str: + return self._granter_address.to_acc_bech32() + + @property + def portfolio_account_subaccount_id(self) -> str: + return self._granter_subaccount_id + + @property + def trading_account_injective_address(self) -> str: + return self._grantee_address.to_acc_bech32() + + @property + def injective_chain_id(self) -> str: + return self._network.chain_id + + @property + def fee_denom(self) -> str: + return self._network.fee_denom + + def events_listening_tasks(self) -> List[asyncio.Task]: + return self._events_listening_tasks.copy() + + def add_listening_task(self, task: asyncio.Task): + self._events_listening_tasks.append(task) + + async def market_and_trading_pair_map(self): + if self._market_and_trading_pair_map is None: + async with self._markets_initialization_lock: + if self._market_and_trading_pair_map is None: + await self.update_markets() + return self._market_and_trading_pair_map.copy() + + async def market_info_for_id(self, market_id: str): + if self._market_info_map is None: + async with self._markets_initialization_lock: + if self._market_info_map is None: + await self.update_markets() + + return self._market_info_map[market_id] + + async def trading_pair_for_market(self, market_id: str): + if self._market_and_trading_pair_map is None: + async with self._markets_initialization_lock: + if self._market_and_trading_pair_map is None: + await self.update_markets() + + return self._market_and_trading_pair_map[market_id] + + async def market_id_for_trading_pair(self, trading_pair: str) -> str: + if self._market_and_trading_pair_map is None: + async with self._markets_initialization_lock: + if self._market_and_trading_pair_map is None: + await self.update_markets() + + return self._market_and_trading_pair_map.inverse[trading_pair] + + async def all_markets(self): + if self._market_info_map is None: + async with self._markets_initialization_lock: + if self._market_info_map is None: + await self.update_markets() + + return list(self._market_info_map.values()) + + async def token(self, denom: str) -> InjectiveToken: + if self._tokens_map is None: + async with self._markets_initialization_lock: + if self._tokens_map is None: + await self.update_markets() + + return self._tokens_map.get(denom) + + def configure_throttler(self, throttler: AsyncThrottlerBase): + self._throttler = throttler + + async def trading_account_sequence(self) -> int: + if not self._is_trading_account_initialized: + await self.initialize_trading_account() + return self._client.get_sequence() + + async def trading_account_number(self) -> int: + if 
not self._is_trading_account_initialized: + await self.initialize_trading_account() + return self._client.get_number() + + async def stop(self): + await super().stop() + self._events_listening_tasks = [] + + async def initialize_trading_account(self): + await self._client.get_account(address=self.trading_account_injective_address) + self._is_trading_account_initialized = True + + def order_hash_manager(self) -> OrderHashManager: + if self._order_hash_manager is None: + self._order_hash_manager = OrderHashManager( + address=self._granter_address, + network=self._network, + subaccount_indexes=[self._granter_subaccount_index] + ) + return self._order_hash_manager + + async def update_markets(self): + self._tokens_map = {} + self._token_symbol_symbol_and_denom_map = bidict() + markets = await self._query_executor.spot_markets(status="active") + markets_map = {} + market_id_to_trading_pair = bidict() + + for market_info in markets: + try: + ticker_base, ticker_quote = market_info["ticker"].split("/") + base_token = self._token_from_market_info( + denom=market_info["baseDenom"], + token_meta=market_info["baseTokenMeta"], + candidate_symbol=ticker_base, + ) + quote_token = self._token_from_market_info( + denom=market_info["quoteDenom"], + token_meta=market_info["quoteTokenMeta"], + candidate_symbol=ticker_quote, + ) + market = InjectiveSpotMarket( + market_id=market_info["marketId"], + base_token=base_token, + quote_token=quote_token, + market_info=market_info + ) + market_id_to_trading_pair[market.market_id] = market.trading_pair() + markets_map[market.market_id] = market + except KeyError: + self.logger().debug(f"The market {market_info['marketId']} will be excluded because it could not be " + f"parsed ({market_info})") + continue + + self._market_info_map = markets_map + self._market_and_trading_pair_map = market_id_to_trading_pair + + async def transaction_result_data(self, transaction_hash: str) -> str: + async with self.throttler.execute_task(limit_id=CONSTANTS.GET_TRANSACTION_LIMIT_ID): + transaction_info = await self.query_executor.get_tx_by_hash(tx_hash=transaction_hash) + + return str(base64.b64decode(transaction_info["data"]["logs"])) + + async def timeout_height(self) -> int: + if not self._is_timeout_height_initialized: + await self._initialize_timeout_height() + return self._client.timeout_height + + def real_tokens_trading_pair(self, unique_trading_pair: str) -> str: + resulting_trading_pair = unique_trading_pair + if (self._market_and_trading_pair_map is not None + and self._market_info_map is not None): + market_id = self._market_and_trading_pair_map.inverse.get(unique_trading_pair) + market = self._market_info_map.get(market_id) + if market is not None: + resulting_trading_pair = combine_to_hb_trading_pair( + base=market.base_token.symbol, + quote=market.quote_token.symbol, + ) + + return resulting_trading_pair + + async def _initialize_timeout_height(self): + await self._client.sync_timeout_height() + self._is_timeout_height_initialized = True + + def _reset_order_hash_manager(self): + self._order_hash_manager = None + + def _sign_and_encode(self, transaction: Transaction) -> bytes: + sign_doc = transaction.get_sign_doc(self._public_key) + sig = self._private_key.sign(sign_doc.SerializeToString()) + tx_raw_bytes = transaction.get_tx_data(sig, self._public_key) + return tx_raw_bytes + + def _uses_default_portfolio_subaccount(self) -> bool: + return self._granter_subaccount_index == CONSTANTS.DEFAULT_SUBACCOUNT_INDEX + + def _token_from_market_info(self, denom: str, token_meta: 
Dict[str, Any], candidate_symbol: str) -> InjectiveToken: + token = self._tokens_map.get(denom) + if token is None: + unique_symbol = token_meta["symbol"] + if unique_symbol in self._token_symbol_symbol_and_denom_map: + if candidate_symbol not in self._token_symbol_symbol_and_denom_map: + unique_symbol = candidate_symbol + else: + unique_symbol = token_meta["name"] + token = InjectiveToken( + denom=denom, + symbol=token_meta["symbol"], + unique_symbol=unique_symbol, + name=token_meta["name"], + decimals=token_meta["decimals"] + ) + self._tokens_map[denom] = token + self._token_symbol_symbol_and_denom_map[unique_symbol] = denom + + return token + + async def _last_traded_price(self, market_id: str) -> Decimal: + async with self.throttler.execute_task(limit_id=CONSTANTS.SPOT_TRADES_LIMIT_ID): + trades_response = await self.query_executor.get_spot_trades( + market_ids=[market_id], + limit=1, + ) + + price = Decimal("nan") + if len(trades_response["trades"]) > 0: + market = await self.market_info_for_id(market_id=market_id) + price = market.price_from_chain_format(chain_price=Decimal(trades_response["trades"][0]["price"]["price"])) + + return price + + async def _create_order_definition(self, order: GatewayInFlightOrder): + market_id = await self.market_id_for_trading_pair(order.trading_pair) + definition = self.composer.SpotOrder( + market_id=market_id, + subaccount_id=self.portfolio_account_subaccount_id, + fee_recipient=self.portfolio_account_injective_address, + price=order.price, + quantity=order.amount, + is_buy=order.trade_type == TradeType.BUY, + is_po=order.order_type == OrderType.LIMIT_MAKER + ) + return definition + + def _calculate_order_hashes(self, orders) -> List[str]: + hash_manager = self.order_hash_manager() + hash_manager_result = hash_manager.compute_order_hashes( + spot_orders=orders, derivative_orders=[], subaccount_index=self._grantee_subaccount_index + ) + return hash_manager_result.spot + + def _order_book_updates_stream(self, market_ids: List[str]): + stream = self._query_executor.spot_order_book_updates_stream(market_ids=market_ids) + return stream + + def _public_trades_stream(self, market_ids: List[str]): + stream = self._query_executor.public_spot_trades_stream(market_ids=market_ids) + return stream + + def _subaccount_balance_stream(self): + stream = self._query_executor.subaccount_balance_stream(subaccount_id=self.portfolio_account_subaccount_id) + return stream + + def _subaccount_orders_stream(self, market_id: str): + stream = self._query_executor.subaccount_historical_spot_orders_stream( + market_id=market_id, subaccount_id=self.portfolio_account_subaccount_id + ) + return stream diff --git a/hummingbot/connector/exchange/injective_v2/injective_market.py b/hummingbot/connector/exchange/injective_v2/injective_market.py new file mode 100644 index 00000000000..e2733c13c3b --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_market.py @@ -0,0 +1,50 @@ +from dataclasses import dataclass +from decimal import Decimal +from typing import Any, Dict + +from hummingbot.connector.utils import combine_to_hb_trading_pair + + +@dataclass(frozen=True) +class InjectiveToken: + denom: str + symbol: str + unique_symbol: str + name: str + decimals: int + + def value_from_chain_format(self, chain_value: Decimal) -> Decimal: + scaler = Decimal(f"1e{-self.decimals}") + return chain_value * scaler + + +@dataclass(frozen=True) +class InjectiveSpotMarket: + market_id: str + base_token: InjectiveToken + quote_token: InjectiveToken + market_info: Dict[str, Any] + + 
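+    # Trading pairs are built from the tokens' unique symbols, which can differ from the raw tickers when two tokens share a symbol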
def trading_pair(self): + return combine_to_hb_trading_pair(self.base_token.unique_symbol, self.quote_token.unique_symbol) + + def quantity_from_chain_format(self, chain_quantity: Decimal) -> Decimal: + return self.base_token.value_from_chain_format(chain_value=chain_quantity) + + def price_from_chain_format(self, chain_price: Decimal) -> Decimal: + scaler = Decimal(f"1e{self.base_token.decimals-self.quote_token.decimals}") + return chain_price * scaler + + def min_price_tick_size(self) -> Decimal: + min_price_tick_size = Decimal(self.market_info["minPriceTickSize"]) + return self.price_from_chain_format(chain_price=min_price_tick_size) + + def min_quantity_tick_size(self) -> Decimal: + min_quantity_tick_size = Decimal(self.market_info["minQuantityTickSize"]) + return self.quantity_from_chain_format(chain_quantity=min_quantity_tick_size) + + def maker_fee_rate(self) -> Decimal: + return Decimal(self.market_info["makerFeeRate"]) + + def taker_fee_rate(self) -> Decimal: + return Decimal(self.market_info["takerFeeRate"]) diff --git a/hummingbot/connector/exchange/injective_v2/injective_query_executor.py b/hummingbot/connector/exchange/injective_v2/injective_query_executor.py new file mode 100644 index 00000000000..fb74a37fe88 --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_query_executor.py @@ -0,0 +1,211 @@ +from abc import ABC, abstractmethod +from typing import Any, Dict, List, Optional + +from google.protobuf import json_format +from grpc import RpcError +from pyinjective.async_client import AsyncClient + + +class BaseInjectiveQueryExecutor(ABC): + + @abstractmethod + async def ping(self): + raise NotImplementedError + + @abstractmethod + async def spot_markets(self, status: str) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def get_spot_orderbook(self, market_id: str) -> Dict[str, Any]: + raise NotImplementedError # pragma: no cover + + @abstractmethod + async def get_tx_by_hash(self, tx_hash: str) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def get_tx_block_height(self, tx_hash: str) -> int: + raise NotImplementedError + + @abstractmethod + async def account_portfolio(self, account_address: str) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def simulate_tx(self, tx_byte: bytes) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def send_tx_sync_mode(self, tx_byte: bytes) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def get_spot_trades( + self, + market_ids: List[str], + subaccount_id: Optional[str] = None, + start_time: Optional[int] = None, + skip: Optional[int] = None, + limit: Optional[int] = None, + ) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def get_historical_spot_orders( + self, + market_ids: List[str], + subaccount_id: str, + start_time: int, + skip: int, + ) -> Dict[str, Any]: + raise NotImplementedError + + @abstractmethod + async def spot_order_book_updates_stream(self, market_ids: List[str]): + raise NotImplementedError # pragma: no cover + + @abstractmethod + async def public_spot_trades_stream(self, market_ids: List[str]): + raise NotImplementedError # pragma: no cover + + @abstractmethod + async def subaccount_balance_stream(self, subaccount_id: str): + raise NotImplementedError # pragma: no cover + + @abstractmethod + async def subaccount_historical_spot_orders_stream( + self, market_id: str, subaccount_id: str + ): + raise NotImplementedError + + +class 
PythonSDKInjectiveQueryExecutor(BaseInjectiveQueryExecutor): + + def __init__(self, sdk_client: AsyncClient): + super().__init__() + self._sdk_client = sdk_client + + async def ping(self): # pragma: no cover + await self._sdk_client.ping() + + async def spot_markets(self, status: str) -> List[Dict[str, Any]]: # pragma: no cover + response = await self._sdk_client.get_spot_markets(status=status) + markets = [] + + for market_info in response.markets: + markets.append(json_format.MessageToDict(market_info)) + + return markets + + async def get_spot_orderbook(self, market_id: str) -> Dict[str, Any]: # pragma: no cover + order_book_response = await self._sdk_client.get_spot_orderbookV2(market_id=market_id) + order_book_data = order_book_response.orderbook + result = { + "buys": [(buy.price, buy.quantity, buy.timestamp) for buy in order_book_data.buys], + "sells": [(buy.price, buy.quantity, buy.timestamp) for buy in order_book_data.sells], + "sequence": order_book_data.sequence, + "timestamp": order_book_data.timestamp, + } + + return result + + async def get_tx_by_hash(self, tx_hash: str) -> Dict[str, Any]: # pragma: no cover + try: + transaction_response = await self._sdk_client.get_tx_by_hash(tx_hash=tx_hash) + except RpcError as rpc_exception: + if "object not found" in str(rpc_exception): + raise ValueError(f"The transaction with hash {tx_hash} was not found") + else: + raise + + result = json_format.MessageToDict(transaction_response) + return result + + async def get_tx_block_height(self, tx_hash: str) -> int: # pragma: no cover + try: + transaction_response = await self._sdk_client.get_tx(tx_hash=tx_hash) + except RpcError as rpc_exception: + if "StatusCode.NOT_FOUND" in str(rpc_exception): + raise ValueError(f"The transaction with hash {tx_hash} was not found") + else: + raise + + result = transaction_response.tx_response.height + return result + + async def account_portfolio(self, account_address: str) -> Dict[str, Any]: # pragma: no cover + portfolio_response = await self._sdk_client.get_account_portfolio(account_address=account_address) + result = json_format.MessageToDict(portfolio_response.portfolio) + return result + + async def simulate_tx(self, tx_byte: bytes) -> Dict[str, Any]: # pragma: no cover + response, success = await self._sdk_client.simulate_tx(tx_byte=tx_byte) + if not success: + raise RuntimeError(f"Transaction simulation failure ({response})") + result = json_format.MessageToDict(response) + return result + + async def send_tx_sync_mode(self, tx_byte: bytes) -> Dict[str, Any]: # pragma: no cover + response = await self._sdk_client.send_tx_sync_mode(tx_byte=tx_byte) + result = json_format.MessageToDict(response) + return result + + async def get_spot_trades( + self, + market_ids: List[str], + subaccount_id: Optional[str] = None, + start_time: Optional[int] = None, + skip: Optional[int] = None, + limit: Optional[int] = None, + ) -> Dict[str, Any]: # pragma: no cover + response = await self._sdk_client.get_spot_trades( + market_ids=market_ids, + subaccount_id=subaccount_id, + start_time=start_time, + skip=skip, + limit=limit, + ) + result = json_format.MessageToDict(response) + return result + + async def get_historical_spot_orders( + self, + market_ids: List[str], + subaccount_id: str, + start_time: int, + skip: int, + ) -> Dict[str, Any]: # pragma: no cover + response = await self._sdk_client.get_historical_spot_orders( + market_ids=market_ids, + subaccount_id=subaccount_id, + start_time=start_time, + skip=skip, + ) + result = json_format.MessageToDict(response) + 
return result + + async def spot_order_book_updates_stream(self, market_ids: List[str]): # pragma: no cover + stream = await self._sdk_client.stream_spot_orderbook_update(market_ids=market_ids) + async for update in stream: + order_book_update = update.orderbook_level_updates + yield json_format.MessageToDict(order_book_update) + + async def public_spot_trades_stream(self, market_ids: List[str]): # pragma: no cover + stream = await self._sdk_client.stream_spot_trades(market_ids=market_ids) + async for trade in stream: + trade_data = trade.trade + yield json_format.MessageToDict(trade_data) + + async def subaccount_balance_stream(self, subaccount_id: str): # pragma: no cover + stream = await self._sdk_client.stream_subaccount_balance(subaccount_id=subaccount_id) + async for event in stream: + yield json_format.MessageToDict(event) + + async def subaccount_historical_spot_orders_stream( + self, market_id: str, subaccount_id: str + ): # pragma: no cover + stream = await self._sdk_client.stream_historical_spot_orders(market_id=market_id, subaccount_id=subaccount_id) + async for event in stream: + event_data = event.order + yield json_format.MessageToDict(event_data) diff --git a/hummingbot/connector/exchange/injective_v2/injective_v2_api_order_book_data_source.py b/hummingbot/connector/exchange/injective_v2/injective_v2_api_order_book_data_source.py new file mode 100644 index 00000000000..9150b321ec7 --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_v2_api_order_book_data_source.py @@ -0,0 +1,72 @@ +import asyncio +from typing import TYPE_CHECKING, Dict, List, Optional + +from hummingbot.connector.exchange.injective_v2 import injective_constants as CONSTANTS +from hummingbot.connector.exchange.injective_v2.injective_data_source import InjectiveDataSource +from hummingbot.core.data_type.order_book_message import OrderBookMessage +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.event.event_forwarder import EventForwarder +from hummingbot.core.event.events import OrderBookDataSourceEvent + +if TYPE_CHECKING: + from hummingbot.connector.exchange.injective_v2.injective_v2_exchange import InjectiveV2Exchange + + +class InjectiveV2APIOrderBookDataSource(OrderBookTrackerDataSource): + + def __init__( + self, + trading_pairs: List[str], + connector: "InjectiveV2Exchange", + data_source: InjectiveDataSource, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + super().__init__(trading_pairs=trading_pairs) + self._ev_loop = asyncio.get_event_loop() + self._connector = connector + self._data_source = data_source + self._domain = domain + self._forwarders = [] + self._configure_event_forwarders() + + async def get_last_traded_prices(self, trading_pairs: List[str], domain: Optional[str] = None) -> Dict[str, float]: + return await self._connector.get_last_traded_prices(trading_pairs=trading_pairs) + + async def listen_for_subscriptions(self): + # Subscriptions to streams is handled by the data_source + # Here we just make sure the data_source is listening to the streams + market_ids = [await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + for trading_pair in self._trading_pairs] + await self._data_source.start(market_ids=market_ids) + + async def _order_book_snapshot(self, trading_pair: str) -> OrderBookMessage: + symbol = await self._connector.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + snapshot = await self._data_source.order_book_snapshot(market_id=symbol, 
trading_pair=trading_pair) + return snapshot + + async def _parse_order_book_diff_message(self, raw_message: OrderBookMessage, message_queue: asyncio.Queue): + # In Injective 'raw_message' is not a raw message, but the OrderBookMessage with type Trade created + # by the data source + message_queue.put_nowait(raw_message) + + async def _parse_trade_message(self, raw_message: OrderBookMessage, message_queue: asyncio.Queue): + # In Injective 'raw_message' is not a raw message, but the OrderBookMessage with type Trade created + # by the data source + message_queue.put_nowait(raw_message) + + def _configure_event_forwarders(self): + event_forwarder = EventForwarder(to_function=self._process_order_book_event) + self._forwarders.append(event_forwarder) + self._data_source.add_listener( + event_tag=OrderBookDataSourceEvent.DIFF_EVENT, listener=event_forwarder + ) + + event_forwarder = EventForwarder(to_function=self._process_public_trade_event) + self._forwarders.append(event_forwarder) + self._data_source.add_listener(event_tag=OrderBookDataSourceEvent.TRADE_EVENT, listener=event_forwarder) + + def _process_order_book_event(self, order_book_diff: OrderBookMessage): + self._message_queue[self._diff_messages_queue_key].put_nowait(order_book_diff) + + def _process_public_trade_event(self, trade_update: OrderBookMessage): + self._message_queue[self._trade_messages_queue_key].put_nowait(trade_update) diff --git a/hummingbot/connector/exchange/injective_v2/injective_v2_exchange.py b/hummingbot/connector/exchange/injective_v2/injective_v2_exchange.py new file mode 100644 index 00000000000..b10325cb2e4 --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_v2_exchange.py @@ -0,0 +1,894 @@ +import asyncio +from collections import defaultdict +from decimal import Decimal +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple + +from async_timeout import timeout + +from hummingbot.connector.client_order_tracker import ClientOrderTracker +from hummingbot.connector.constants import s_decimal_NaN +from hummingbot.connector.exchange.injective_v2 import ( + injective_constants as CONSTANTS, + injective_v2_web_utils as web_utils, +) +from hummingbot.connector.exchange.injective_v2.injective_data_source import InjectiveDataSource +from hummingbot.connector.exchange.injective_v2.injective_v2_api_order_book_data_source import ( + InjectiveV2APIOrderBookDataSource, +) +from hummingbot.connector.exchange_py_base import ExchangePyBase +from hummingbot.connector.gateway.gateway_in_flight_order import GatewayInFlightOrder +from hummingbot.connector.gateway.gateway_order_tracker import GatewayOrderTracker +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.connector.utils import combine_to_hb_trading_pair, get_new_client_order_id +from hummingbot.core.api_throttler.data_types import RateLimit +from hummingbot.core.data_type.cancellation_result import CancellationResult +from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.in_flight_order import OrderState, OrderUpdate, TradeUpdate +from hummingbot.core.data_type.limit_order import LimitOrder +from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource +from hummingbot.core.data_type.trade_fee import TradeFeeBase, TradeFeeSchema +from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource +from hummingbot.core.event.event_forwarder import EventForwarder +from 
hummingbot.core.event.events import AccountEvent, BalanceUpdateEvent, MarketEvent +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.utils.estimate_fee import build_trade_fee +from hummingbot.core.web_assistant.auth import AuthBase +from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory + +if TYPE_CHECKING: + from hummingbot.client.config.config_helpers import ClientConfigAdapter + + +class InjectiveV2Exchange(ExchangePyBase): + web_utils = web_utils + + def __init__( + self, + client_config_map: "ClientConfigAdapter", + injective_private_key: str, + injective_subaccount_index: str, + injective_granter_address: str, + injective_granter_subaccount_index: str, + trading_pairs: Optional[List[str]] = None, + trading_required: bool = True, + domain: str = CONSTANTS.DEFAULT_DOMAIN, + ): + self._orders_processing_delta_time = 0.5 + + self._trading_required = trading_required + self._trading_pairs = trading_pairs + self._domain = domain + self._data_source = InjectiveDataSource.for_grantee( + private_key=injective_private_key, + subaccount_index=(0 if injective_subaccount_index == "" else int(injective_subaccount_index)), + granter_address=injective_granter_address, + granter_subaccount_index=(0 + if injective_granter_subaccount_index == "" + else int(injective_granter_subaccount_index)), + domain=domain, + ) + super().__init__(client_config_map=client_config_map) + self._data_source.configure_throttler(throttler=self._throttler) + self._forwarders = [] + self._configure_event_forwarders() + self._latest_polled_order_fill_time: float = self._time() + self._orders_transactions_check_task: Optional[asyncio.Task] = None + self._last_received_message_timestamp = 0 + self._orders_queued_to_create: List[GatewayInFlightOrder] = [] + self._orders_queued_to_cancel: List[GatewayInFlightOrder] = [] + + self._orders_transactions_check_task = None + self._queued_orders_task = None + + @property + def name(self) -> str: + suffix = "" if self.domain == CONSTANTS.DEFAULT_DOMAIN else f"_{self.domain}" + return CONSTANTS.EXCHANGE_NAME + suffix + + @property + def authenticator(self) -> AuthBase: + return None + + @property + def rate_limits_rules(self) -> List[RateLimit]: + return CONSTANTS.RATE_LIMITS + + @property + def domain(self) -> str: + return self._domain + + @property + def client_order_id_max_length(self) -> int: + return None + + @property + def client_order_id_prefix(self) -> str: + return "" + + @property + def trading_rules_request_path(self) -> str: + raise NotImplementedError + + @property + def trading_pairs_request_path(self) -> str: + raise NotImplementedError + + @property + def check_network_request_path(self) -> str: + raise NotImplementedError + + @property + def trading_pairs(self) -> List[str]: + return self._trading_pairs + + @property + def is_cancel_request_in_exchange_synchronous(self) -> bool: + return False + + @property + def is_trading_required(self) -> bool: + return self._trading_required + + @property + def status_dict(self) -> Dict[str, bool]: + status = super().status_dict + status["data_source_initialized"] = self._data_source.is_started() + return status + + async def start_network(self): + await super().start_network() + + market_ids = [ + await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + for trading_pair in self._trading_pairs + ] + await self._data_source.start(market_ids=market_ids) + + if self.is_trading_required: + 
self._orders_transactions_check_task = safe_ensure_future(self._check_orders_transactions()) + self._queued_orders_task = safe_ensure_future(self._process_queued_orders()) + + async def stop_network(self): + """ + This function is executed when the connector is stopped. It performs a general cleanup and stops all background + tasks that require the connection with the exchange to work. + """ + await super().stop_network() + await self._data_source.stop() + self._forwarders = [] + if self._orders_transactions_check_task is not None: + self._orders_transactions_check_task.cancel() + self._orders_transactions_check_task = None + if self._queued_orders_task is not None: + self._queued_orders_task.cancel() + self._queued_orders_task = None + + def supported_order_types(self) -> List[OrderType]: + return [OrderType.LIMIT, OrderType.LIMIT_MAKER] + + def start_tracking_order( + self, + order_id: str, + exchange_order_id: Optional[str], + trading_pair: str, + trade_type: TradeType, + price: Decimal, + amount: Decimal, + order_type: OrderType, + **kwargs, + ): + self._order_tracker.start_tracking_order( + GatewayInFlightOrder( + client_order_id=order_id, + exchange_order_id=exchange_order_id, + trading_pair=trading_pair, + order_type=order_type, + trade_type=trade_type, + amount=amount, + price=price, + creation_timestamp=self.current_timestamp, + ) + ) + + def batch_order_create(self, orders_to_create: List[LimitOrder]) -> List[LimitOrder]: + """ + Issues a batch order creation as a single API request for exchanges that implement this feature. The default + implementation of this method is to send the requests discretely (one by one). + :param orders_to_create: A list of LimitOrder objects representing the orders to create. The order IDs + can be blanc. + :returns: A tuple composed of LimitOrder objects representing the created orders, complete with the generated + order IDs. + """ + orders_with_ids_to_create = [] + for order in orders_to_create: + client_order_id = get_new_client_order_id( + is_buy=order.is_buy, + trading_pair=order.trading_pair, + hbot_order_id_prefix=self.client_order_id_prefix, + max_id_len=self.client_order_id_max_length, + ) + orders_with_ids_to_create.append( + LimitOrder( + client_order_id=client_order_id, + trading_pair=order.trading_pair, + is_buy=order.is_buy, + base_currency=order.base_currency, + quote_currency=order.quote_currency, + price=order.price, + quantity=order.quantity, + filled_quantity=order.filled_quantity, + creation_timestamp=order.creation_timestamp, + status=order.status, + ) + ) + safe_ensure_future(self._execute_batch_order_create(orders_to_create=orders_with_ids_to_create)) + return orders_with_ids_to_create + + def batch_order_cancel(self, orders_to_cancel: List[LimitOrder]): + """ + Issues a batch order cancelation as a single API request for exchanges that implement this feature. The default + implementation of this method is to send the requests discretely (one by one). + :param orders_to_cancel: A list of the orders to cancel. + """ + safe_ensure_future(coro=self._execute_batch_cancel(orders_to_cancel=orders_to_cancel)) + + async def cancel_all(self, timeout_seconds: float) -> List[CancellationResult]: + """ + Cancels all currently active orders. The cancellations are performed in parallel tasks. 
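`batch_order_create` and `batch_order_cancel` above accept plain `LimitOrder` objects so several creations or cancellations can be grouped into a single Injective transaction. A hedged sketch of how a script might call them; the `connector` handle, trading pair and prices are hypothetical, and client order ids may be left blank because the connector generates them:

```python
from decimal import Decimal

from hummingbot.core.data_type.limit_order import LimitOrder

# `connector` is assumed to be an already-initialized InjectiveV2Exchange instance.
orders = [
    LimitOrder(client_order_id="", trading_pair="INJ-USDT", is_buy=True,
               base_currency="INJ", quote_currency="USDT",
               price=Decimal("7.5"), quantity=Decimal("10")),
    LimitOrder(client_order_id="", trading_pair="INJ-USDT", is_buy=False,
               base_currency="INJ", quote_currency="USDT",
               price=Decimal("7.9"), quantity=Decimal("10")),
]
created = connector.batch_order_create(orders_to_create=orders)   # returns the orders with generated ids
connector.batch_order_cancel(orders_to_cancel=created)            # cancellations are likewise batched
```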
+ + :param timeout_seconds: the maximum time (in seconds) the cancel logic should run + + :return: a list of CancellationResult instances, one for each of the orders to be cancelled + """ + incomplete_orders = {} + limit_orders = [] + successful_cancellations = [] + + for order in self.in_flight_orders.values(): + if not order.is_done: + incomplete_orders[order.client_order_id] = order + limit_orders.append(order.to_limit_order()) + + if len(limit_orders) > 0: + try: + async with timeout(timeout_seconds): + cancellation_results = await self._execute_batch_cancel(orders_to_cancel=limit_orders) + for cr in cancellation_results: + if cr.success: + del incomplete_orders[cr.order_id] + successful_cancellations.append(CancellationResult(cr.order_id, True)) + except Exception: + self.logger().network( + "Unexpected error cancelling orders.", + exc_info=True, + app_warning_msg="Failed to cancel order. Check API key and network connection." + ) + failed_cancellations = [CancellationResult(oid, False) for oid in incomplete_orders.keys()] + return successful_cancellations + failed_cancellations + + async def check_network(self) -> NetworkStatus: + """ + Checks connectivity with the exchange using the API + """ + try: + status = await self._data_source.check_network() + except asyncio.CancelledError: + raise + except Exception: + status = NetworkStatus.NOT_CONNECTED + return status + + def trigger_event(self, event_tag: Enum, message: any): + # Reimplemented because Injective connector has trading pairs with modified token names, because market tickers + # are not always unique. + # We need to change the original trading pair in all events to the real tokens trading pairs to not impact the + # bot events processing + trading_pair = getattr(message, "trading_pair", None) + if trading_pair is not None: + new_trading_pair = self._data_source.real_tokens_trading_pair(unique_trading_pair=trading_pair) + if isinstance(message, tuple): + message = message._replace(trading_pair=new_trading_pair) + else: + setattr(message, "trading_pair", new_trading_pair) + + super().trigger_event(event_tag=event_tag, message=message) + + def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception) -> bool: + return False + + def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool: + return CONSTANTS.ORDER_NOT_FOUND_ERROR_MESSAGE in str(status_update_exception) + + def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool: + # For Injective the cancelation is done by sending a transaction to the chain. 
+ # The cancel request is not validated until the transaction is included in a block, and so this does not apply + return False + + async def _place_cancel(self, order_id: str, tracked_order: GatewayInFlightOrder): + # Not required because of _execute_order_cancel redefinition + raise NotImplementedError + + async def _execute_order_cancel(self, order: GatewayInFlightOrder) -> str: + # Order cancelation requests for single orders are queued to be executed in batch if possible + self._orders_queued_to_cancel.append(order) + return None + + async def _place_order(self, order_id: str, trading_pair: str, amount: Decimal, trade_type: TradeType, + order_type: OrderType, price: Decimal, **kwargs) -> Tuple[str, float]: + # Not required because of _place_order_and_process_update redefinition + raise NotImplementedError + + async def _place_order_and_process_update(self, order: GatewayInFlightOrder, **kwargs) -> str: + # Order creation requests for single orders are queued to be executed in batch if possible + self._orders_queued_to_create.append(order) + return None + + async def _execute_batch_order_create(self, orders_to_create: List[LimitOrder]): + inflight_orders_to_create = [] + for order in orders_to_create: + valid_order = await self._start_tracking_and_validate_order( + trade_type=TradeType.BUY if order.is_buy else TradeType.SELL, + order_id=order.client_order_id, + trading_pair=order.trading_pair, + amount=order.quantity, + order_type=OrderType.LIMIT, + price=order.price, + ) + if valid_order is not None: + inflight_orders_to_create.append(valid_order) + await self._execute_batch_inflight_order_create(inflight_orders_to_create=inflight_orders_to_create) + + async def _execute_batch_inflight_order_create(self, inflight_orders_to_create: List[GatewayInFlightOrder]): + try: + place_order_results = await self._data_source.create_orders( + orders_to_create=inflight_orders_to_create + ) + for place_order_result, in_flight_order in ( + zip(place_order_results, inflight_orders_to_create) + ): + if place_order_result.exception: + self._on_order_creation_failure( + order_id=in_flight_order.client_order_id, + trading_pair=in_flight_order.trading_pair, + amount=in_flight_order.amount, + trade_type=in_flight_order.trade_type, + order_type=in_flight_order.order_type, + price=in_flight_order.price, + exception=place_order_result.exception, + ) + else: + self._update_order_after_creation_success( + exchange_order_id=place_order_result.exchange_order_id, + order=in_flight_order, + update_timestamp=self.current_timestamp, + misc_updates=place_order_result.misc_updates, + ) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().network("Batch order create failed.") + for order in inflight_orders_to_create: + self._on_order_creation_failure( + order_id=order.client_order_id, + trading_pair=order.trading_pair, + amount=order.amount, + trade_type=order.trade_type, + order_type=order.order_type, + price=order.price, + exception=ex, + ) + + async def _start_tracking_and_validate_order( + self, + trade_type: TradeType, + order_id: str, + trading_pair: str, + amount: Decimal, + order_type: OrderType, + price: Optional[Decimal] = None, + **kwargs + ) -> Optional[GatewayInFlightOrder]: + trading_rule = self._trading_rules[trading_pair] + + if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER]: + price = self.quantize_order_price(trading_pair, price) + amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount) + + self.start_tracking_order( + order_id=order_id, + 
exchange_order_id=None, + trading_pair=trading_pair, + order_type=order_type, + trade_type=trade_type, + price=price, + amount=amount, + **kwargs, + ) + order = self._order_tracker.active_orders[order_id] + + if order_type not in self.supported_order_types(): + self.logger().error(f"{order_type} is not in the list of supported order types") + self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) + order = None + elif amount < trading_rule.min_order_size: + self.logger().warning(f"{trade_type.name.title()} order amount {amount} is lower than the minimum order" + f" size {trading_rule.min_order_size}. The order will not be created.") + self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) + order = None + elif price is not None and amount * price < trading_rule.min_notional_size: + self.logger().warning(f"{trade_type.name.title()} order notional {amount * price} is lower than the " + f"minimum notional size {trading_rule.min_notional_size}. " + "The order will not be created.") + self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) + order = None + + return order + + def _update_order_after_creation_success( + self, + exchange_order_id: str, + order: GatewayInFlightOrder, + update_timestamp: float, + misc_updates: Optional[Dict[str, Any]] = None + ): + order_update: OrderUpdate = OrderUpdate( + client_order_id=order.client_order_id, + exchange_order_id=str(exchange_order_id), + trading_pair=order.trading_pair, + update_timestamp=update_timestamp, + new_state=order.current_state, + misc_updates=misc_updates, + ) + self._order_tracker.process_order_update(order_update) + + def _on_order_creation_failure( + self, + order_id: str, + trading_pair: str, + amount: Decimal, + trade_type: TradeType, + order_type: OrderType, + price: Optional[Decimal], + exception: Exception, + ): + self.logger().network( + f"Error submitting {trade_type.name.lower()} {order_type.name.upper()} order to {self.name_cap} for " + f"{amount} {trading_pair} {price}.", + exc_info=exception, + app_warning_msg=f"Failed to submit buy order to {self.name_cap}. Check API key and network connection." 
+ ) + self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) + + def _update_order_after_creation_failure(self, order_id: str, trading_pair: str): + order_update: OrderUpdate = OrderUpdate( + client_order_id=order_id, + trading_pair=trading_pair, + update_timestamp=self.current_timestamp, + new_state=OrderState.FAILED, + ) + self._order_tracker.process_order_update(order_update) + + async def _execute_batch_cancel(self, orders_to_cancel: List[LimitOrder]) -> List[CancellationResult]: + results = [] + tracked_orders_to_cancel = [] + + for order in orders_to_cancel: + tracked_order = self._order_tracker.all_updatable_orders.get(order.client_order_id) + if tracked_order is not None: + tracked_orders_to_cancel.append(tracked_order) + else: + results.append(CancellationResult(order_id=order.client_order_id, success=False)) + + if len(tracked_orders_to_cancel) > 0: + results.extend(await self._execute_batch_order_cancel(orders_to_cancel=tracked_orders_to_cancel)) + + return results + + async def _execute_batch_order_cancel(self, orders_to_cancel: List[GatewayInFlightOrder]) -> List[CancellationResult]: + try: + cancel_order_results = await self._data_source.cancel_orders(orders_to_cancel=orders_to_cancel) + cancelation_results = [] + for cancel_order_result in cancel_order_results: + success = True + if cancel_order_result.not_found: + self.logger().warning( + f"Failed to cancel the order {cancel_order_result.client_order_id} due to the order" + f" not being found." + ) + await self._order_tracker.process_order_not_found( + client_order_id=cancel_order_result.client_order_id + ) + success = False + elif cancel_order_result.exception is not None: + self.logger().error( + f"Failed to cancel order {cancel_order_result.client_order_id}", + exc_info=cancel_order_result.exception, + ) + success = False + else: + order_update: OrderUpdate = OrderUpdate( + client_order_id=cancel_order_result.client_order_id, + trading_pair=cancel_order_result.trading_pair, + update_timestamp=self.current_timestamp, + new_state=(OrderState.CANCELED + if self.is_cancel_request_in_exchange_synchronous + else OrderState.PENDING_CANCEL), + misc_updates=cancel_order_result.misc_updates, + ) + self._order_tracker.process_order_update(order_update) + cancelation_results.append( + CancellationResult(order_id=cancel_order_result.client_order_id, success=success) + ) + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + f"Failed to cancel orders {', '.join([o.client_order_id for o in orders_to_cancel])}", + exc_info=True, + ) + cancelation_results = [ + CancellationResult(order_id=order.client_order_id, success=False) + for order in orders_to_cancel + ] + + return cancelation_results + + def _update_order_after_cancelation_success(self, order: GatewayInFlightOrder): + order_update: OrderUpdate = OrderUpdate( + client_order_id=order.client_order_id, + trading_pair=order.trading_pair, + update_timestamp=self.current_timestamp, + new_state=(OrderState.CANCELED + if self.is_cancel_request_in_exchange_synchronous + else OrderState.PENDING_CANCEL), + ) + self._order_tracker.process_order_update(order_update) + + def _get_fee(self, base_currency: str, quote_currency: str, order_type: OrderType, order_side: TradeType, + amount: Decimal, price: Decimal = s_decimal_NaN, + is_maker: Optional[bool] = None) -> TradeFeeBase: + is_maker = is_maker or (order_type is OrderType.LIMIT_MAKER) + trading_pair = combine_to_hb_trading_pair(base=base_currency, quote=quote_currency) + if 
trading_pair in self._trading_fees: + fee_schema: TradeFeeSchema = self._trading_fees[trading_pair] + fee_rate = fee_schema.maker_percent_fee_decimal if is_maker else fee_schema.taker_percent_fee_decimal + fee = TradeFeeBase.new_spot_fee( + fee_schema=fee_schema, + trade_type=order_side, + percent=fee_rate, + percent_token=fee_schema.percent_fee_token, + ) + else: + fee = build_trade_fee( + self.name, + is_maker, + base_currency=base_currency, + quote_currency=quote_currency, + order_type=order_type, + order_side=order_side, + amount=amount, + price=price, + ) + return fee + + async def _update_trading_fees(self): + self._trading_fees = await self._data_source.get_trading_fees() + + async def _user_stream_event_listener(self): + # Not required in Injective since all event are processed using the data source PubSub + pass # pragma: no cover + + async def _format_trading_rules(self, exchange_info_dict: Dict[str, Any]) -> List[TradingRule]: + # Not used in Injective + raise NotImplementedError # pragma: no cover + + async def _update_balances(self): + all_balances = await self._data_source.all_account_balances() + + self._account_available_balances.clear() + self._account_balances.clear() + + for token, token_balance_info in all_balances.items(): + self._account_balances[token] = token_balance_info["total_balance"] + self._account_available_balances[token] = token_balance_info["available_balance"] + + async def _all_trade_updates_for_order(self, order: GatewayInFlightOrder) -> List[TradeUpdate]: + # Not required because of _update_orders_fills redefinition + raise NotImplementedError + + async def _update_orders_fills(self, orders: List[GatewayInFlightOrder]): + oldest_order_creation_time = self.current_timestamp + all_market_ids = set() + orders_by_hash = {} + + for order in orders: + oldest_order_creation_time = min(oldest_order_creation_time, order.creation_timestamp) + all_market_ids.add(await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair)) + if order.exchange_order_id is not None: + orders_by_hash[order.exchange_order_id] = order + + try: + start_time = min(oldest_order_creation_time, self._latest_polled_order_fill_time) + trade_updates = await self._data_source.spot_trade_updates(market_ids=all_market_ids, start_time=start_time) + for trade_update in trade_updates: + tracked_order = orders_by_hash.get(trade_update.exchange_order_id) + if tracked_order is not None: + new_trade_update = TradeUpdate( + trade_id=trade_update.trade_id, + client_order_id=tracked_order.client_order_id, + exchange_order_id=trade_update.exchange_order_id, + trading_pair=trade_update.trading_pair, + fill_timestamp=trade_update.fill_timestamp, + fill_price=trade_update.fill_price, + fill_base_amount=trade_update.fill_base_amount, + fill_quote_amount=trade_update.fill_quote_amount, + fee=trade_update.fee, + is_taker=trade_update.is_taker, + ) + self._latest_polled_order_fill_time = max(self._latest_polled_order_fill_time, trade_update.fill_timestamp) + self._order_tracker.process_trade_update(new_trade_update) + except asyncio.CancelledError: + raise + except Exception as ex: + self.logger().warning( + f"Failed to fetch trade updates. 
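`_get_fee` above uses the market's cached `TradeFeeSchema` when trading fees have already been fetched, picking the maker or taker percentage, and only falls back to `build_trade_fee` otherwise. A minimal illustration of the schema-based branch; the fee percentages below are made up for the example:

```python
from decimal import Decimal

from hummingbot.core.data_type.common import TradeType
from hummingbot.core.data_type.trade_fee import TradeFeeBase, TradeFeeSchema

# Hypothetical schema as it might be cached in self._trading_fees["INJ-USDT"]
fee_schema = TradeFeeSchema(
    maker_percent_fee_decimal=Decimal("-0.0001"),   # e.g. a maker rebate
    taker_percent_fee_decimal=Decimal("0.001"),
    percent_fee_token="USDT",
)

is_maker = True
fee_rate = fee_schema.maker_percent_fee_decimal if is_maker else fee_schema.taker_percent_fee_decimal
fee = TradeFeeBase.new_spot_fee(
    fee_schema=fee_schema,
    trade_type=TradeType.BUY,
    percent=fee_rate,
    percent_token=fee_schema.percent_fee_token,
)
print(fee.percent)  # -0.0001
```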
Error: {ex}", + exc_info=ex, + ) + + async def _request_order_status(self, tracked_order: GatewayInFlightOrder) -> OrderUpdate: + # Not required due to the redefinition of _update_orders_with_error_handler + raise NotImplementedError + + async def _update_orders_with_error_handler(self, orders: List[GatewayInFlightOrder], error_handler: Callable): + oldest_order_creation_time = self.current_timestamp + all_market_ids = set() + orders_by_hash = {} + + for order in orders: + oldest_order_creation_time = min(oldest_order_creation_time, order.creation_timestamp) + all_market_ids.add(await self.exchange_symbol_associated_to_pair(trading_pair=order.trading_pair)) + if order.exchange_order_id is not None: + orders_by_hash[order.exchange_order_id] = order + + try: + order_updates = await self._data_source.spot_order_updates( + market_ids=all_market_ids, + start_time=oldest_order_creation_time + ) + + for order_update in order_updates: + tracked_order = orders_by_hash.get(order_update.exchange_order_id) + if tracked_order is not None: + try: + new_order_update = OrderUpdate( + trading_pair=order_update.trading_pair, + update_timestamp=order_update.update_timestamp, + new_state=order_update.new_state, + client_order_id=tracked_order.client_order_id, + exchange_order_id=order_update.exchange_order_id, + misc_updates=order_update.misc_updates, + ) + + if tracked_order.current_state == OrderState.PENDING_CREATE and new_order_update.new_state != OrderState.OPEN: + open_update = OrderUpdate( + trading_pair=order_update.trading_pair, + update_timestamp=order_update.update_timestamp, + new_state=OrderState.OPEN, + client_order_id=tracked_order.client_order_id, + exchange_order_id=order_update.exchange_order_id, + misc_updates=order_update.misc_updates, + ) + self._order_tracker.process_order_update(open_update) + + del orders_by_hash[order_update.exchange_order_id] + self._order_tracker.process_order_update(new_order_update) + except asyncio.CancelledError: + raise + except Exception as ex: + await error_handler(tracked_order, ex) + + if len(orders_by_hash) > 0: + # await self._data_source.check_order_hashes_synchronization(orders=orders_by_hash.values()) + for order in orders_by_hash.values(): + not_found_error = RuntimeError( + f"There was a problem updating order {order.client_order_id} " + f"({CONSTANTS.ORDER_NOT_FOUND_ERROR_MESSAGE})" + ) + await error_handler(order, not_found_error) + except asyncio.CancelledError: + raise + except Exception as request_error: + for order in orders_by_hash.values(): + await error_handler(order, request_error) + + def _create_web_assistants_factory(self) -> WebAssistantsFactory: + return WebAssistantsFactory(throttler=self._throttler) + + def _create_order_tracker(self) -> ClientOrderTracker: + tracker = GatewayOrderTracker(connector=self) + return tracker + + def _create_order_book_data_source(self) -> OrderBookTrackerDataSource: + return InjectiveV2APIOrderBookDataSource( + trading_pairs=self.trading_pairs, + connector=self, + data_source=self._data_source, + domain=self.domain + ) + + def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource: + # Not used in Injective + raise NotImplementedError # pragma: no cover + + def _is_user_stream_initialized(self): + # Injective does not have private websocket endpoints + return self._data_source.is_started() + + def _create_user_stream_tracker(self): + # Injective does not use a tracker for the private streams + return None + + def _create_user_stream_tracker_task(self): + # Injective does not use a tracker 
for the private streams + return None + + def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]): + # Not used in Injective + raise NotImplementedError() # pragma: no cover + + async def _initialize_trading_pair_symbol_map(self): + exchange_info = None + try: + mapping = await self._data_source.market_and_trading_pair_map() + self._set_trading_pair_symbol_map(mapping) + except Exception: + self.logger().exception("There was an error requesting exchange info.") + return exchange_info + + async def _update_trading_rules(self): + await self._data_source.update_markets() + await self._initialize_trading_pair_symbol_map() + trading_rules_list = await self._data_source.all_trading_rules() + trading_rules = {} + for trading_rule in trading_rules_list: + trading_rules[trading_rule.trading_pair] = trading_rule + self._trading_rules.clear() + self._trading_rules.update(trading_rules) + + def _configure_event_forwarders(self): + event_forwarder = EventForwarder(to_function=self._process_user_trade_update) + self._forwarders.append(event_forwarder) + self._data_source.add_listener(event_tag=MarketEvent.TradeUpdate, listener=event_forwarder) + + event_forwarder = EventForwarder(to_function=self._process_user_order_update) + self._forwarders.append(event_forwarder) + self._data_source.add_listener(event_tag=MarketEvent.OrderUpdate, listener=event_forwarder) + + event_forwarder = EventForwarder(to_function=self._process_balance_event) + self._forwarders.append(event_forwarder) + self._data_source.add_listener(event_tag=AccountEvent.BalanceEvent, listener=event_forwarder) + + def _process_balance_event(self, event: BalanceUpdateEvent): + self._last_received_message_timestamp = self._time() + if event.total_balance is not None: + self._account_balances[event.asset_name] = event.total_balance + if event.available_balance is not None: + self._account_available_balances[event.asset_name] = event.available_balance + + def _process_user_order_update(self, order_update: OrderUpdate): + self._last_received_message_timestamp = self._time() + tracked_order = self._order_tracker.all_updatable_orders_by_exchange_order_id.get(order_update.exchange_order_id) + if tracked_order is not None: + new_order_update = OrderUpdate( + trading_pair=order_update.trading_pair, + update_timestamp=order_update.update_timestamp, + new_state=order_update.new_state, + client_order_id=tracked_order.client_order_id, + exchange_order_id=order_update.exchange_order_id, + misc_updates=order_update.misc_updates, + ) + self._order_tracker.process_order_update(order_update=new_order_update) + + def _process_user_trade_update(self, trade_update: TradeUpdate): + self._last_received_message_timestamp = self._time() + tracked_order = self._order_tracker.all_fillable_orders_by_exchange_order_id.get(trade_update.exchange_order_id) + if tracked_order is not None: + new_trade_update = TradeUpdate( + trade_id=trade_update.trade_id, + client_order_id=tracked_order.client_order_id, + exchange_order_id=trade_update.exchange_order_id, + trading_pair=trade_update.trading_pair, + fill_timestamp=trade_update.fill_timestamp, + fill_price=trade_update.fill_price, + fill_base_amount=trade_update.fill_base_amount, + fill_quote_amount=trade_update.fill_quote_amount, + fee=trade_update.fee, + is_taker=trade_update.is_taker, + ) + self._order_tracker.process_trade_update(new_trade_update) + + async def _check_orders_transactions(self): + while True: + try: + await self._check_orders_creation_transactions() + await 
self._sleep(CONSTANTS.TRANSACTIONS_CHECK_INTERVAL) + except NotImplementedError: + raise + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception("Unexpected error while running the transactions check process", exc_info=True) + await self._sleep(0.5) + + async def _check_orders_creation_transactions(self): + orders: List[GatewayInFlightOrder] = self._order_tracker.active_orders.values() + orders_by_creation_tx = defaultdict(list) + orders_with_inconsistent_hash = [] + + for order in orders: + if order.creation_transaction_hash is not None and order.current_state == OrderState.PENDING_CREATE: + orders_by_creation_tx[order.creation_transaction_hash].append(order) + + for transaction_hash, orders in orders_by_creation_tx.items(): + try: + transaction_result = await self._data_source.transaction_result_data(transaction_hash=transaction_hash) + for order in orders: + if order.exchange_order_id not in transaction_result: + self.logger().debug( + f"Order hash inconsistency detected for {order.client_order_id} (expected hash " + f"{order.exchange_order_id}) [{transaction_result}]") + orders_with_inconsistent_hash.append(order) + except ValueError: + self.logger().debug(f"Transaction not included in a block yet ({transaction_hash})") + + if len(orders_with_inconsistent_hash) > 0: + async with self._data_source.order_creation_lock: + for order in orders_with_inconsistent_hash: + self._update_order_after_failure(order_id=order.client_order_id, trading_pair=order.trading_pair) + active_orders = [ + order for order in self._order_tracker.active_orders.values() + if order not in orders_with_inconsistent_hash and order.current_state == OrderState.PENDING_CREATE + ] + await self._data_source.reset_order_hash_generator(active_orders=active_orders) + + async def _process_queued_orders(self): + while True: + try: + await self._cancel_and_create_queued_orders() + sleep_time = (self.clock.tick_size * 0.5 + if self.clock is not None + else self._orders_processing_delta_time) + await self._sleep(sleep_time) + except NotImplementedError: + raise + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception("Unexpected error while processing queued individual orders", exc_info=True) + await self._sleep(self.clock.tick_size * 0.5) + + async def _cancel_and_create_queued_orders(self): + if len(self._orders_queued_to_cancel) > 0: + orders = [order.to_limit_order() for order in self._orders_queued_to_cancel] + self._orders_queued_to_cancel = [] + await self._execute_batch_cancel(orders_to_cancel=orders) + if len(self._orders_queued_to_create) > 0: + orders = self._orders_queued_to_create + self._orders_queued_to_create = [] + await self._execute_batch_inflight_order_create(inflight_orders_to_create=orders) + + async def _get_last_traded_price(self, trading_pair: str) -> float: + market_id = await self.exchange_symbol_associated_to_pair(trading_pair=trading_pair) + last_price = await self._data_source.last_traded_price(market_id=market_id) + return float(last_price) + + def _get_poll_interval(self, timestamp: float) -> float: + last_recv_diff = timestamp - self._last_received_message_timestamp + poll_interval = ( + self.SHORT_POLL_INTERVAL + if last_recv_diff > self.TICK_INTERVAL_LIMIT + else self.LONG_POLL_INTERVAL + ) + return poll_interval diff --git a/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py b/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py new file mode 100644 index 00000000000..d38da871cb7 --- /dev/null +++ 
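`_process_queued_orders` above collects individual create/cancel requests and flushes them roughly twice per clock tick so they can share one batched request. Stripped of the connector machinery, the underlying queue-and-flush pattern looks like the following pure-asyncio sketch (names and timings are illustrative only); note that the pending list is swapped out before any awaiting happens, so items enqueued during the flush are not lost:

```python
import asyncio
from typing import List


class BatchingQueue:
    """Illustrative queue-and-flush loop: callers enqueue items, a background task flushes them in batches."""

    def __init__(self, flush_interval: float = 0.5):
        self._pending: List[str] = []
        self._flush_interval = flush_interval

    def enqueue(self, item: str) -> None:
        self._pending.append(item)

    async def run(self, cycles: int) -> None:
        for _ in range(cycles):
            await asyncio.sleep(self._flush_interval)
            batch, self._pending = self._pending, []  # swap before processing
            if batch:
                print(f"flushing batch of {len(batch)}: {batch}")


async def main():
    queue = BatchingQueue()
    runner = asyncio.create_task(queue.run(cycles=2))
    queue.enqueue("order-1")
    queue.enqueue("order-2")
    await runner


asyncio.run(main())
```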
b/hummingbot/connector/exchange/injective_v2/injective_v2_utils.py @@ -0,0 +1,111 @@ +from decimal import Decimal + +from pydantic import Field, SecretStr + +from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData +from hummingbot.core.data_type.trade_fee import TradeFeeSchema + +CENTRALIZED = False +EXAMPLE_PAIR = "INJ-USDT" + +DEFAULT_FEES = TradeFeeSchema( + maker_percent_fee_decimal=Decimal("0"), + taker_percent_fee_decimal=Decimal("0"), +) + + +class InjectiveConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="injective_v2", const=True, client_data=None) + injective_private_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Injective trading account private key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_subaccount_index: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Injective trading account subaccount index", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_granter_address: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter the Injective address of the granter account (portfolio account)", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_granter_subaccount_index: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter the Injective granter subaccount index (portfolio subaccount index)", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + + class Config: + title = "injective" + + +KEYS = InjectiveConfigMap.construct() + +OTHER_DOMAINS = ["injective_v2_testnet"] +OTHER_DOMAINS_PARAMETER = {"injective_v2_testnet": "testnet"} +OTHER_DOMAINS_EXAMPLE_PAIR = {"injective_v2_testnet": EXAMPLE_PAIR} +OTHER_DOMAINS_DEFAULT_FEES = {"injective_v2_testnet": DEFAULT_FEES} + + +class InjectiveTestnetConfigMap(BaseConnectorConfigMap): + connector: str = Field(default="injective_v2_testnet", const=True, client_data=None) + injective_private_key: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Injective trading account private key", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_subaccount_index: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter your Injective trading account subaccount index", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_granter_address: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter the Injective address of the granter account (portfolio account)", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + injective_granter_subaccount_index: SecretStr = Field( + default=..., + client_data=ClientFieldData( + prompt=lambda cm: "Enter the Injective granter subaccount index (portfolio subaccount index)", + is_secure=True, + is_connect_key=True, + prompt_on_new=True, + ), + ) + + class Config: + title = "injective_v2_testnet" + + +OTHER_DOMAINS_KEYS = {"injective_v2_testnet": InjectiveTestnetConfigMap.construct()} diff --git a/hummingbot/connector/exchange/injective_v2/injective_v2_web_utils.py b/hummingbot/connector/exchange/injective_v2/injective_v2_web_utils.py new file mode 100644 index 00000000000..082f23287bb --- /dev/null +++ b/hummingbot/connector/exchange/injective_v2/injective_v2_web_utils.py 
@@ -0,0 +1,15 @@ +import time +from typing import Optional + +from hummingbot.connector.exchange.injective_v2 import injective_constants as CONSTANTS +from hummingbot.core.api_throttler.async_throttler import AsyncThrottler + + +async def get_current_server_time( + throttler: Optional[AsyncThrottler] = None, domain: str = CONSTANTS.DEFAULT_DOMAIN +) -> float: + return _time() * 1e3 + + +def _time() -> float: + return time.time() diff --git a/hummingbot/connector/exchange/mexc/mexc_exchange.py b/hummingbot/connector/exchange/mexc/mexc_exchange.py index 4b168de3d37..8374b35325f 100644 --- a/hummingbot/connector/exchange/mexc/mexc_exchange.py +++ b/hummingbot/connector/exchange/mexc/mexc_exchange.py @@ -617,8 +617,8 @@ async def place_order(self, 'order_type': order_type_str, 'trade_type': "BID" if is_buy else "ASK", 'symbol': convert_to_exchange_trading_pair(trading_pair), - 'quantity': str(amount), - 'price': str(price) + 'quantity': format(Decimal(str(amount)), "f"), + 'price': format(Decimal(str(price)), "f") } exchange_order_id = await self._api_request( diff --git a/hummingbot/connector/exchange_py_base.py b/hummingbot/connector/exchange_py_base.py index a3c35b5493d..4d60eb930f9 100644 --- a/hummingbot/connector/exchange_py_base.py +++ b/hummingbot/connector/exchange_py_base.py @@ -234,36 +234,6 @@ def get_order_size_quantum(self, trading_pair: str, order_size: Decimal) -> Deci trading_rule = self._trading_rules[trading_pair] return Decimal(trading_rule.min_base_amount_increment) - def quantize_order_amount(self, trading_pair: str, amount: Decimal, price: Decimal = s_decimal_0) -> Decimal: - """ - Applies the trading rules to calculate the correct order amount for the market - - :param trading_pair: the token pair for which the order will be created - :param amount: the intended amount for the order - :param price: the intended price for the order - - :return: the quantized order amount after applying the trading rules - """ - trading_rule = self._trading_rules[trading_pair] - quantized_amount: Decimal = super().quantize_order_amount(trading_pair, amount) - - # Check against min_order_size and min_notional_size. If not passing either check, return 0. - if quantized_amount < trading_rule.min_order_size: - self.logger().warning(f"Quantizing order amount to 0 because order amount of {quantized_amount} is below {trading_rule.min_order_size} market minimum order size.") - return s_decimal_0 - - if price == s_decimal_0: - current_price: Decimal = self.get_price(trading_pair, False) - notional_size = current_price * quantized_amount - else: - notional_size = price * quantized_amount - - # Add 1% as a safety factor in case the prices changed while making the order. 
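The `mexc_exchange.py` change above replaces `str(amount)` with `format(Decimal(str(amount)), "f")` so that very small amounts and prices are rendered in fixed-point form rather than scientific notation, which REST order endpoints commonly reject or misparse. A quick demonstration of the difference:

```python
from decimal import Decimal

amount = 0.0000001
print(str(amount))                        # 1e-07      (scientific notation)
print(str(Decimal(str(amount))))          # 1E-7       (still scientific)
print(format(Decimal(str(amount)), "f"))  # 0.0000001  (fixed-point, safe to send)
```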
- if notional_size < trading_rule.min_notional_size * Decimal("1.01"): - self.logger().warning(f"Quantizing order amount to 0 because order notional value is below {trading_rule.min_notional_size} market minimum notional value.") - return s_decimal_0 - return quantized_amount - def get_order_book(self, trading_pair: str) -> OrderBook: """ Returns the current order book for a particular market @@ -442,10 +412,7 @@ async def _create_order(self, if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER]: price = self.quantize_order_price(trading_pair, price) - quantize_amount_price = Decimal("0") if price.is_nan() else price - amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount, price=quantize_amount_price) - else: - amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount) + quantized_amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount) self.start_tracking_order( order_id=order_id, @@ -454,28 +421,34 @@ async def _create_order(self, order_type=order_type, trade_type=trade_type, price=price, - amount=amount, + amount=quantized_amount, **kwargs, ) order = self._order_tracker.active_orders[order_id] + if not price or price.is_nan() or price == s_decimal_0: + current_price: Decimal = self.get_price(trading_pair, False) + notional_size = current_price * quantized_amount + else: + notional_size = price * quantized_amount if order_type not in self.supported_order_types(): self.logger().error(f"{order_type} is not in the list of supported order types") self._update_order_after_failure(order_id=order_id, trading_pair=trading_pair) return - if amount < trading_rule.min_order_size: - self.logger().warning(f"{trade_type.name.title()} order amount {amount} is lower than the minimum order" - f" size {trading_rule.min_order_size}. The order will not be created.") + elif quantized_amount < trading_rule.min_order_size: + self.logger().warning(f"{trade_type.name.title()} order amount {amount} is lower than the minimum order " + f"size {trading_rule.min_order_size}. The order will not be created, increase the " + f"amount to be higher than the minimum order size.") self._update_order_after_failure(order_id=order_id, trading_pair=trading_pair) return - if price is not None and not math.isnan(price) and amount * price < trading_rule.min_notional_size: - self.logger().warning(f"{trade_type.name.title()} order notional {amount * price} is lower than the " - f"minimum notional size {trading_rule.min_notional_size}. " - "The order will not be created.") + + elif notional_size < trading_rule.min_notional_size: + self.logger().warning(f"{trade_type.name.title()} order notional {notional_size} is lower than the " + f"minimum notional size {trading_rule.min_notional_size}. The order will not be " + f"created. 
Increase the amount or the price to be higher than the minimum notional.") self._update_order_after_failure(order_id=order_id, trading_pair=trading_pair) return - try: exchange_order_id = await self._place_order_and_process_update(order=order, **kwargs,) @@ -485,7 +458,7 @@ async def _create_order(self, self._on_order_failure( order_id=order_id, trading_pair=trading_pair, - amount=amount, + amount=quantized_amount, trade_type=trade_type, order_type=order_type, price=price, diff --git a/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_api_data_source.py b/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_api_data_source.py index 89b8ce6742a..01f7a344c0a 100644 --- a/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_api_data_source.py +++ b/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_api_data_source.py @@ -26,7 +26,7 @@ SpotMarketInfo, SpotOrderHistory, SpotTrade, - StreamOrderbookResponse, + StreamOrderbookV2Response, StreamOrdersResponse, StreamTradesResponse, TokenMeta, @@ -794,7 +794,7 @@ async def _listen_to_order_books_stream(self): self.logger().info("Restarting order books stream.") stream.cancel() - def _parse_order_book_event(self, order_book_update: StreamOrderbookResponse): + def _parse_order_book_event(self, order_book_update: StreamOrderbookV2Response): udpate_timestamp_ms = order_book_update.timestamp market_id = order_book_update.market_id trading_pair = self._get_trading_pair_from_market_id(market_id=market_id) diff --git a/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_utils.py b/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_utils.py index d708f7190af..52a09d7c8aa 100644 --- a/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_utils.py +++ b/hummingbot/connector/gateway/clob_spot/data_sources/injective/injective_utils.py @@ -9,7 +9,7 @@ exchange_pb2 as injective_dot_exchange_dot_v1beta1_dot_exchange__pb2, ) from pyinjective.proto.injective.exchange.v1beta1.exchange_pb2 import DerivativeOrder, SpotOrder -from pyinjective.utils import derivative_price_to_backend, derivative_quantity_to_backend +from pyinjective.utils.utils import derivative_price_to_backend, derivative_quantity_to_backend from hummingbot.connector.gateway.clob_spot.data_sources.injective.injective_constants import ( ACC_NONCE_PATH_RATE_LIMIT_ID, diff --git a/hummingbot/connector/gateway/clob_spot/gateway_clob_spot.py b/hummingbot/connector/gateway/clob_spot/gateway_clob_spot.py index 0f9da56500a..74f7cac85de 100644 --- a/hummingbot/connector/gateway/clob_spot/gateway_clob_spot.py +++ b/hummingbot/connector/gateway/clob_spot/gateway_clob_spot.py @@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple from hummingbot.connector.client_order_tracker import ClientOrderTracker -from hummingbot.connector.constants import s_decimal_NaN +from hummingbot.connector.constants import s_decimal_0, s_decimal_NaN from hummingbot.connector.exchange_base import TradeType from hummingbot.connector.exchange_py_base import ExchangePyBase from hummingbot.connector.gateway.clob_spot.data_sources.gateway_clob_api_data_source_base import CLOBAPIDataSourceBase @@ -356,10 +356,7 @@ async def _start_tracking_and_validate_order( if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER]: price = self.quantize_order_price(trading_pair, price) - quantize_amount_price = Decimal("0") if price.is_nan() else price - amount = 
self.quantize_order_amount(trading_pair=trading_pair, amount=amount, price=quantize_amount_price) - else: - amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount) + quantized_amount = self.quantize_order_amount(trading_pair=trading_pair, amount=amount) self.start_tracking_order( order_id=order_id, @@ -368,25 +365,33 @@ async def _start_tracking_and_validate_order( order_type=order_type, trade_type=trade_type, price=price, - amount=amount, + amount=quantized_amount, **kwargs, ) order = self._order_tracker.active_orders[order_id] + if not price or price.is_nan() or price == s_decimal_0: + current_price: Decimal = self.get_price(trading_pair, False) + notional_size = current_price * quantized_amount + else: + notional_size = price * quantized_amount + if order_type not in self.supported_order_types(): self.logger().error(f"{order_type} is not in the list of supported order types") self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) order = None - elif amount < trading_rule.min_order_size: - self.logger().warning(f"{trade_type.name.title()} order amount {amount} is lower than the minimum order" - f" size {trading_rule.min_order_size}. The order will not be created.") + + elif quantized_amount < trading_rule.min_order_size: + self.logger().warning(f"{trade_type.name.title()} order amount {amount} is lower than the minimum order " + f"size {trading_rule.min_order_size}. The order will not be created, increase the " + f"amount to be higher than the minimum order size.") self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) order = None - elif price is not None and amount * price < trading_rule.min_notional_size: - self.logger().warning(f"{trade_type.name.title()} order notional {amount * price} is lower than the " - f"minimum notional size {trading_rule.min_notional_size}. " - "The order will not be created.") - self._update_order_after_creation_failure(order_id=order_id, trading_pair=trading_pair) + elif notional_size < trading_rule.min_notional_size: + self.logger().warning(f"{trade_type.name.title()} order notional {notional_size} is lower than the " + f"minimum notional size {trading_rule.min_notional_size}. The order will not be " + f"created. 
Increase the amount or the price to be higher than the minimum notional.") + self._update_order_after_failure(order_id=order_id, trading_pair=trading_pair) order = None return order diff --git a/hummingbot/connector/test_support/exchange_connector_test.py b/hummingbot/connector/test_support/exchange_connector_test.py index 251cee43c1a..61a10fa6722 100644 --- a/hummingbot/connector/test_support/exchange_connector_test.py +++ b/hummingbot/connector/test_support/exchange_connector_test.py @@ -259,7 +259,7 @@ def configure_completely_filled_order_status_response( self, order: InFlightOrder, mock_api: aioresponses, - callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + callback: Optional[Callable] = lambda *args, **kwargs: None) -> List[str]: """ :return: the URL configured """ @@ -281,7 +281,7 @@ def configure_open_order_status_response( self, order: InFlightOrder, mock_api: aioresponses, - callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + callback: Optional[Callable] = lambda *args, **kwargs: None) -> List[str]: """ :return: the URL configured """ @@ -783,11 +783,11 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel callback=lambda *args, **kwargs: request_sent_event.set()) order_id_for_invalid_order = self.place_buy_order( - amount=Decimal("0.0001"), price=Decimal("0.0000001") + amount=Decimal("0.0001"), price=Decimal("0.0001") ) # The second order is used only to have the event triggered and avoid using timeouts for tests order_id = self.place_buy_order() - self.async_run_with_timeout(request_sent_event.wait()) + self.async_run_with_timeout(request_sent_event.wait(), timeout=3) self.assertNotIn(order_id_for_invalid_order, self.exchange.in_flight_orders) self.assertNotIn(order_id, self.exchange.in_flight_orders) @@ -801,7 +801,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self.is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." 
) ) self.assertTrue( @@ -839,11 +841,12 @@ def test_cancel_order_successfully(self, mock_api): self.exchange.cancel(trading_pair=order.trading_pair, client_order_id=order.client_order_id) self.async_run_with_timeout(request_sent_event.wait()) - cancel_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(cancel_request) - self.validate_order_cancelation_request( - order=order, - request_call=cancel_request) + if url != "": + cancel_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(cancel_request) + self.validate_order_cancelation_request( + order=order, + request_call=cancel_request) if self.exchange.is_cancel_request_in_exchange_synchronous: self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) @@ -888,11 +891,12 @@ def test_cancel_order_raises_failure_event_when_request_fails(self, mock_api): self.exchange.cancel(trading_pair=self.trading_pair, client_order_id=self.client_order_id_prefix + "1") self.async_run_with_timeout(request_sent_event.wait()) - cancel_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(cancel_request) - self.validate_order_cancelation_request( - order=order, - request_call=cancel_request) + if url != "": + cancel_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(cancel_request) + self.validate_order_cancelation_request( + order=order, + request_call=cancel_request) self.assertEquals(0, len(self.order_cancelled_logger.event_log)) self.assertTrue( @@ -1036,7 +1040,7 @@ def test_update_order_status_when_filled(self, mock_api): ) order: InFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] - url = self.configure_completely_filled_order_status_response( + urls = self.configure_completely_filled_order_status_response( order=order, mock_api=mock_api, callback=lambda *args, **kwargs: request_sent_event.set()) @@ -1053,22 +1057,24 @@ def test_update_order_status_when_filled(self, mock_api): # Execute one more synchronization to ensure the async task that processes the update is finished self.async_run_with_timeout(request_sent_event.wait()) - order_status_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(order_status_request) - self.validate_order_status_request( - order=order, - request_call=order_status_request) + for url in (urls if isinstance(urls, list) else [urls]): + order_status_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_status_request) + self.validate_order_status_request( + order=order, + request_call=order_status_request) self.async_run_with_timeout(order.wait_until_completely_filled()) self.assertTrue(order.is_done) if self.is_order_fill_http_update_included_in_status_update: self.assertTrue(order.is_filled) - trades_request = self._all_executed_requests(mock_api, trade_url)[0] - self.validate_auth_credentials_present(trades_request) - self.validate_trades_request( - order=order, - request_call=trades_request) + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) @@ -1191,11 +1197,12 @@ def 
test_update_order_status_when_request_fails_marks_order_as_not_found(self, m self.async_run_with_timeout(self.exchange._update_order_status()) - order_status_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(order_status_request) - self.validate_order_status_request( - order=order, - request_call=order_status_request) + if url: + order_status_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(order_status_request) + self.validate_order_status_request( + order=order, + request_call=order_status_request) self.assertTrue(order.is_open) self.assertFalse(order.is_filled) @@ -1231,21 +1238,23 @@ def test_update_order_status_when_order_has_not_changed_and_one_partial_fill(sel self.async_run_with_timeout(self.exchange._update_order_status()) - order_status_request = self._all_executed_requests(mock_api, order_url)[0] - self.validate_auth_credentials_present(order_status_request) - self.validate_order_status_request( - order=order, - request_call=order_status_request) + if order_url: + order_status_request = self._all_executed_requests(mock_api, order_url)[0] + self.validate_auth_credentials_present(order_status_request) + self.validate_order_status_request( + order=order, + request_call=order_status_request) self.assertTrue(order.is_open) self.assertEqual(OrderState.PARTIALLY_FILLED, order.current_state) if self.is_order_fill_http_update_included_in_status_update: - trades_request = self._all_executed_requests(mock_api, trade_url)[0] - self.validate_auth_credentials_present(trades_request) - self.validate_trades_request( - order=order, - request_call=trades_request) + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) @@ -1297,11 +1306,12 @@ def test_update_order_status_when_filled_correctly_processed_even_when_trade_fil self.assertTrue(order.is_done) if self.is_order_fill_http_update_included_in_status_update: - trades_request = self._all_executed_requests(mock_api, trade_url)[0] - self.validate_auth_credentials_present(trades_request) - self.validate_trades_request( - order=order, - request_call=trades_request) + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) self.assertEqual(0, len(self.order_filled_logger.event_log)) @@ -1567,11 +1577,12 @@ def test_lost_order_included_in_order_fills_update_and_not_in_order_status_updat self.assertTrue(order.is_failure) if self.is_order_fill_http_update_included_in_status_update: - trades_request = self._all_executed_requests(mock_api, trade_url)[0] - self.validate_auth_credentials_present(trades_request) - self.validate_trades_request( - order=order, - request_call=trades_request) + if trade_url: + trades_request = self._all_executed_requests(mock_api, trade_url)[0] + self.validate_auth_credentials_present(trades_request) + self.validate_trades_request( + order=order, + request_call=trades_request) fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) @@ -1649,11 +1660,12 @@ def 
test_cancel_lost_order_successfully(self, mock_api): self.async_run_with_timeout(self.exchange._cancel_lost_orders()) self.async_run_with_timeout(request_sent_event.wait()) - cancel_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(cancel_request) - self.validate_order_cancelation_request( - order=order, - request_call=cancel_request) + if url: + cancel_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(cancel_request) + self.validate_order_cancelation_request( + order=order, + request_call=cancel_request) if self.exchange.is_cancel_request_in_exchange_synchronous: self.assertNotIn(order.client_order_id, self.exchange._order_tracker.lost_orders) @@ -1696,11 +1708,12 @@ def test_cancel_lost_order_raises_failure_event_when_request_fails(self, mock_ap self.async_run_with_timeout(self.exchange._cancel_lost_orders()) self.async_run_with_timeout(request_sent_event.wait()) - cancel_request = self._all_executed_requests(mock_api, url)[0] - self.validate_auth_credentials_present(cancel_request) - self.validate_order_cancelation_request( - order=order, - request_call=cancel_request) + if url: + cancel_request = self._all_executed_requests(mock_api, url)[0] + self.validate_auth_credentials_present(cancel_request) + self.validate_order_cancelation_request( + order=order, + request_call=cancel_request) self.assertIn(order.client_order_id, self.exchange._order_tracker.lost_orders) self.assertEquals(0, len(self.order_cancelled_logger.event_log)) diff --git a/hummingbot/connector/utils.py b/hummingbot/connector/utils.py index a77ef192148..8ba4269713d 100644 --- a/hummingbot/connector/utils.py +++ b/hummingbot/connector/utils.py @@ -1,4 +1,3 @@ -import base64 import gzip import json import os @@ -7,8 +6,6 @@ from hashlib import md5 from typing import Any, Callable, Dict, Optional, Tuple -from zero_ex.order_utils import Order as ZeroExOrder - from hummingbot.connector.time_synchronizer import TimeSynchronizer from hummingbot.core.api_throttler.async_throttler import AsyncThrottler from hummingbot.core.api_throttler.async_throttler_base import AsyncThrottlerBase @@ -21,33 +18,6 @@ TradeFillOrderDetails = namedtuple("TradeFillOrderDetails", "market exchange_trade_id symbol") -def zrx_order_to_json(order: Optional[ZeroExOrder]) -> Optional[Dict[str, any]]: - if order is None: - return None - - retval: Dict[str, any] = {} - for key, value in order.items(): - if not isinstance(value, bytes): - retval[key] = value - else: - retval[f"__binary__{key}"] = base64.b64encode(value).decode("utf8") - return retval - - -def json_to_zrx_order(data: Optional[Dict[str, any]]) -> Optional[ZeroExOrder]: - if data is None: - return None - - intermediate: Dict[str, any] = {} - for key, value in data.items(): - if key.startswith("__binary__"): - target_key = key.replace("__binary__", "") - intermediate[target_key] = base64.b64decode(value) - else: - intermediate[key] = value - return ZeroExOrder(intermediate) - - def build_api_factory(throttler: AsyncThrottlerBase) -> WebAssistantsFactory: throttler = throttler or AsyncThrottler(rate_limits=[]) api_factory = WebAssistantsFactory(throttler=throttler) diff --git a/hummingbot/core/data_type/trade_fee.py b/hummingbot/core/data_type/trade_fee.py index 5f4c16e501e..c1fcc0103c1 100644 --- a/hummingbot/core/data_type/trade_fee.py +++ b/hummingbot/core/data_type/trade_fee.py @@ -187,7 +187,7 @@ def _get_exchange_rate( if exchange is not None and trading_pair in exchange.order_books: rate = 
exchange.get_price_by_type(trading_pair, PriceType.MidPrice) else: - local_rate_source: RateOracle = rate_source or RateOracle.get_instance() + local_rate_source: Optional[RateOracle] = rate_source or RateOracle.get_instance() rate: Decimal = local_rate_source.get_pair_rate(trading_pair) if rate is None: raise ValueError(f"Could not find the exchange rate for {trading_pair} using the rate source " @@ -230,6 +230,11 @@ def fee_amount_in_token( def _are_tokens_interchangeable(self, first_token: str, second_token: str): interchangeable_tokens = [ {"WETH", "ETH"}, + {"WBNB", "BNB"}, + {"WMATIC", "MATIC"}, + {"WAVAX", "AVAX"}, + {"WONE", "ONE"}, + {"USDC", "USDC.E"}, {"WBTC", "BTC"} ] return first_token == second_token or any(({first_token, second_token} <= interchangeable_pair diff --git a/hummingbot/data_feed/amm_gateway_data_feed.py b/hummingbot/data_feed/amm_gateway_data_feed.py new file mode 100644 index 00000000000..49273b6b010 --- /dev/null +++ b/hummingbot/data_feed/amm_gateway_data_feed.py @@ -0,0 +1,145 @@ +import asyncio +import logging +from decimal import Decimal +from typing import Dict, Optional, Set + +from pydantic import BaseModel + +from hummingbot.connector.utils import split_hb_trading_pair +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.gateway.gateway_http_client import GatewayHttpClient +from hummingbot.core.network_base import NetworkBase +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.logger import HummingbotLogger + + +class TokenBuySellPrice(BaseModel): + base: str + quote: str + connector: str + chain: str + network: str + order_amount_in_base: Decimal + buy_price: Decimal + sell_price: Decimal + + +class AmmGatewayDataFeed(NetworkBase): + dex_logger: Optional[HummingbotLogger] = None + gateway_client = GatewayHttpClient.get_instance() + + def __init__( + self, + connector_chain_network: str, + trading_pairs: Set[str], + order_amount_in_base: Decimal, + update_interval: float = 1.0, + ) -> None: + super().__init__() + self._ev_loop = asyncio.get_event_loop() + self._price_dict: Dict[str, TokenBuySellPrice] = {} + self._update_interval = update_interval + self.fetch_data_loop_task: Optional[asyncio.Task] = None + # param required for DEX API request + self.connector_chain_network = connector_chain_network + self.trading_pairs = trading_pairs + self.order_amount_in_base = order_amount_in_base + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls.dex_logger is None: + cls.dex_logger = logging.getLogger(__name__) + return cls.dex_logger + + @property + def name(self) -> str: + return f"AmmDataFeed[{self.connector_chain_network}]" + + @property + def connector(self) -> str: + return self.connector_chain_network.split("_")[0] + + @property + def chain(self) -> str: + return self.connector_chain_network.split("_")[1] + + @property + def network(self) -> str: + return self.connector_chain_network.split("_")[2] + + @property + def price_dict(self) -> Dict[str, TokenBuySellPrice]: + return self._price_dict + + def is_ready(self) -> bool: + return len(self._price_dict) == len(self.trading_pairs) + + async def check_network(self) -> NetworkStatus: + is_gateway_online = await self.gateway_client.ping_gateway() + if not is_gateway_online: + self.logger().warning("Gateway is not online. 
Please check your gateway connection.") + return NetworkStatus.CONNECTED if is_gateway_online else NetworkStatus.NOT_CONNECTED + + async def start_network(self) -> None: + await self.stop_network() + self.fetch_data_loop_task = safe_ensure_future(self._fetch_data_loop()) + + async def stop_network(self) -> None: + if self.fetch_data_loop_task is not None: + self.fetch_data_loop_task.cancel() + self.fetch_data_loop_task = None + + async def _fetch_data_loop(self) -> None: + while True: + try: + await self._fetch_data() + except asyncio.CancelledError: + raise + except Exception as e: + self.logger().error( + f"Error getting data from {self.name}" + f"Check network connection. Error: {e}", + ) + await self._async_sleep(self._update_interval) + + async def _fetch_data(self) -> None: + token_price_tasks = [ + asyncio.create_task(self._register_token_buy_sell_price(trading_pair)) + for trading_pair in self.trading_pairs + ] + await asyncio.gather(*token_price_tasks) + + async def _register_token_buy_sell_price(self, trading_pair: str) -> None: + base, quote = split_hb_trading_pair(trading_pair) + token_buy_price_task = asyncio.create_task(self._request_token_price(trading_pair, TradeType.BUY)) + token_sell_price_task = asyncio.create_task(self._request_token_price(trading_pair, TradeType.SELL)) + self._price_dict[trading_pair] = TokenBuySellPrice( + base=base, + quote=quote, + connector=self.connector, + chain=self.chain, + network=self.network, + order_amount_in_base=self.order_amount_in_base, + buy_price=await token_buy_price_task, + sell_price=await token_sell_price_task, + ) + + async def _request_token_price(self, trading_pair: str, trade_type: TradeType) -> Decimal: + base, quote = split_hb_trading_pair(trading_pair) + connector, chain, network = self.connector_chain_network.split("_") + token_price = await self.gateway_client.get_price( + chain, + network, + connector, + base, + quote, + self.order_amount_in_base, + trade_type, + ) + return Decimal(token_price["price"]) + + @staticmethod + async def _async_sleep(delay: float) -> None: + """Used to mock in test cases.""" + await asyncio.sleep(delay) diff --git a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/__init__.py b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/__init__.py new file mode 100644 index 00000000000..e5aed62db38 --- /dev/null +++ b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/__init__.py @@ -0,0 +1,3 @@ +from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.ascend_ex_spot_candles import AscendExSpotCandles + +__all__ = ["AscendExSpotCandles"] diff --git a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py new file mode 100644 index 00000000000..ae2a179fcce --- /dev/null +++ b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/ascend_ex_spot_candles.py @@ -0,0 +1,175 @@ +import asyncio +import logging +from typing import Any, Dict, Optional + +import numpy as np + +from hummingbot.core.network_iterator import NetworkStatus, safe_ensure_future +from hummingbot.core.web_assistant.connections.data_types import WSJSONRequest +from hummingbot.core.web_assistant.ws_assistant import WSAssistant +from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles import constants as CONSTANTS +from hummingbot.data_feed.candles_feed.candles_base import CandlesBase +from hummingbot.logger import HummingbotLogger + + +class AscendExSpotCandles(CandlesBase): + _logger: 
Optional[HummingbotLogger] = None + + @classmethod + def logger(cls) -> HummingbotLogger: + if cls._logger is None: + cls._logger = logging.getLogger(__name__) + return cls._logger + + def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150): + super().__init__(trading_pair, interval, max_records) + + @property + def name(self): + return f"ascend_ex_spot_{self._trading_pair}" + + @property + def rest_url(self): + return CONSTANTS.REST_URL + + @property + def wss_url(self): + return CONSTANTS.WSS_URL + + @property + def health_check_url(self): + return self.rest_url + CONSTANTS.HEALTH_CHECK_ENDPOINT + + @property + def candles_url(self): + return self.rest_url + CONSTANTS.CANDLES_ENDPOINT + + @property + def rate_limits(self): + return CONSTANTS.RATE_LIMITS + + @property + def intervals(self): + return CONSTANTS.INTERVALS + + async def check_network(self) -> NetworkStatus: + rest_assistant = await self._api_factory.get_rest_assistant() + await rest_assistant.execute_request(url=self.health_check_url, + throttler_limit_id=CONSTANTS.HEALTH_CHECK_ENDPOINT) + return NetworkStatus.CONNECTED + + def get_exchange_trading_pair(self, trading_pair): + return trading_pair.replace("-", "/") + + async def fetch_candles(self, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: Optional[int] = 500): + rest_assistant = await self._api_factory.get_rest_assistant() + params = {"symbol": self._ex_trading_pair, "interval": CONSTANTS.INTERVALS[self.interval], "n": limit} + if start_time: + params["from"] = start_time + if end_time: + params["to"] = end_time + candles = await rest_assistant.execute_request(url=self.candles_url, + throttler_limit_id=CONSTANTS.CANDLES_ENDPOINT, + params=params) + new_hb_candles = [] + for i in candles["data"]: + timestamp_ms = i["data"]["ts"] + open = i["data"]["o"] + high = i["data"]["h"] + low = i["data"]["l"] + close = i["data"]["c"] + quote_asset_volume = i["data"]["v"] + # no data field + volume = 0 + n_trades = 0 + taker_buy_base_volume = 0 + taker_buy_quote_volume = 0 + new_hb_candles.append([timestamp_ms, open, high, low, close, volume, + quote_asset_volume, n_trades, taker_buy_base_volume, + taker_buy_quote_volume]) + return np.array(new_hb_candles).astype(float) + + async def fill_historical_candles(self): + max_request_needed = (self._candles.maxlen // 1000) + 1 + requests_executed = 0 + while not self.is_ready: + missing_records = self._candles.maxlen - len(self._candles) + end_timestamp = int(self._candles[0][0]) + try: + if requests_executed < max_request_needed: + # we have to add one more since, the last row is not going to be included + candles = await self.fetch_candles(end_time=end_timestamp, limit=missing_records + 1) + # we are computing again the quantity of records again since the websocket process is able to + # modify the deque and if we extend it, the new observations are going to be dropped. + missing_records = self._candles.maxlen - len(self._candles) + self._candles.extendleft(candles[-(missing_records + 1):-1][::-1]) + requests_executed += 1 + else: + self.logger().error(f"There is no data available for the quantity of " + f"candles requested for {self.name}.") + raise + except asyncio.CancelledError: + raise + except Exception: + self.logger().exception( + "Unexpected error occurred when getting historical klines. 
Retrying in 1 second...", + ) + await self._sleep(1.0) + + async def _subscribe_channels(self, ws: WSAssistant): + """ + Subscribes to the candles events through the provided websocket connection. + :param ws: the websocket assistant used to connect to the exchange + """ + try: + payload = {"op": CONSTANTS.SUB_ENDPOINT_NAME, + "ch": f"bar:{CONSTANTS.INTERVALS[self.interval]}:{self._ex_trading_pair}"} + subscribe_candles_request: WSJSONRequest = WSJSONRequest(payload=payload) + + await ws.send(subscribe_candles_request) + self.logger().info("Subscribed to public klines...") + except asyncio.CancelledError: + raise + except Exception: + self.logger().error( + "Unexpected error occurred subscribing to public klines...", + exc_info=True + ) + raise + + async def _process_websocket_messages(self, websocket_assistant: WSAssistant): + async for ws_response in websocket_assistant.iter_messages(): + data: Dict[str, Any] = ws_response.data + if data.get("m") == "ping": + pong_payloads = {"op": "pong"} + pong_request = WSJSONRequest(payload=pong_payloads) + await websocket_assistant.send(request=pong_request) + if data is not None and data.get("m") == "bar": # data will be None when the websocket is disconnected + timestamp = data["data"]["ts"] + open = data["data"]["o"] + high = data["data"]["h"] + low = data["data"]["l"] + close = data["data"]["c"] + quote_asset_volume = data["data"]["v"] + volume = 0 + n_trades = 0 + taker_buy_base_volume = 0 + taker_buy_quote_volume = 0 + if len(self._candles) == 0: + self._candles.append(np.array([timestamp, open, high, low, close, volume, + quote_asset_volume, n_trades, taker_buy_base_volume, + taker_buy_quote_volume])) + safe_ensure_future(self.fill_historical_candles()) + elif timestamp > int(self._candles[-1][0]): + # TODO: validate also that the diff of timestamp == interval (issue with 1M interval). + self._candles.append(np.array([timestamp, open, high, low, close, volume, + quote_asset_volume, n_trades, taker_buy_base_volume, + taker_buy_quote_volume])) + elif timestamp == int(self._candles[-1][0]): + self._candles.pop() + self._candles.append(np.array([timestamp, open, high, low, close, volume, + quote_asset_volume, n_trades, taker_buy_base_volume, + taker_buy_quote_volume])) diff --git a/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py new file mode 100644 index 00000000000..2fdbbe5b64b --- /dev/null +++ b/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/constants.py @@ -0,0 +1,35 @@ +from bidict import bidict + +from hummingbot.core.api_throttler.data_types import LinkedLimitWeightPair, RateLimit + +REST_URL = "https://ascendex.com/api/pro/v1/" +HEALTH_CHECK_ENDPOINT = "risk-limit-info" +CANDLES_ENDPOINT = "barhist" +SUB_ENDPOINT_NAME = "sub" + +WSS_URL = "wss://ascendex.com:443/api/pro/v1/websocket-for-hummingbot-liq-mining/stream" + +# Please note that the one-month bar (1m) always resets at the month start. +# The intervalInMillis value for the one-month bar is only indicative.
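+
+# A minimal usage sketch of the mapping below (illustrative only, assuming the standard bidict API):
+#   INTERVALS["1h"] == "60"           # Hummingbot interval -> AscendEx bar code
+#   INTERVALS.inverse["60"] == "1h"   # AscendEx bar code -> Hummingbot interval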
+INTERVALS = bidict({ + "1m": "1", + "5m": "5", + "15m": "15", + "30m": "30", + "1h": "60", + "2h": "120", + "4h": "240", + "6h": "360", + "12h": "720", + "1d": "1d", + "1w": "1w", + "1M": "1m" +}) + +ALL_ENDPOINTS_LIMIT = "All" + +RATE_LIMITS = [ + RateLimit(ALL_ENDPOINTS_LIMIT, limit=100, time_interval=1), + RateLimit(CANDLES_ENDPOINT, limit=100, time_interval=1, linked_limits=[LinkedLimitWeightPair(ALL_ENDPOINTS_LIMIT)]), + RateLimit(HEALTH_CHECK_ENDPOINT, limit=100, time_interval=1, + linked_limits=[LinkedLimitWeightPair(ALL_ENDPOINTS_LIMIT)])] diff --git a/hummingbot/data_feed/candles_feed/candles_factory.py b/hummingbot/data_feed/candles_feed/candles_factory.py index 0934fd76563..44ff73a7700 100644 --- a/hummingbot/data_feed/candles_feed/candles_factory.py +++ b/hummingbot/data_feed/candles_feed/candles_factory.py @@ -1,3 +1,4 @@ +from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.ascend_ex_spot_candles import AscendExSpotCandles from hummingbot.data_feed.candles_feed.binance_perpetual_candles import BinancePerpetualCandles from hummingbot.data_feed.candles_feed.binance_spot_candles import BinanceSpotCandles from hummingbot.data_feed.candles_feed.gate_io_perpetual_candles import GateioPerpetualCandles @@ -24,5 +25,7 @@ def get_candle(cls, connector: str, trading_pair: str, interval: str = "1m", max return GateioPerpetualCandles(trading_pair, interval, max_records) elif connector == "kucoin": return KucoinSpotCandles(trading_pair, interval, max_records) + elif connector == "ascend_ex": + return AscendExSpotCandles(trading_pair, interval, max_records) else: raise Exception(f"The connector {connector} is not available. Please select another one.") diff --git a/hummingbot/strategy/amm_arb/amm_arb.py b/hummingbot/strategy/amm_arb/amm_arb.py index 0e0093dbd66..25e36e0f956 100644 --- a/hummingbot/strategy/amm_arb/amm_arb.py +++ b/hummingbot/strategy/amm_arb/amm_arb.py @@ -60,7 +60,7 @@ class AmmArbStrategy(StrategyPyBase): _quote_eth_rate_fetch_loop_task: Optional[asyncio.Task] _market_1_quote_eth_rate: None # XXX (martin_kou): Why are these here? _market_2_quote_eth_rate: None # XXX (martin_kou): Why are these here? - _rate_source: RateOracle + _rate_source: Optional[RateOracle] _cancel_outdated_orders_task: Optional[asyncio.Task] _gateway_transaction_cancel_interval: int @@ -81,6 +81,7 @@ def init_params(self, concurrent_orders_submission: bool = True, status_report_interval: float = 900, gateway_transaction_cancel_interval: int = 600, + rate_source: Optional[RateOracle] = RateOracle.get_instance(), ): """ Assigns strategy parameters, this function must be called directly after init. @@ -100,6 +101,7 @@ def init_params(self, :param status_report_interval: Amount of seconds to wait to refresh the status report :param gateway_transaction_cancel_interval: Amount of seconds to wait before trying to cancel orders that are blockchain transactions that have not been included in a block (they are still in the mempool). 
+ :param rate_source: The rate source to use for conversion rate - (RateOracle or FixedRateSource) - default is FixedRateSource """ self._market_info_1 = market_info_1 self._market_info_2 = market_info_2 @@ -120,7 +122,7 @@ def init_params(self, self.add_markets([market_info_1.market, market_info_2.market]) self._quote_eth_rate_fetch_loop_task = None - self._rate_source = RateOracle.get_instance() + self._rate_source = rate_source self._cancel_outdated_orders_task = None self._gateway_transaction_cancel_interval = gateway_transaction_cancel_interval @@ -148,11 +150,11 @@ def order_amount(self, value: Decimal): self._order_amount = value @property - def rate_source(self) -> RateOracle: + def rate_source(self) -> Optional[RateOracle]: return self._rate_source @rate_source.setter - def rate_source(self, src: RateOracle): + def rate_source(self, src: Optional[RateOracle]): self._rate_source = src @property @@ -403,11 +405,12 @@ def short_proposal_msg(self, arb_proposal: List[ArbProposal], indented: bool = T f"{profit_pct:.2%}") return lines - def quotes_rate_df(self): - columns = ["Quotes pair", "Rate"] + def get_fixed_rates_df(self): + columns = ["Pair", "Rate"] quotes_pair: str = f"{self._market_info_2.quote_asset}-{self._market_info_1.quote_asset}" - data = [[quotes_pair, PerformanceMetrics.smart_round(self._rate_source.get_pair_rate(quotes_pair))]] - + bases_pair: str = f"{self._market_info_2.base_asset}-{self._market_info_1.base_asset}" + data = [[quotes_pair, PerformanceMetrics.smart_round(self._rate_source.get_pair_rate(quotes_pair))], + [bases_pair, PerformanceMetrics.smart_round(self._rate_source.get_pair_rate(bases_pair))]] return pd.DataFrame(data=data, columns=columns) async def format_status(self) -> str: @@ -460,9 +463,9 @@ async def format_status(self) -> str: lines.extend(["", " Profitability:"] + self.short_proposal_msg(self._all_arb_proposals)) - quotes_rates_df = self.quotes_rate_df() - lines.extend(["", f" Quotes Rates ({str(self._rate_source)})"] + - [" " + line for line in str(quotes_rates_df).split("\n")]) + fixed_rates_df = self.get_fixed_rates_df() + lines.extend(["", f" Exchange Rates: ({str(self._rate_source)})"] + + [" " + line for line in str(fixed_rates_df).split("\n")]) warning_lines = self.network_warning([self._market_info_1]) warning_lines.extend(self.network_warning([self._market_info_2])) diff --git a/hummingbot/strategy/amm_arb/amm_arb_config_map.py b/hummingbot/strategy/amm_arb/amm_arb_config_map.py index b349b7f37ae..3f0eac16ae4 100644 --- a/hummingbot/strategy/amm_arb/amm_arb_config_map.py +++ b/hummingbot/strategy/amm_arb/amm_arb_config_map.py @@ -138,8 +138,19 @@ def order_amount_prompt() -> str: key="gateway_transaction_cancel_interval", prompt="After what time should blockchain transactions be cancelled if they are not included in a block? " "(this only affects decentralized exchanges) (Enter time in seconds) >>> ", - prompt_on_new=True, default=600, validator=lambda v: validate_int(v, min_value=1, inclusive=True), type_str="int"), + "rate_oracle_enabled": ConfigVar( + key="rate_oracle_enabled", + prompt="Do you want to use the rate oracle? (Yes/No) >>> ", + default=False, + validator=validate_bool, + type_str="bool"), + "quote_conversion_rate": ConfigVar( + key="quote_conversion_rate", + prompt="What is the fixed_rate used to convert quote assets? 
>>> ", + default=Decimal("1"), + validator=lambda v: validate_decimal(v), + type_str="decimal"), } diff --git a/hummingbot/strategy/amm_arb/start.py b/hummingbot/strategy/amm_arb/start.py index afc49e592bc..65557c4ba35 100644 --- a/hummingbot/strategy/amm_arb/start.py +++ b/hummingbot/strategy/amm_arb/start.py @@ -4,6 +4,8 @@ from hummingbot.connector.gateway.amm.gateway_evm_amm import GatewayEVMAMM from hummingbot.connector.gateway.common_types import Chain from hummingbot.connector.gateway.gateway_price_shim import GatewayPriceShim +from hummingbot.core.rate_oracle.rate_oracle import RateOracle +from hummingbot.core.utils.fixed_rate_source import FixedRateSource from hummingbot.strategy.amm_arb.amm_arb import AmmArbStrategy from hummingbot.strategy.amm_arb.amm_arb_config_map import amm_arb_config_map from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple @@ -21,6 +23,8 @@ def start(self): concurrent_orders_submission = amm_arb_config_map.get("concurrent_orders_submission").value debug_price_shim = amm_arb_config_map.get("debug_price_shim").value gateway_transaction_cancel_interval = amm_arb_config_map.get("gateway_transaction_cancel_interval").value + rate_oracle_enabled = amm_arb_config_map.get("rate_oracle_enabled").value + quote_conversion_rate = amm_arb_config_map.get("quote_conversion_rate").value self._initialize_markets([(connector_1, [market_1]), (connector_2, [market_2])]) base_1, quote_1 = market_1.split("-") @@ -51,6 +55,13 @@ def start(self): amm_market_info.trading_pair ) + if rate_oracle_enabled: + rate_source = RateOracle.get_instance() + else: + rate_source = FixedRateSource() + rate_source.add_rate(f"{quote_2}-{quote_1}", Decimal(str(quote_conversion_rate))) # reverse rate is already handled in FixedRateSource find_rate method. + rate_source.add_rate(f"{quote_1}-{quote_2}", Decimal(str(1 / quote_conversion_rate))) # reverse rate is already handled in FixedRateSource find_rate method. + self.strategy = AmmArbStrategy() self.strategy.init_params(market_info_1=market_info_1, market_info_2=market_info_2, @@ -60,4 +71,5 @@ def start(self): market_2_slippage_buffer=market_2_slippage_buffer, concurrent_orders_submission=concurrent_orders_submission, gateway_transaction_cancel_interval=gateway_transaction_cancel_interval, + rate_source=rate_source, ) diff --git a/hummingbot/templates/conf_amm_arb_strategy_TEMPLATE.yml b/hummingbot/templates/conf_amm_arb_strategy_TEMPLATE.yml index 9afa86cc536..ea75616687b 100644 --- a/hummingbot/templates/conf_amm_arb_strategy_TEMPLATE.yml +++ b/hummingbot/templates/conf_amm_arb_strategy_TEMPLATE.yml @@ -2,7 +2,7 @@ ### AMM Arbitrage strategy config ### ########################################## -template_version: 5 +template_version: 6 strategy: null # The following configurations are only required for the AMM arbitrage trading strategy @@ -38,4 +38,10 @@ concurrent_orders_submission: null debug_price_shim: false # After how many seconds should blockchain transactions be cancelled if they are not included in a block? -gateway_transaction_cancel_interval: 600 \ No newline at end of file +gateway_transaction_cancel_interval: 600 + +# What rate source should be used for quote assets pair - between fixed_rate_source and rate_oracle_source? +rate_oracle_enabled: false + +# What is the fixed_rate used to convert quote assets? 
+quote_conversion_rate: 1 \ No newline at end of file diff --git a/hummingbot/user/user_balances.py b/hummingbot/user/user_balances.py index 53c114eee0b..da3a60a33e4 100644 --- a/hummingbot/user/user_balances.py +++ b/hummingbot/user/user_balances.py @@ -32,15 +32,16 @@ def connect_market(exchange, client_config_map: ClientConfigMap, **api_details): trading_pairs: List[str] = gateway_connector_trading_pairs(conn_setting.name) # collect unique trading pairs that are for balance reporting only - config: Optional[Dict[str, str]] = GatewayConnectionSetting.get_connector_spec_from_market_name(conn_setting.name) - if config is not None: - existing_pairs = set(flatten([x.split("-") for x in trading_pairs])) - - other_tokens: Set[str] = set(config.get("tokens", "").split(",")) - other_tokens.discard("") - tokens: List[str] = [t for t in other_tokens if t not in existing_pairs] - if tokens != [""]: - trading_pairs.append("-".join(tokens)) + if conn_setting.uses_gateway_generic_connector(): + config: Optional[Dict[str, str]] = GatewayConnectionSetting.get_connector_spec_from_market_name(conn_setting.name) + if config is not None: + existing_pairs = set(flatten([x.split("-") for x in trading_pairs])) + + other_tokens: Set[str] = set(config.get("tokens", "").split(",")) + other_tokens.discard("") + tokens: List[str] = [t for t in other_tokens if t not in existing_pairs] + if tokens != [""]: + trading_pairs.append("-".join(tokens)) connector = connector_class(**init_params) return connector diff --git a/scripts/amm_data_feed_example.py b/scripts/amm_data_feed_example.py new file mode 100644 index 00000000000..b16addc0742 --- /dev/null +++ b/scripts/amm_data_feed_example.py @@ -0,0 +1,46 @@ +from decimal import Decimal +from typing import Dict + +import pandas as pd + +from hummingbot.client.ui.interface_utils import format_df_for_printout +from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.data_feed.amm_gateway_data_feed import AmmGatewayDataFeed +from hummingbot.strategy.script_strategy_base import ScriptStrategyBase + + +class AMMDataFeedExample(ScriptStrategyBase): + amm_data_feed_uniswap = AmmGatewayDataFeed( + connector_chain_network="uniswap_polygon_mainnet", + trading_pairs={"LINK-USDC", "AAVE-USDC", "WMATIC-USDT"}, + order_amount_in_base=Decimal("1"), + ) + amm_data_feed_quickswap = AmmGatewayDataFeed( + connector_chain_network="quickswap_polygon_mainnet", + trading_pairs={"LINK-USDC", "AAVE-USDC", "WMATIC-USDT"}, + order_amount_in_base=Decimal("1"), + ) + markets = {"binance_paper_trade": {"BTC-USDT"}} + + def __init__(self, connectors: Dict[str, ConnectorBase]): + super().__init__(connectors) + self.amm_data_feed_uniswap.start() + self.amm_data_feed_quickswap.start() + + def on_stop(self): + self.amm_data_feed_uniswap.stop() + self.amm_data_feed_quickswap.stop() + + def on_tick(self): + pass + + def format_status(self) -> str: + if self.amm_data_feed_uniswap.is_ready() and self.amm_data_feed_quickswap.is_ready(): + lines = [] + rows = [] + rows.extend(dict(price) for token, price in self.amm_data_feed_uniswap.price_dict.items()) + rows.extend(dict(price) for token, price in self.amm_data_feed_quickswap.price_dict.items()) + df = pd.DataFrame(rows) + prices_str = format_df_for_printout(df, table_format="psql") + lines.append(f"AMM Data Feed is ready.\n{prices_str}") + return "\n".join(lines) diff --git a/scripts/directional_strategy_bb_rsi_multi_timeframe.py b/scripts/directional_strategy_bb_rsi_multi_timeframe.py index c0ab0ab9600..7b39cc591c8 100644 --- 
a/scripts/directional_strategy_bb_rsi_multi_timeframe.py +++ b/scripts/directional_strategy_bb_rsi_multi_timeframe.py @@ -39,7 +39,7 @@ class MultiTimeframeBBRSI(DirectionalStrategyBase): # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries trading_pair: str = "ETH-USDT" exchange: str = "binance_perpetual" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 # Configure the parameters for the position diff --git a/scripts/directional_strategy_macd_bb.py b/scripts/directional_strategy_macd_bb.py index e9251ce708b..e0ad856a196 100644 --- a/scripts/directional_strategy_macd_bb.py +++ b/scripts/directional_strategy_macd_bb.py @@ -39,7 +39,7 @@ class MacdBB(DirectionalStrategyBase): # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries trading_pair: str = "BTC-USDT" exchange: str = "binance_perpetual" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 # Configure the parameters for the position diff --git a/scripts/directional_strategy_rsi.py b/scripts/directional_strategy_rsi.py index a1225a899d1..c9c9d829d62 100644 --- a/scripts/directional_strategy_rsi.py +++ b/scripts/directional_strategy_rsi.py @@ -43,7 +43,7 @@ class RSI(DirectionalStrategyBase): # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries trading_pair: str = "ETH-USDT" exchange: str = "binance_perpetual" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 # Configure the parameters for the position diff --git a/scripts/directional_strategy_rsi_spot.py b/scripts/directional_strategy_rsi_spot.py index 987083dbc0f..22c54d2e760 100644 --- a/scripts/directional_strategy_rsi_spot.py +++ b/scripts/directional_strategy_rsi_spot.py @@ -43,7 +43,7 @@ class RSISpot(DirectionalStrategyBase): # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries trading_pair: str = "ETH-USDT" exchange: str = "binance" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 # Configure the parameters for the position diff --git a/scripts/directional_strategy_trend_follower.py b/scripts/directional_strategy_trend_follower.py index fe92ef02cbd..339f6fccdba 100644 --- a/scripts/directional_strategy_trend_follower.py +++ b/scripts/directional_strategy_trend_follower.py @@ -9,7 +9,7 @@ class TrendFollowingStrategy(DirectionalStrategyBase): directional_strategy_name = "trend_following" trading_pair = "DOGE-USDT" exchange = "binance_perpetual" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 # Configure the parameters for the position diff --git a/scripts/directional_strategy_widening_ema_bands.py b/scripts/directional_strategy_widening_ema_bands.py index f06e3be58cc..9e6c2ba763c 100644 --- a/scripts/directional_strategy_widening_ema_bands.py +++ b/scripts/directional_strategy_widening_ema_bands.py @@ -39,7 +39,7 @@ class WideningEMABands(DirectionalStrategyBase): # Define the trading pair and exchange that we want to use and the csv where we are going to store the entries trading_pair: str = "LINA-USDT" exchange: str = "binance_perpetual" - order_amount_usd = Decimal("15") + order_amount_usd = Decimal("20") leverage = 10 distance_pct_threshold = 0.02 diff --git a/setup/environment.yml b/setup/environment.yml index 1fe8cb0ee59..71f3fe31caf 100644 --- a/setup/environment.yml +++ 
b/setup/environment.yml @@ -7,6 +7,8 @@ dependencies: - bidict - coverage=5.5 - gql + - grpcio + - grpcio-tools - nomkl=1.0 - nose=1.3.7 - nose-exclude @@ -20,15 +22,14 @@ dependencies: - python=3.10.12 - pytables=3.8.0 - scipy=1.10.1 + - sqlalchemy=1.4 - tabulate==0.8.9 - typing-extensions<4.6.0 + - ujson - zlib=1.2.13 - pip: - - 0x-contract-addresses==3.0.0 - - 0x-contract-wrappers==2.0.0 - - 0x-order-utils==4.0.0 - aiohttp==3.* - - aioprocessing==2.0.0 + - aioprocessing==2.0 - aioresponses - appdirs==1.4.3 - async-timeout @@ -42,15 +43,15 @@ dependencies: - cython==3.0.0a10 - diff-cover==5.1.2 - docker==5.0.3 - - dydx-v3-python==2.0.1 + - eip712-structs==1.1.0 - ethsnarks-loopring==0.1.5 - flake8==3.7.9 - importlib-metadata==0.23 - - injective-py==0.6.0.7 + - injective-py==0.7 - mypy-extensions==0.4.3 - pandas_ta==0.3.14b - - yarl==1.* - pre-commit==2.18.1 + - protobuf>=4 - psutil==5.7.2 - ptpython==3.0.20 - pyjwt==1.7.1 @@ -60,10 +61,10 @@ dependencies: - rsa==4.7 - ruamel-yaml==0.16.10 - signalr-client-aio==0.0.1.6.2 - - sqlalchemy==1.4.* - substrate-interface==1.6.2 - solders==0.1.4 - - web3==5.31.4 - - websockets==9.1 - - ujson==5.7.0 - - git+https://github.com/CoinAlpha/python-signalr-client.git \ No newline at end of file + - web3 + - websockets + - yarl==1.* + - git+https://github.com/CoinAlpha/python-signalr-client.git + - git+https://github.com/konichuvak/dydx-v3-python.git@web3 \ No newline at end of file diff --git a/test/hummingbot/client/command/test_config_command.py b/test/hummingbot/client/command/test_config_command.py index 712ff5c5d53..c56414bc772 100644 --- a/test/hummingbot/client/command/test_config_command.py +++ b/test/hummingbot/client/command/test_config_command.py @@ -87,7 +87,7 @@ def test_list_configs(self, notify_mock, get_strategy_config_map_mock): " | ∟ gateway_api_port | 15888 |\n" " | rate_oracle_source | binance |\n" " | global_token | |\n" - " | ∟ global_token_name | USD |\n" + " | ∟ global_token_name | USDT |\n" " | ∟ global_token_symbol | $ |\n" " | rate_limits_share_pct | 100 |\n" " | commands_timeout | |\n" diff --git a/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_derivative.py b/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_derivative.py index 7203e710539..5a841270668 100644 --- a/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_derivative.py +++ b/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_derivative.py @@ -1998,8 +1998,9 @@ def test_create_order_min_order_size_failure(self): self.assertTrue(self._is_logged( "WARNING", - f"{trade_type.name.title()} order amount 0 is lower than the minimum order" - f" size {min_order_size}. The order will not be created." + f"{trade_type.name.title()} order amount {amount} is lower than the minimum order " + f"size {trading_rules[0].min_order_size}. The order will not be created, increase the " + f"amount to be higher than the minimum order size." 
)) def test_create_order_min_notional_size_failure(self): diff --git a/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_user_stream_data_source.py b/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_user_stream_data_source.py index e2234335ab1..a0a7f7ec83d 100644 --- a/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_user_stream_data_source.py +++ b/test/hummingbot/connector/derivative/binance_perpetual/test_binance_perpetual_user_stream_data_source.py @@ -203,10 +203,10 @@ def test_ping_listen_key_successful(self, mock_api): result: bool = self.async_run_with_timeout(self.data_source.ping_listen_key()) self.assertTrue(result) - # @unittest.skip("Test with error") @aioresponses() + @patch("hummingbot.core.data_type.user_stream_tracker_data_source.UserStreamTrackerDataSource._sleep") @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock) - def test_create_websocket_connection_log_exception(self, mock_api, mock_ws): + def test_create_websocket_connection_log_exception(self, mock_api, mock_ws, _): url = web_utils.rest_url(path_url=CONSTANTS.BINANCE_USER_STREAM_ENDPOINT, domain=self.domain) regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?")) diff --git a/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py b/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py index 0c4c91f4319..4f07d32279e 100644 --- a/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py +++ b/test/hummingbot/connector/exchange/bybit/test_bybit_exchange.py @@ -641,7 +641,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel trading_pair=self.trading_pair, amount=Decimal("0.0001"), order_type=OrderType.LIMIT, - price=Decimal("0.0000001"))) + price=Decimal("0.0001"))) # The second order is used only to have the event triggered and avoid using timeouts for tests asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.BUY, @@ -663,7 +663,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self._is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." ) ) self.assertTrue( diff --git a/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py b/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py index 5914eb1332b..4673571f4bd 100644 --- a/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py +++ b/test/hummingbot/connector/exchange/gate_io/test_gate_io_exchange.py @@ -681,7 +681,9 @@ def test_order_with_less_amount_than_allowed_is_not_created(self, mock_api): self.assertTrue( self._is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." 
) ) diff --git a/test/hummingbot/connector/exchange/injective_v2/__init__.py b/test/hummingbot/connector/exchange/injective_v2/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test/hummingbot/connector/exchange/injective_v2/programmable_query_executor.py b/test/hummingbot/connector/exchange/injective_v2/programmable_query_executor.py new file mode 100644 index 00000000000..59ac4e3a4bb --- /dev/null +++ b/test/hummingbot/connector/exchange/injective_v2/programmable_query_executor.py @@ -0,0 +1,99 @@ +import asyncio +from typing import Any, Dict, List, Optional + +from hummingbot.connector.exchange.injective_v2.injective_query_executor import BaseInjectiveQueryExecutor + + +class ProgrammableQueryExecutor(BaseInjectiveQueryExecutor): + + def __init__(self): + self._ping_responses = asyncio.Queue() + self._spot_markets_responses = asyncio.Queue() + self._spot_order_book_responses = asyncio.Queue() + self._transaction_by_hash_responses = asyncio.Queue() + self._account_portfolio_responses = asyncio.Queue() + self._simulate_transaction_responses = asyncio.Queue() + self._send_transaction_responses = asyncio.Queue() + self._spot_trades_responses = asyncio.Queue() + self._historical_spot_orders_responses = asyncio.Queue() + self._transaction_block_height_responses = asyncio.Queue() + + self._spot_order_book_updates = asyncio.Queue() + self._public_spot_trade_updates = asyncio.Queue() + self._subaccount_balance_events = asyncio.Queue() + self._historical_spot_order_events = asyncio.Queue() + + async def ping(self): + response = await self._ping_responses.get() + return response + + async def spot_markets(self, status: str) -> Dict[str, Any]: + response = await self._spot_markets_responses.get() + return response + + async def get_spot_orderbook(self, market_id: str) -> Dict[str, Any]: + response = await self._spot_order_book_responses.get() + return response + + async def get_tx_by_hash(self, tx_hash: str) -> Dict[str, Any]: + response = await self._transaction_by_hash_responses.get() + return response + + async def get_tx_block_height(self, tx_hash: str) -> int: + response = await self._transaction_block_height_responses.get() + return response + + async def account_portfolio(self, account_address: str) -> Dict[str, Any]: + response = await self._account_portfolio_responses.get() + return response + + async def simulate_tx(self, tx_byte: bytes) -> Dict[str, Any]: + response = await self._simulate_transaction_responses.get() + return response + + async def send_tx_sync_mode(self, tx_byte: bytes) -> Dict[str, Any]: + response = await self._send_transaction_responses.get() + return response + + async def get_spot_trades( + self, + market_ids: List[str], + subaccount_id: Optional[str] = None, + start_time: Optional[int] = None, + skip: Optional[int] = None, + limit: Optional[int] = None, + ) -> Dict[str, Any]: + response = await self._spot_trades_responses.get() + return response + + async def get_historical_spot_orders( + self, + market_ids: List[str], + subaccount_id: str, + start_time: int, + skip: int, + ) -> Dict[str, Any]: + response = await self._historical_spot_orders_responses.get() + return response + + async def spot_order_book_updates_stream(self, market_ids: List[str]): + while True: + next_ob_update = await self._spot_order_book_updates.get() + yield next_ob_update + + async def public_spot_trades_stream(self, market_ids: List[str]): + while True: + next_trade = await self._public_spot_trade_updates.get() + yield next_trade + + async def 
subaccount_balance_stream(self, subaccount_id: str): + while True: + next_event = await self._subaccount_balance_events.get() + yield next_event + + async def subaccount_historical_spot_orders_stream( + self, market_id: str, subaccount_id: str + ): + while True: + next_event = await self._historical_spot_order_events.get() + yield next_event diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_data_source.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_data_source.py new file mode 100644 index 00000000000..be70dd4c671 --- /dev/null +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_data_source.py @@ -0,0 +1,348 @@ +import asyncio +import re +from test.hummingbot.connector.exchange.injective_v2.programmable_query_executor import ProgrammableQueryExecutor +from typing import Awaitable, Optional, Union +from unittest import TestCase +from unittest.mock import patch + +from pyinjective.wallet import Address, PrivateKey + +from hummingbot.connector.exchange.injective_v2.injective_data_source import InjectiveDataSource +from hummingbot.connector.exchange.injective_v2.injective_market import InjectiveSpotMarket + + +class InjectiveGranteeDataSourceTests(TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @patch("hummingbot.core.utils.trading_pair_fetcher.TradingPairFetcher.fetch_all") + def setUp(self, _) -> None: + super().setUp() + self._original_async_loop = asyncio.get_event_loop() + self.async_loop = asyncio.new_event_loop() + self.async_tasks = [] + asyncio.set_event_loop(self.async_loop) + + _, grantee_private_key = PrivateKey.generate() + _, granter_private_key = PrivateKey.generate() + + self.data_source = InjectiveDataSource.for_grantee( + private_key=grantee_private_key.to_hex(), + subaccount_index=0, + granter_address=Address(bytes.fromhex(granter_private_key.to_public_key().to_hex())).to_acc_bech32(), + granter_subaccount_index=0, + ) + + self.query_executor = ProgrammableQueryExecutor() + self.data_source._query_executor = self.query_executor + + self.log_records = [] + self._logs_event: Optional[asyncio.Event] = None + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + + def tearDown(self) -> None: + self.async_run_with_timeout(self.data_source.stop()) + for task in self.async_tasks: + task.cancel() + self.async_loop.stop() + # self.async_loop.close() + # Since the event loop will change we need to remove the logs event created in the old event loop + self._logs_event = None + asyncio.set_event_loop(self._original_async_loop) + super().tearDown() + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.async_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def create_task(self, coroutine: Awaitable) -> asyncio.Task: + task = self.async_loop.create_task(coroutine) + self.async_tasks.append(task) + return task + + def handle(self, record): + self.log_records.append(record) + if self._logs_event is not None: + self._logs_event.set() + + def is_logged(self, log_level: str, message: Union[str, re.Pattern]) -> bool: + expression = ( + re.compile( + f"^{message}$" + .replace(".", r"\.") + .replace("?", r"\?") + .replace("/", r"\/") + .replace("(", r"\(") + .replace(")", r"\)") + .replace("[", r"\[") + .replace("]", r"\]") + ) + if isinstance(message, str) + else message + ) + return any( + record.levelname == log_level and expression.match(record.getMessage()) is not None + for record 
in self.log_records + ) + + def test_market_and_tokens_construction(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + market_info = self._inj_usdt_market_info() + inj_usdt_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(market_info["marketId"]) + ) + inj_token = inj_usdt_market.base_token + usdt_token = inj_usdt_market.quote_token + + self.assertEqual(market_info["marketId"], inj_usdt_market.market_id) + self.assertEqual(market_info, inj_usdt_market.market_info) + self.assertEqual(f"{inj_token.unique_symbol}-{usdt_token.unique_symbol}", inj_usdt_market.trading_pair()) + self.assertEqual(market_info["baseDenom"], inj_token.denom) + self.assertEqual(market_info["baseTokenMeta"]["symbol"], inj_token.symbol) + self.assertEqual(inj_token.symbol, inj_token.unique_symbol) + self.assertEqual(market_info["baseTokenMeta"]["name"], inj_token.name) + self.assertEqual(market_info["baseTokenMeta"]["decimals"], inj_token.decimals) + self.assertEqual(market_info["quoteDenom"], usdt_token.denom) + self.assertEqual(market_info["quoteTokenMeta"]["symbol"], usdt_token.symbol) + self.assertEqual(usdt_token.symbol, usdt_token.unique_symbol) + self.assertEqual(market_info["quoteTokenMeta"]["name"], usdt_token.name) + self.assertEqual(market_info["quoteTokenMeta"]["decimals"], usdt_token.decimals) + + market_info = self._usdc_solana_usdc_eth_market_info() + usdc_solana_usdc_eth_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(market_info["marketId"]) + ) + usdc_solana_token = usdc_solana_usdc_eth_market.base_token + usdc_eth_token = usdc_solana_usdc_eth_market.quote_token + + self.assertEqual(market_info["marketId"], usdc_solana_usdc_eth_market.market_id) + self.assertEqual(market_info, usdc_solana_usdc_eth_market.market_info) + self.assertEqual(f"{usdc_solana_token.unique_symbol}-{usdc_eth_token.unique_symbol}", usdc_solana_usdc_eth_market.trading_pair()) + self.assertEqual(market_info["baseDenom"], usdc_solana_token.denom) + self.assertEqual(market_info["baseTokenMeta"]["symbol"], usdc_solana_token.symbol) + self.assertEqual(market_info["ticker"].split("/")[0], usdc_solana_token.unique_symbol) + self.assertEqual(market_info["baseTokenMeta"]["name"], usdc_solana_token.name) + self.assertEqual(market_info["baseTokenMeta"]["decimals"], usdc_solana_token.decimals) + self.assertEqual(market_info["quoteDenom"], usdc_eth_token.denom) + self.assertEqual(market_info["quoteTokenMeta"]["symbol"], usdc_eth_token.symbol) + self.assertEqual(usdc_eth_token.name, usdc_eth_token.unique_symbol) + self.assertEqual(market_info["quoteTokenMeta"]["name"], usdc_eth_token.name) + self.assertEqual(market_info["quoteTokenMeta"]["decimals"], usdc_eth_token.decimals) + + def test_markets_initialization_generates_unique_trading_pairs_for_tokens_with_same_symbol(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + inj_usdt_trading_pair = self.async_run_with_timeout( + self.data_source.trading_pair_for_market(market_id=self._inj_usdt_market_info()["marketId"]) + ) + self.assertEqual("INJ-USDT", inj_usdt_trading_pair) + usdt_usdc_trading_pair = self.async_run_with_timeout( + self.data_source.trading_pair_for_market(market_id=self._usdt_usdc_market_info()["marketId"]) + ) + self.assertEqual("USDT-USDC", usdt_usdc_trading_pair) + usdt_usdc_eth_trading_pair = 
self.async_run_with_timeout( + self.data_source.trading_pair_for_market(market_id=self._usdt_usdc_eth_market_info()["marketId"]) + ) + self.assertEqual("USDT-USC Coin (Wormhole from Ethereum)", usdt_usdc_eth_trading_pair) + usdc_solana_usdc_eth_trading_pair = self.async_run_with_timeout( + self.data_source.trading_pair_for_market(market_id=self._usdc_solana_usdc_eth_market_info()["marketId"]) + ) + self.assertEqual("USDCso-USC Coin (Wormhole from Ethereum)", usdc_solana_usdc_eth_trading_pair) + + def test_markets_initialization_adds_different_tokens_having_same_symbol(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + self.async_run_with_timeout(self.data_source.update_markets()) + + inj_usdt_market_info = self._inj_usdt_market_info() + self.assertIn(inj_usdt_market_info["baseDenom"], self.data_source._tokens_map) + self.assertEqual( + inj_usdt_market_info["baseDenom"], + self.data_source._token_symbol_symbol_and_denom_map[inj_usdt_market_info["baseTokenMeta"]["symbol"]] + ) + self.assertIn(inj_usdt_market_info["quoteDenom"], self.data_source._tokens_map) + self.assertEqual( + inj_usdt_market_info["quoteDenom"], + self.data_source._token_symbol_symbol_and_denom_map[inj_usdt_market_info["quoteTokenMeta"]["symbol"]] + ) + + usdt_usdc_market_info = self._usdt_usdc_market_info() + self.assertIn(usdt_usdc_market_info["quoteDenom"], self.data_source._tokens_map) + self.assertEqual( + usdt_usdc_market_info["quoteDenom"], + self.data_source._token_symbol_symbol_and_denom_map[usdt_usdc_market_info["quoteTokenMeta"]["symbol"]] + ) + + usdt_usdc_eth_market_info = self._usdt_usdc_eth_market_info() + self.assertIn(usdt_usdc_eth_market_info["quoteDenom"], self.data_source._tokens_map) + self.assertEqual( + usdt_usdc_eth_market_info["quoteDenom"], + self.data_source._token_symbol_symbol_and_denom_map[usdt_usdc_eth_market_info["quoteTokenMeta"]["name"]] + ) + + usdc_solana_usdc_eth_market_info = self._usdc_solana_usdc_eth_market_info() + expected_usdc_solana_unique_symbol = usdc_solana_usdc_eth_market_info["ticker"].split("/")[0] + self.assertIn(usdc_solana_usdc_eth_market_info["baseDenom"], self.data_source._tokens_map) + self.assertEqual( + usdc_solana_usdc_eth_market_info["baseDenom"], + self.data_source._token_symbol_symbol_and_denom_map[expected_usdc_solana_unique_symbol] + ) + + def test_markets_initialization_creates_one_instance_per_token(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + inj_usdt_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(self._inj_usdt_market_info()["marketId"]) + ) + usdt_usdc_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(self._usdt_usdc_market_info()["marketId"]) + ) + usdt_usdc_eth_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(self._usdt_usdc_eth_market_info()["marketId"]) + ) + usdc_solana_usdc_eth_market: InjectiveSpotMarket = self.async_run_with_timeout( + self.data_source.market_info_for_id(self._usdc_solana_usdc_eth_market_info()["marketId"]) + ) + + self.assertEqual(inj_usdt_market.quote_token, usdt_usdc_market.base_token) + self.assertEqual(inj_usdt_market.quote_token, usdt_usdc_eth_market.base_token) + + self.assertNotEqual(usdt_usdc_market.quote_token, usdt_usdc_eth_market.quote_token) + 
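# Each USDC variant uses a different denom, so the data source keeps a separate token instance for each of them +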
self.assertNotEqual(usdt_usdc_market.quote_token, usdc_solana_usdc_eth_market.base_token) + + self.assertEqual(usdt_usdc_eth_market.quote_token, usdc_solana_usdc_eth_market.quote_token) + self.assertNotEqual(usdt_usdc_eth_market.quote_token, usdc_solana_usdc_eth_market.base_token) + + def _spot_markets_response(self): + return [ + self._inj_usdt_market_info(), + self._usdt_usdc_market_info(), + self._usdt_usdc_eth_market_info(), + self._usdc_solana_usdc_eth_market_info() + ] + + def _usdc_solana_usdc_eth_market_info(self): + return { + "marketId": "0xb825e2e4dbe369446e454e21c16e041cbc4d95d73f025c369f92210e82d2106f", # noqa: mock + "marketStatus": "active", + "ticker": "USDCso/USDCet", + "baseDenom": "factory/inj14ejqjyq8um4p3xfqj74yld5waqljf88f9eneuk/inj12pwnhtv7yat2s30xuf4gdk9qm85v4j3e60dgvu", # noqa: mock + "baseTokenMeta": { + "name": "USD Coin (Wormhole from Solana)", + "address": "0x0000000000000000000000000000000000000000", + "symbol": "USDC", + "logo": "https://static.alchemyapi.io/images/assets/3408.png", + "decimals": 6, + "updatedAt": "1685371052880", + }, + "quoteDenom": "factory/inj14ejqjyq8um4p3xfqj74yld5waqljf88f9eneuk/inj1q6zlut7gtkzknkk773jecujwsdkgq882akqksk", # noqa: mock + "quoteTokenMeta": { + "name": "USC Coin (Wormhole from Ethereum)", + "address": "0x0000000000000000000000000000000000000000", + "symbol": "USDC", + "logo": "https://static.alchemyapi.io/images/assets/3408.png", + "decimals": 6, + "updatedAt": "1685371052880", + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.0001", + "minQuantityTickSize": "100", + } + + def _usdt_usdc_eth_market_info(self): + return { + "marketId": "0xda0bb7a7d8361d17a9d2327ed161748f33ecbf02738b45a7dd1d812735d1531c", # noqa: mock + "marketStatus": "active", + "ticker": "USDT/USDC", + "baseDenom": "peggy0xdAC17F958D2ee523a2206206994597C13D831ec7", + "baseTokenMeta": { + "name": "Tether", + "address": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "symbol": "USDT", + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1685371052879", + }, + "quoteDenom": "factory/inj14ejqjyq8um4p3xfqj74yld5waqljf88f9eneuk/inj1q6zlut7gtkzknkk773jecujwsdkgq882akqksk", # noqa: mock + "quoteTokenMeta": { + "name": "USC Coin (Wormhole from Ethereum)", + "address": "0x0000000000000000000000000000000000000000", + "symbol": "USDC", + "logo": "https://static.alchemyapi.io/images/assets/3408.png", + "decimals": 6, + "updatedAt": "1685371052880" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.0001", + "minQuantityTickSize": "100", + } + + def _usdt_usdc_market_info(self): + return { + "marketId": "0x8b1a4d3e8f6b559e30e40922ee3662dd78edf7042330d4d620d188699d1a9715", # noqa: mock + "marketStatus": "active", + "ticker": "USDT/USDC", + "baseDenom": "peggy0xdAC17F958D2ee523a2206206994597C13D831ec7", + "baseTokenMeta": { + "name": "Tether", + "address": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "symbol": "USDT", + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1685371052879" + }, + "quoteDenom": "peggy0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "quoteTokenMeta": { + "name": "USD Coin", + "address": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "symbol": "USDC", + "logo": "https://static.alchemyapi.io/images/assets/3408.png", + "decimals": 6, + "updatedAt": "1685371052879" + }, + "makerFeeRate": "0.001", + "takerFeeRate": "0.002", + 
"serviceProviderFee": "0.4", + "minPriceTickSize": "0.0001", + "minQuantityTickSize": "100", + } + + def _inj_usdt_market_info(self): + return { + "marketId": "0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0", # noqa: mock + "marketStatus": "active", + "ticker": "INJ/USDT", + "baseDenom": "inj", + "baseTokenMeta": { + "name": "Injective Protocol", + "address": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30", + "symbol": "INJ", + "logo": "https://static.alchemyapi.io/images/assets/7226.png", + "decimals": 18, + "updatedAt": "1685371052879" + }, + "quoteDenom": "peggy0xdAC17F958D2ee523a2206206994597C13D831ec7", + "quoteTokenMeta": { + "name": "Tether", + "address": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "symbol": "USDT", + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1685371052879" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.000000000000001", + "minQuantityTickSize": "1000000000000000" + } diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py new file mode 100644 index 00000000000..de1afe5a363 --- /dev/null +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_market.py @@ -0,0 +1,106 @@ +from decimal import Decimal +from unittest import TestCase + +from hummingbot.connector.exchange.injective_v2.injective_market import InjectiveSpotMarket, InjectiveToken + + +class InjectiveSpotMarketTests(TestCase): + + def setUp(self) -> None: + super().setUp() + + self._inj_token = InjectiveToken( + denom="inj", + symbol="INJ", + unique_symbol="INJ", + name="Injective Protocol", + decimals=18, + ) + self._usdt_token = InjectiveToken( + denom="peggy0xdAC17F958D2ee523a2206206994597C13D831ec7", # noqa: mock + symbol="USDT", + unique_symbol="USDT", + name="Tether", + decimals=6, + ) + + self._inj_usdt_market = InjectiveSpotMarket( + market_id="0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0", # noqa: mock + base_token=self._inj_token, + quote_token=self._usdt_token, + market_info={ + "marketId": "0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0", # noqa: mock + "marketStatus": "active", + "ticker": "INJ/USDT", + "baseDenom": "inj", + "baseTokenMeta": { + "name": "Injective Protocol", + "address": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30", + "symbol": "INJ", + "logo": "https://static.alchemyapi.io/images/assets/7226.png", + "decimals": 18, + "updatedAt": "1685371052879" + }, + "quoteDenom": "peggy0xdAC17F958D2ee523a2206206994597C13D831ec7", + "quoteTokenMeta": { + "name": "Tether", + "address": "0xdAC17F958D2ee523a2206206994597C13D831ec7", # noqa: mock + "symbol": "USDT", + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1685371052879" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.000000000000001", + "minQuantityTickSize": "1000000000000000" + } + ) + + def test_trading_pair(self): + self.assertEqual("INJ-USDT", self._inj_usdt_market.trading_pair()) + + def test_convert_quantity_from_chain_format(self): + expected_quantity = Decimal("1234") + chain_quantity = expected_quantity * Decimal(f"1e{self._inj_token.decimals}") + converted_quantity = self._inj_usdt_market.quantity_from_chain_format(chain_quantity=chain_quantity) + + self.assertEqual(expected_quantity, converted_quantity) + + def 
test_convert_price_from_chain_format(self): + expected_price = Decimal("15.43") + chain_price = expected_price * Decimal(f"1e{self._usdt_token.decimals}") / Decimal(f"1e{self._inj_token.decimals}") + converted_price = self._inj_usdt_market.price_from_chain_format(chain_price=chain_price) + + self.assertEqual(expected_price, converted_price) + + def test_min_price_tick_size(self): + market = self._inj_usdt_market + expected_value = market.price_from_chain_format(chain_price=Decimal(market.market_info["minPriceTickSize"])) + + self.assertEqual(expected_value, market.min_price_tick_size()) + + def test_min_quantity_tick_size(self): + market = self._inj_usdt_market + expected_value = market.quantity_from_chain_format( + chain_quantity=Decimal(market.market_info["minQuantityTickSize"]) + ) + + self.assertEqual(expected_value, market.min_quantity_tick_size()) + + +class InjectiveTokenTests(TestCase): + + def test_convert_value_from_chain_format(self): + token = InjectiveToken( + denom="inj", + symbol="INJ", + unique_symbol="INJ", + name="Injective Protocol", + decimals=18, + ) + + converted_value = token.value_from_chain_format(chain_value=Decimal("100_000_000_000_000_000_000")) + + self.assertEqual(Decimal("100"), converted_value) diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py new file mode 100644 index 00000000000..a82fab99a4d --- /dev/null +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_api_order_book_data_source.py @@ -0,0 +1,389 @@ +import asyncio +import re +from decimal import Decimal +from test.hummingbot.connector.exchange.injective_v2.programmable_query_executor import ProgrammableQueryExecutor +from typing import Awaitable, Optional, Union +from unittest import TestCase +from unittest.mock import AsyncMock, MagicMock, patch + +from bidict import bidict +from pyinjective.wallet import Address, PrivateKey + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.injective_v2.injective_v2_api_order_book_data_source import ( + InjectiveV2APIOrderBookDataSource, +) +from hummingbot.connector.exchange.injective_v2.injective_v2_exchange import InjectiveV2Exchange +from hummingbot.core.data_type.common import TradeType +from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType + + +class InjectiveV2APIOrderBookDataSourceTests(TestCase): + # the level is required to receive logs from the data source logger + level = 0 + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.base_asset = "INJ" + cls.quote_asset = "USDT" + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.ex_trading_pair = f"{cls.base_asset}/{cls.quote_asset}" + cls.market_id = "0x0611780ba69656949525013d947713300f56c37b6175e02f26bffa495c3208fe" # noqa: mock + + @patch("hummingbot.core.utils.trading_pair_fetcher.TradingPairFetcher.fetch_all") + def setUp(self, _) -> None: + super().setUp() + self._original_async_loop = asyncio.get_event_loop() + self.async_loop = asyncio.new_event_loop() + self.async_tasks = [] + asyncio.set_event_loop(self.async_loop) + + client_config_map = ClientConfigAdapter(ClientConfigMap()) + + _, grantee_private_key = PrivateKey.generate() + _, granter_private_key = PrivateKey.generate() + self.connector = InjectiveV2Exchange( + 
client_config_map=client_config_map, + injective_private_key=grantee_private_key.to_hex(), + injective_subaccount_index=0, + injective_granter_address=Address(bytes.fromhex(granter_private_key.to_public_key().to_hex())).to_acc_bech32(), + injective_granter_subaccount_index=0, + trading_pairs=[self.trading_pair], + ) + self.data_source = InjectiveV2APIOrderBookDataSource( + trading_pairs=[self.trading_pair], + connector=self.connector, + data_source=self.connector._data_source, + ) + + self.query_executor = ProgrammableQueryExecutor() + self.connector._data_source._query_executor = self.query_executor + + self.log_records = [] + self._logs_event: Optional[asyncio.Event] = None + self.data_source.logger().setLevel(1) + self.data_source.logger().addHandler(self) + self.data_source._data_source.logger().setLevel(1) + self.data_source._data_source.logger().addHandler(self) + + self.connector._set_trading_pair_symbol_map(bidict({self.market_id: self.trading_pair})) + + def tearDown(self) -> None: + self.async_run_with_timeout(self.data_source._data_source.stop()) + for task in self.async_tasks: + task.cancel() + self.async_loop.stop() + # self.async_loop.close() + # Since the event loop will change we need to remove the logs event created in the old event loop + self._logs_event = None + asyncio.set_event_loop(self._original_async_loop) + super().tearDown() + + def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1): + ret = self.async_loop.run_until_complete(asyncio.wait_for(coroutine, timeout)) + return ret + + def create_task(self, coroutine: Awaitable) -> asyncio.Task: + task = self.async_loop.create_task(coroutine) + self.async_tasks.append(task) + return task + + def handle(self, record): + self.log_records.append(record) + if self._logs_event is not None: + self._logs_event.set() + + def is_logged(self, log_level: str, message: Union[str, re.Pattern]) -> bool: + expression = ( + re.compile( + f"^{message}$" + .replace(".", r"\.") + .replace("?", r"\?") + .replace("/", r"\/") + .replace("(", r"\(") + .replace(")", r"\)") + .replace("[", r"\[") + .replace("]", r"\]") + ) + if isinstance(message, str) + else message + ) + return any( + record.levelname == log_level and expression.match(record.getMessage()) is not None + for record in self.log_records + ) + + def test_get_new_order_book_successful(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + base_decimals = spot_markets_response[0]["baseTokenMeta"]["decimals"] + quote_decimals = spot_markets_response[0]["quoteTokenMeta"]["decimals"] + + order_book_snapshot = { + "buys": [(Decimal("9487") * Decimal(f"1e{quote_decimals-base_decimals}"), + Decimal("336241") * Decimal(f"1e{base_decimals}"), + 1640001112223)], + "sells": [(Decimal("9487.5") * Decimal(f"1e{quote_decimals-base_decimals}"), + Decimal("522147") * Decimal(f"1e{base_decimals}"), + 1640001112224)], + "sequence": 512, + "timestamp": 1650001112223, + } + + self.query_executor._spot_order_book_responses.put_nowait(order_book_snapshot) + + order_book = self.async_run_with_timeout(self.data_source.get_new_order_book(self.trading_pair)) + + expected_update_id = order_book_snapshot["sequence"] + + self.assertEqual(expected_update_id, order_book.snapshot_uid) + bids = list(order_book.bid_entries()) + asks = list(order_book.ask_entries()) + self.assertEqual(1, len(bids)) + self.assertEqual(9487, bids[0].price) + self.assertEqual(336241, bids[0].amount) + 
self.assertEqual(expected_update_id, bids[0].update_id) + self.assertEqual(1, len(asks)) + self.assertEqual(9487.5, asks[0].price) + self.assertEqual(522147, asks[0].amount) + self.assertEqual(expected_update_id, asks[0].update_id) + + def test_listen_for_trades_cancelled_when_listening(self): + mock_queue = MagicMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.data_source._message_queue[self.data_source._trade_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.data_source.listen_for_trades(self.async_loop, msg_queue)) + + def test_listen_for_trades_logs_exception(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + self.query_executor._public_spot_trade_updates.put_nowait({}) + trade_data = { + "orderHash": "0x070e2eb3d361c8b26eae510f481bed513a1fb89c0869463a387cfa7995a27043", # noqa: mock + "subaccountId": "0x7998ca45575408f8b4fa354fe615abf3435cf1a7000000000000000000000000", # noqa: mock + "marketId": self.market_id, + "tradeExecutionType": "limitMatchRestingOrder", + "tradeDirection": "sell", + "price": { + "price": "0.000000000007701", + "quantity": "324600000000000000000", + "timestamp": "1687878089569" + }, + "fee": "-249974.46", + "executedAt": "1687878089569", + "feeRecipient": "inj10xvv532h2sy03d86x487v9dt7dp4eud8fe2qv5", # noqa: mock + "tradeId": "37120120_60_0", + "executionSide": "maker" + } + self.query_executor._public_spot_trade_updates.put_nowait(trade_data) + + self.async_run_with_timeout(self.data_source.listen_for_subscriptions()) + + msg_queue = asyncio.Queue() + self.create_task(self.data_source.listen_for_trades(self.async_loop, msg_queue)) + self.async_run_with_timeout(msg_queue.get()) + + self.assertTrue( + self.is_logged( + "WARNING", re.compile(r"^Invalid public trade event format \(.*") + ) + ) + + def test_listen_for_trades_successful(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + base_decimals = spot_markets_response[0]["baseTokenMeta"]["decimals"] + quote_decimals = spot_markets_response[0]["quoteTokenMeta"]["decimals"] + + trade_data = { + "orderHash": "0x070e2eb3d361c8b26eae510f481bed513a1fb89c0869463a387cfa7995a27043", # noqa: mock + "subaccountId": "0x7998ca45575408f8b4fa354fe615abf3435cf1a7000000000000000000000000", # noqa: mock + "marketId": self.market_id, + "tradeExecutionType": "limitMatchRestingOrder", + "tradeDirection": "sell", + "price": { + "price": "0.000000000007701", + "quantity": "324600000000000000000", + "timestamp": "1687878089569" + }, + "fee": "-249974.46", + "executedAt": "1687878089569", + "feeRecipient": "inj10xvv532h2sy03d86x487v9dt7dp4eud8fe2qv5", # noqa: mock + "tradeId": "37120120_60_0", + "executionSide": "maker" + } + self.query_executor._public_spot_trade_updates.put_nowait(trade_data) + + self.async_run_with_timeout(self.data_source.listen_for_subscriptions()) + + msg_queue = asyncio.Queue() + self.create_task(self.data_source.listen_for_trades(self.async_loop, msg_queue)) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(OrderBookMessageType.TRADE, msg.type) + self.assertEqual(trade_data["tradeId"], msg.trade_id) + self.assertEqual(int(trade_data["executedAt"]) * 1e-3, msg.timestamp) + expected_price = Decimal(trade_data["price"]["price"]) * 
Decimal(f"1e{base_decimals-quote_decimals}") + expected_amount = Decimal(trade_data["price"]["quantity"]) * Decimal(f"1e{-base_decimals}") + self.assertEqual(expected_amount, msg.content["amount"]) + self.assertEqual(expected_price, msg.content["price"]) + self.assertEqual(self.trading_pair, msg.content["trading_pair"]) + self.assertEqual(float(TradeType.SELL.value), msg.content["trade_type"]) + + def test_listen_for_order_book_diffs_cancelled(self): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.data_source._message_queue[self.data_source._diff_messages_queue_key] = mock_queue + + msg_queue: asyncio.Queue = asyncio.Queue() + + with self.assertRaises(asyncio.CancelledError): + self.async_run_with_timeout(self.data_source.listen_for_order_book_diffs(self.async_loop, msg_queue)) + + def test_listen_for_order_book_diffs_logs_exception(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + + self.query_executor._spot_order_book_updates.put_nowait({}) + order_book_data = { + "marketId": self.market_id, + "sequence": "7734169", + "buys": [ + { + "price": "0.000000000007684", + "quantity": "4578787000000000000000", + "isActive": True, + "timestamp": "1687889315683" + }, + { + "price": "0.000000000007685", + "quantity": "4412340000000000000000", + "isActive": True, + "timestamp": "1687889316000" + } + ], + "sells": [ + { + "price": "0.000000000007723", + "quantity": "3478787000000000000000", + "isActive": True, + "timestamp": "1687889315683" + } + ], + "updatedAt": "1687889315683", + } + self.query_executor._spot_order_book_updates.put_nowait(order_book_data) + + self.async_run_with_timeout(self.data_source.listen_for_subscriptions()) + + msg_queue: asyncio.Queue = asyncio.Queue() + self.create_task(self.data_source.listen_for_order_book_diffs(self.async_loop, msg_queue)) + + self.async_run_with_timeout(msg_queue.get()) + + self.assertTrue( + self.is_logged( + "WARNING", re.compile(r"^Invalid orderbook diff event format \(.*") + ) + ) + + def test_listen_for_order_book_diffs_successful(self): + spot_markets_response = self._spot_markets_response() + self.query_executor._spot_markets_responses.put_nowait(spot_markets_response) + base_decimals = spot_markets_response[0]["baseTokenMeta"]["decimals"] + quote_decimals = spot_markets_response[0]["quoteTokenMeta"]["decimals"] + + order_book_data = { + "marketId": self.market_id, + "sequence": "7734169", + "buys": [ + { + "price": "0.000000000007684", + "quantity": "4578787000000000000000", + "isActive": True, + "timestamp": "1687889315683" + }, + { + "price": "0.000000000007685", + "quantity": "4412340000000000000000", + "isActive": True, + "timestamp": "1687889316000" + } + ], + "sells": [ + { + "price": "0.000000000007723", + "quantity": "3478787000000000000000", + "isActive": True, + "timestamp": "1687889315683" + } + ], + "updatedAt": "1687889315683", + } + self.query_executor._spot_order_book_updates.put_nowait(order_book_data) + + self.async_run_with_timeout(self.data_source.listen_for_subscriptions()) + + msg_queue: asyncio.Queue = asyncio.Queue() + self.create_task(self.data_source.listen_for_order_book_diffs(self.async_loop, msg_queue)) + + msg: OrderBookMessage = self.async_run_with_timeout(msg_queue.get()) + + self.assertEqual(OrderBookMessageType.DIFF, msg.type) + self.assertEqual(-1, msg.trade_id) + self.assertEqual(int(order_book_data["updatedAt"]) * 1e-3, msg.timestamp) + expected_update_id = 
int(order_book_data["sequence"]) + self.assertEqual(expected_update_id, msg.update_id) + + bids = msg.bids + asks = msg.asks + self.assertEqual(2, len(bids)) + first_bid_price = Decimal(order_book_data["buys"][0]["price"]) * Decimal(f"1e{base_decimals-quote_decimals}") + first_bid_quantity = Decimal(order_book_data["buys"][0]["quantity"]) * Decimal(f"1e{-base_decimals}") + self.assertEqual(float(first_bid_price), bids[0].price) + self.assertEqual(float(first_bid_quantity), bids[0].amount) + self.assertEqual(expected_update_id, bids[0].update_id) + self.assertEqual(1, len(asks)) + first_ask_price = Decimal(order_book_data["sells"][0]["price"]) * Decimal(f"1e{base_decimals - quote_decimals}") + first_ask_quantity = Decimal(order_book_data["sells"][0]["quantity"]) * Decimal(f"1e{-base_decimals}") + self.assertEqual(float(first_ask_price), asks[0].price) + self.assertEqual(float(first_ask_quantity), asks[0].amount) + self.assertEqual(expected_update_id, asks[0].update_id) + + def _spot_markets_response(self): + return [{ + "marketId": self.market_id, + "marketStatus": "active", + "ticker": self.ex_trading_pair, + "baseDenom": "inj", + "baseTokenMeta": { + "name": "Base Asset", + "address": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30", # noqa: mock + "symbol": self.base_asset, + "logo": "https://static.alchemyapi.io/images/assets/7226.png", + "decimals": 18, + "updatedAt": "1687190809715" + }, + "quoteDenom": "peggy0x87aB3B4C8661e07D6372361211B96ed4Dc36B1B5", # noqa: mock + "quoteTokenMeta": { + "name": "Quote Asset", + "address": "0x0000000000000000000000000000000000000000", + "symbol": self.quote_asset, + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1687190809716" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.000000000000001", + "minQuantityTickSize": "1000000000000000" + }] diff --git a/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange.py b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange.py new file mode 100644 index 00000000000..682d75ed514 --- /dev/null +++ b/test/hummingbot/connector/exchange/injective_v2/test_injective_v2_exchange.py @@ -0,0 +1,1853 @@ +import asyncio +import base64 +from collections import OrderedDict +from decimal import Decimal +from functools import partial +from test.hummingbot.connector.exchange.injective_v2.programmable_query_executor import ProgrammableQueryExecutor +from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from unittest.mock import AsyncMock, MagicMock + +from aioresponses import aioresponses +from aioresponses.core import RequestCall +from bidict import bidict +from grpc import RpcError +from pyinjective.orderhash import OrderHashManager, OrderHashResponse +from pyinjective.wallet import Address, PrivateKey + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_helpers import ClientConfigAdapter +from hummingbot.connector.exchange.injective_v2 import injective_constants as CONSTANTS +from hummingbot.connector.exchange.injective_v2.injective_v2_exchange import InjectiveV2Exchange +from hummingbot.connector.gateway.gateway_in_flight_order import GatewayInFlightOrder +from hummingbot.connector.test_support.exchange_connector_test import AbstractExchangeConnectorTests +from hummingbot.connector.trading_rule import TradingRule +from hummingbot.core.data_type.common import OrderType, TradeType +from 
hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.data_type.limit_order import LimitOrder +from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase +from hummingbot.core.event.events import ( + BuyOrderCompletedEvent, + BuyOrderCreatedEvent, + MarketOrderFailureEvent, + OrderCancelledEvent, + OrderFilledEvent, +) +from hummingbot.core.network_iterator import NetworkStatus +from hummingbot.core.utils.async_utils import safe_gather + + +class InjectiveV2ExchangeTests(AbstractExchangeConnectorTests.ExchangeConnectorTests): + + @classmethod + def setUpClass(cls) -> None: + super().setUpClass() + cls.base_asset = "INJ" + cls.quote_asset = "USDT" + cls.base_asset_denom = "inj" + cls.quote_asset_denom = "peggy0x87aB3B4C8661e07D6372361211B96ed4Dc36B1B5" # noqa: mock + cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}" + cls.market_id = "0x0611780ba69656949525013d947713300f56c37b6175e02f26bffa495c3208fe" # noqa: mock + + _, grantee_private_key = PrivateKey.generate() + cls.trading_account_private_key = grantee_private_key.to_hex() + cls.trading_account_subaccount_index = 0 + _, granter_private_key = PrivateKey.generate() + granter_address = Address(bytes.fromhex(granter_private_key.to_public_key().to_hex())) + cls.portfolio_account_injective_address = granter_address.to_acc_bech32() + cls.portfolio_account_subaccount_index = 0 + portfolio_adderss = Address.from_acc_bech32(cls.portfolio_account_injective_address) + cls.portfolio_account_subaccount_id = portfolio_adderss.get_subaccount_id( + index=cls.portfolio_account_subaccount_index + ) + cls.base_decimals = 18 + cls.quote_decimals = 6 + + def setUp(self) -> None: + super().setUp() + self._original_async_loop = asyncio.get_event_loop() + self.async_loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.async_loop) + self._logs_event: Optional[asyncio.Event] = None + self.exchange._data_source.logger().setLevel(1) + self.exchange._data_source.logger().addHandler(self) + + self.exchange._orders_processing_delta_time = 0.1 + self.async_tasks.append(self.async_loop.create_task(self.exchange._process_queued_orders())) + + def tearDown(self) -> None: + super().tearDown() + self.async_loop.stop() + self.async_loop.close() + asyncio.set_event_loop(self._original_async_loop) + self._logs_event = None + + def handle(self, record): + super().handle(record=record) + if self._logs_event is not None: + self._logs_event.set() + + def reset_log_event(self): + if self._logs_event is not None: + self._logs_event.clear() + + async def wait_for_a_log(self): + if self._logs_event is not None: + await self._logs_event.wait() + + @property + def all_symbols_url(self): + raise NotImplementedError + + @property + def latest_prices_url(self): + raise NotImplementedError + + @property + def network_status_url(self): + raise NotImplementedError + + @property + def trading_rules_url(self): + raise NotImplementedError + + @property + def order_creation_url(self): + raise NotImplementedError + + @property + def balance_url(self): + raise NotImplementedError + + @property + def all_symbols_request_mock_response(self): + raise NotImplementedError + + @property + def latest_prices_request_mock_response(self): + return { + "trades": [ + { + "orderHash": "0x9ffe4301b24785f09cb529c1b5748198098b17bd6df8fe2744d923a574179229", # noqa: mock + "subaccountId": "0xa73ad39eab064051fb468a5965ee48ca87ab66d4000000000000000000000000", # noqa: mock + "marketId": 
"0x0611780ba69656949525013d947713300f56c37b6175e02f26bffa495c3208fe", # noqa: mock + "tradeExecutionType": "limitMatchRestingOrder", + "tradeDirection": "sell", + "price": { + "price": str(Decimal(str(self.expected_latest_price)) * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "quantity": "142000000000000000000", + "timestamp": "1688734042063" + }, + "fee": "-112393", + "executedAt": "1688734042063", + "feeRecipient": "inj15uad884tqeq9r76x3fvktmjge2r6kek55c2zpa", # noqa: mock + "tradeId": "13374245_801_0", + "executionSide": "maker" + } + ], + "paging": { + "total": "1000", + "from": 1, + "to": 1 + } + } + + @property + def all_symbols_including_invalid_pair_mock_response(self) -> Tuple[str, Any]: + response = self.all_markets_mock_response + response.append({ + "marketId": "invalid_market_id", + "marketStatus": "active", + "ticker": "INVALID/MARKET", + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.000000000000001", + "minQuantityTickSize": "1000000000000000" + }) + + return ("INVALID_MARKET", response) + + @property + def network_status_request_successful_mock_response(self): + return {} + + @property + def trading_rules_request_mock_response(self): + raise NotImplementedError + + @property + def trading_rules_request_erroneous_mock_response(self): + return [{ + "marketId": self.market_id, + "marketStatus": "active", + "ticker": f"{self.base_asset}/{self.quote_asset}", + "baseDenom": self.base_asset_denom, + "baseTokenMeta": { + "name": "Base Asset", + "address": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30", # noqa: mock + "symbol": self.base_asset, + "logo": "https://static.alchemyapi.io/images/assets/7226.png", + "decimals": 18, + "updatedAt": "1687190809715" + }, + "quoteDenom": self.quote_asset_denom, # noqa: mock + "quoteTokenMeta": { + "name": "Quote Asset", + "address": "0x0000000000000000000000000000000000000000", # noqa: mock + "symbol": self.quote_asset, + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": 6, + "updatedAt": "1687190809716" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + }] + + @property + def order_creation_request_successful_mock_response(self): + return {"txhash": "017C130E3602A48E5C9D661CAC657BF1B79262D4B71D5C25B1DA62DE2338DA0E", "rawLog": "[]"} # noqa: mock + + @property + def balance_request_mock_response_for_base_and_quote(self): + return { + "accountAddress": self.portfolio_account_injective_address, + "bankBalances": [ + { + "denom": self.base_asset_denom, + "amount": str(Decimal(5) * Decimal(1e18)) + }, + { + "denom": self.quote_asset_denom, + "amount": str(Decimal(1000) * Decimal(1e6)) + } + ], + "subaccounts": [ + { + "subaccountId": self.portfolio_account_subaccount_id, + "denom": self.quote_asset_denom, + "deposit": { + "totalBalance": str(Decimal(1000) * Decimal(1e6)), + "availableBalance": str(Decimal(1000) * Decimal(1e6)) + } + }, + { + "subaccountId": self.portfolio_account_subaccount_id, + "denom": self.base_asset_denom, + "deposit": { + "totalBalance": str(Decimal(10) * Decimal(1e18)), + "availableBalance": str(Decimal(5) * Decimal(1e18)) + } + }, + ] + } + + @property + def balance_request_mock_response_only_base(self): + return { + "accountAddress": self.portfolio_account_injective_address, + "bankBalances": [ + { + "denom": self.base_asset_denom, + "amount": str(Decimal(5) * Decimal(1e18)) + }, + ], + "subaccounts": [ + { + "subaccountId": self.portfolio_account_subaccount_id, + "denom": 
self.base_asset_denom, + "deposit": { + "totalBalance": str(Decimal(10) * Decimal(1e18)), + "availableBalance": str(Decimal(5) * Decimal(1e18)) + } + }, + ] + } + + @property + def balance_event_websocket_update(self): + return { + "balance": { + "subaccountId": self.portfolio_account_subaccount_id, + "accountAddress": self.portfolio_account_injective_address, + "denom": self.base_asset_denom, + "deposit": { + "totalBalance": str(Decimal(15) * Decimal(1e18)), + "availableBalance": str(Decimal(10) * Decimal(1e18)), + } + }, + "timestamp": "1688659208000" + } + + @property + def expected_latest_price(self): + return 9999.9 + + @property + def expected_supported_order_types(self) -> List[OrderType]: + return [OrderType.LIMIT, OrderType.LIMIT_MAKER] + + @property + def expected_trading_rule(self): + market_info = self.all_markets_mock_response[0] + min_price_tick_size = (Decimal(market_info["minPriceTickSize"]) + * Decimal(f"1e{market_info['baseTokenMeta']['decimals']-market_info['quoteTokenMeta']['decimals']}")) + min_quantity_tick_size = Decimal(market_info["minQuantityTickSize"]) * Decimal( + f"1e{-market_info['baseTokenMeta']['decimals']}") + trading_rule = TradingRule( + trading_pair=self.trading_pair, + min_order_size=min_quantity_tick_size, + min_price_increment=min_price_tick_size, + min_base_amount_increment=min_quantity_tick_size, + min_quote_amount_increment=min_price_tick_size, + ) + + return trading_rule + + @property + def expected_logged_error_for_erroneous_trading_rule(self): + erroneous_rule = self.trading_rules_request_erroneous_mock_response[0] + return f"Error parsing the trading pair rule: {erroneous_rule}. Skipping..." + + @property + def expected_exchange_order_id(self): + return "0x3870fbdd91f07d54425147b1bb96404f4f043ba6335b422a6d494d285b387f00" # noqa: mock + + @property + def is_order_fill_http_update_included_in_status_update(self) -> bool: + return True + + @property + def is_order_fill_http_update_executed_during_websocket_order_event_processing(self) -> bool: + raise NotImplementedError + + @property + def expected_partial_fill_price(self) -> Decimal: + return Decimal("100") + + @property + def expected_partial_fill_amount(self) -> Decimal: + return Decimal("10") + + @property + def expected_fill_fee(self) -> TradeFeeBase: + return AddedToCostTradeFee( + percent_token=self.quote_asset, flat_fees=[TokenAmount(token=self.quote_asset, amount=Decimal("30"))] + ) + + @property + def expected_fill_trade_id(self) -> str: + return "10414162_22_33" + + @property + def all_markets_mock_response(self): + return [{ + "marketId": self.market_id, + "marketStatus": "active", + "ticker": f"{self.base_asset}/{self.quote_asset}", + "baseDenom": self.base_asset_denom, + "baseTokenMeta": { + "name": "Base Asset", + "address": "0xe28b3B32B6c345A34Ff64674606124Dd5Aceca30", # noqa: mock + "symbol": self.base_asset, + "logo": "https://static.alchemyapi.io/images/assets/7226.png", + "decimals": self.base_decimals, + "updatedAt": "1687190809715" + }, + "quoteDenom": self.quote_asset_denom, # noqa: mock + "quoteTokenMeta": { + "name": "Quote Asset", + "address": "0x0000000000000000000000000000000000000000", # noqa: mock + "symbol": self.quote_asset, + "logo": "https://static.alchemyapi.io/images/assets/825.png", + "decimals": self.quote_decimals, + "updatedAt": "1687190809716" + }, + "makerFeeRate": "-0.0001", + "takerFeeRate": "0.001", + "serviceProviderFee": "0.4", + "minPriceTickSize": "0.000000000000001", + "minQuantityTickSize": "1000000000000000" + }] + + def 
exchange_symbol_for_tokens(self, base_token: str, quote_token: str) -> str: + return self.market_id + + def create_exchange_instance(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + + exchange = InjectiveV2Exchange( + client_config_map=client_config_map, + injective_private_key=self.trading_account_private_key, + injective_subaccount_index=self.trading_account_subaccount_index, + injective_granter_address=self.portfolio_account_injective_address, + injective_granter_subaccount_index=self.portfolio_account_subaccount_index, + trading_pairs=[self.trading_pair], + domain=CONSTANTS.TESTNET_DOMAIN, + ) + + exchange._data_source._query_executor = ProgrammableQueryExecutor() + exchange._data_source._market_and_trading_pair_map = bidict({self.market_id: self.trading_pair}) + return exchange + + def validate_auth_credentials_present(self, request_call: RequestCall): + raise NotImplementedError + + def validate_order_creation_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def validate_order_cancelation_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def validate_order_status_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def validate_trades_request(self, order: InFlightOrder, request_call: RequestCall): + raise NotImplementedError + + def configure_all_symbols_response( + self, mock_api: aioresponses, callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + all_markets_mock_response = self.all_markets_mock_response + self.exchange._data_source._query_executor._spot_markets_responses.put_nowait(all_markets_mock_response) + return "" + + def configure_trading_rules_response( + self, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> List[str]: + + self.configure_all_symbols_response(mock_api=mock_api, callback=callback) + return "" + + def configure_erroneous_trading_rules_response( + self, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> List[str]: + + response = self.trading_rules_request_erroneous_mock_response + self.exchange._data_source._query_executor._spot_markets_responses = asyncio.Queue() + self.exchange._data_source._query_executor._spot_markets_responses.put_nowait(response) + return "" + + def configure_successful_cancelation_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + response = self._order_cancelation_request_successful_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + return "" + + def configure_erroneous_cancelation_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + response = self._order_cancelation_request_erroneous_mock_response(order=order) 
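+ # The canned error response is delivered through the mocked send_transaction queue, so the cancel request is answered without touching the network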
+ mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + return "" + + def configure_order_not_found_error_cancelation_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> str: + raise NotImplementedError + + def configure_one_successful_one_erroneous_cancel_all_response( + self, + successful_order: InFlightOrder, + erroneous_order: InFlightOrder, + mock_api: aioresponses + ) -> List[str]: + raise NotImplementedError + + def configure_completely_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None, + ) -> List[str]: + self.configure_all_symbols_response(mock_api=mock_api) + response = self._order_status_request_completely_filled_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return [] + + def configure_canceled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> Union[str, List[str]]: + self.configure_all_symbols_response(mock_api=mock_api) + + self.exchange._data_source._query_executor._spot_trades_responses.put_nowait({"trades": [], "paging": {"total": "0"}}) + + response = self._order_status_request_canceled_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return [] + + def configure_open_order_status_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> List[str]: + self.configure_all_symbols_response(mock_api=mock_api) + + self.exchange._data_source._query_executor._spot_trades_responses.put_nowait( + {"trades": [], "paging": {"total": "0"}}) + + response = self._order_status_request_open_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return [] + + def configure_http_error_order_status_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + self.configure_all_symbols_response(mock_api=mock_api) + + mock_queue = AsyncMock() + mock_queue.get.side_effect = IOError("Test error for trades responses") + self.exchange._data_source._query_executor._spot_trades_responses = mock_queue + + mock_queue = AsyncMock() + mock_queue.get.side_effect = IOError("Test error for historical orders responses") + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return None + + def configure_partially_filled_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + self.configure_all_symbols_response(mock_api=mock_api) + response = 
self._order_status_request_partially_filled_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return None + + def configure_order_not_found_error_order_status_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> List[str]: + self.configure_all_symbols_response(mock_api=mock_api) + response = self._order_status_request_not_found_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._historical_spot_orders_responses = mock_queue + return [] + + def configure_partial_fill_trade_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + response = self._order_fills_request_partial_fill_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._spot_trades_responses = mock_queue + return None + + def configure_erroneous_http_fill_trade_response( + self, + order: InFlightOrder, + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None + ) -> str: + mock_queue = AsyncMock() + mock_queue.get.side_effect = IOError("Test error for trades responses") + self.exchange._data_source._query_executor._spot_trades_responses = mock_queue + return None + + def configure_full_fill_trade_response(self, order: InFlightOrder, mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: None) -> str: + response = self._order_fills_request_full_fill_mock_response(order=order) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial(self._callback_wrapper_with_response, callback=callback, response=response) + self.exchange._data_source._query_executor._spot_trades_responses = mock_queue + return [] + + def order_event_for_new_order_websocket_update(self, order: InFlightOrder): + return { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": "0", + "state": "booked", + "createdAt": "1688667498756", + "updatedAt": "1688667498756", + "direction": order.trade_type.name.lower(), + "txHash": "0x0000000000000000000000000000000000000000000000000000000000000000" # noqa: mock + } + + def order_event_for_canceled_order_websocket_update(self, order: InFlightOrder): + return { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": 
str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": "0", + "state": "canceled", + "createdAt": "1688667498756", + "updatedAt": "1688667498756", + "direction": order.trade_type.name.lower(), + "txHash": "0x0000000000000000000000000000000000000000000000000000000000000000" # noqa: mock + } + + def order_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "state": "filled", + "createdAt": "1688476825015", + "updatedAt": "1688476825015", + "direction": order.trade_type.name.lower(), + "txHash": order.creation_transaction_hash + } + + def trade_event_for_full_fill_websocket_update(self, order: InFlightOrder): + return { + "orderHash": order.exchange_order_id, + "subaccountId": self.portfolio_account_subaccount_id, + "marketId": self.market_id, + "tradeExecutionType": "limitMatchRestingOrder", + "tradeDirection": order.trade_type.name.lower(), + "price": { + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "timestamp": "1687878089569" + }, + "fee": str(self.expected_fill_fee.flat_fees[0].amount * Decimal(f"1e{self.quote_decimals}")), + "executedAt": "1687878089569", + "feeRecipient": self.portfolio_account_injective_address, # noqa: mock + "tradeId": self.expected_fill_trade_id, + "executionSide": "maker" + } + + @aioresponses() + def test_all_trading_pairs_does_not_raise_exception(self, mock_api): + self.exchange._set_trading_pair_symbol_map(None) + self.exchange._data_source._market_and_trading_pair_map = None + queue_mock = AsyncMock() + queue_mock.get.side_effect = Exception("Test error") + self.exchange._data_source._query_executor._spot_markets_responses = queue_mock + + result: List[str] = self.async_run_with_timeout(self.exchange.all_trading_pairs(), timeout=10) + + self.assertEqual(0, len(result)) + + def test_batch_order_create(self): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + self.exchange._data_source._order_hash_manager = MagicMock(spec=OrderHashManager) + self.exchange._data_source._order_hash_manager.compute_order_hashes.return_value = OrderHashResponse( + spot=["hash1", "hash2"], derivative=[] + ) + + # Configure all symbols response to initialize the trading rules + self.configure_all_symbols_response(mock_api=None) + self.async_run_with_timeout(self.exchange._update_trading_rules()) + + buy_order_to_create = LimitOrder( + client_order_id="", + trading_pair=self.trading_pair, + is_buy=True, + base_currency=self.base_asset, + quote_currency=self.quote_asset, + price=Decimal("10"), + quantity=Decimal("2"), + ) + sell_order_to_create = LimitOrder( + client_order_id="", + trading_pair=self.trading_pair, + is_buy=False, + base_currency=self.base_asset, + quote_currency=self.quote_asset, + price=Decimal("11"), + quantity=Decimal("3"), + ) + orders_to_create = [buy_order_to_create, sell_order_to_create] + + transaction_simulation_response = 
self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + + response = self.order_creation_request_successful_mock_response + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + orders: List[LimitOrder] = self.exchange.batch_order_create(orders_to_create=orders_to_create) + + buy_order_to_create_in_flight = GatewayInFlightOrder( + client_order_id=orders[0].client_order_id, + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + creation_timestamp=1640780000, + price=orders[0].price, + amount=orders[0].quantity, + exchange_order_id="hash1", + creation_transaction_hash=response["txhash"] + ) + sell_order_to_create_in_flight = GatewayInFlightOrder( + client_order_id=orders[1].client_order_id, + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.SELL, + creation_timestamp=1640780000, + price=orders[1].price, + amount=orders[1].quantity, + exchange_order_id="hash2", + creation_transaction_hash=response["txhash"] + ) + + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEqual(2, len(orders)) + self.assertEqual(2, len(self.exchange.in_flight_orders)) + + self.assertIn(buy_order_to_create_in_flight.client_order_id, self.exchange.in_flight_orders) + self.assertIn(sell_order_to_create_in_flight.client_order_id, self.exchange.in_flight_orders) + + self.assertEqual( + buy_order_to_create_in_flight.exchange_order_id, + self.exchange.in_flight_orders[buy_order_to_create_in_flight.client_order_id].exchange_order_id + ) + self.assertEqual( + buy_order_to_create_in_flight.creation_transaction_hash, + self.exchange.in_flight_orders[buy_order_to_create_in_flight.client_order_id].creation_transaction_hash + ) + self.assertEqual( + sell_order_to_create_in_flight.exchange_order_id, + self.exchange.in_flight_orders[sell_order_to_create_in_flight.client_order_id].exchange_order_id + ) + self.assertEqual( + sell_order_to_create_in_flight.creation_transaction_hash, + self.exchange.in_flight_orders[sell_order_to_create_in_flight.client_order_id].creation_transaction_hash + ) + + @aioresponses() + def test_create_buy_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + self.exchange._data_source._order_hash_manager = MagicMock(spec=OrderHashManager) + self.exchange._data_source._order_hash_manager.compute_order_hashes.return_value = OrderHashResponse( + spot=["hash1"], derivative=[] + ) + + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + + response = self.order_creation_request_successful_mock_response + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEqual(1, 
len(self.exchange.in_flight_orders)) + self.assertIn(order_id, self.exchange.in_flight_orders) + + order = self.exchange.in_flight_orders[order_id] + + self.assertEqual("hash1", order.exchange_order_id) + self.assertEqual(response["txhash"], order.creation_transaction_hash) + + @aioresponses() + def test_create_sell_limit_order_successfully(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + self.exchange._data_source._order_hash_manager = MagicMock(spec=OrderHashManager) + self.exchange._data_source._order_hash_manager.compute_order_hashes.return_value = OrderHashResponse( + spot=["hash1"], derivative=[] + ) + + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + + response = self.order_creation_request_successful_mock_response + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + order_id = self.place_sell_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEqual(1, len(self.exchange.in_flight_orders)) + self.assertIn(order_id, self.exchange.in_flight_orders) + + order = self.exchange.in_flight_orders[order_id] + + self.assertEqual("hash1", order.exchange_order_id) + self.assertEqual(response["txhash"], order.creation_transaction_hash) + + @aioresponses() + def test_create_order_fails_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + self.exchange._data_source._order_hash_manager = MagicMock(spec=OrderHashManager) + self.exchange._data_source._order_hash_manager.compute_order_hashes.return_value = OrderHashResponse( + spot=["hash1"], derivative=[] + ) + + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + + response = {"txhash": "", "rawLog": "Error"} + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn(order_id, self.exchange.in_flight_orders) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. 
Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + @aioresponses() + def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(self, mock_api): + self._simulate_trading_rules_initialized() + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + order_id_for_invalid_order = self.place_buy_order( + amount=Decimal("0.0001"), price=Decimal("0.0001") + ) + # The second order is used only to have the event triggered and avoid using timeouts for tests + self.exchange._data_source._order_hash_manager = MagicMock(spec=OrderHashManager) + self.exchange._data_source._order_hash_manager.compute_order_hashes.return_value = OrderHashResponse( + spot=["hash1"], derivative=[] + ) + + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait( + transaction_simulation_response) + + response = {"txhash": "", "rawLog": "Error"} + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + order_id = self.place_buy_order() + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertNotIn(order_id_for_invalid_order, self.exchange.in_flight_orders) + self.assertNotIn(order_id, self.exchange.in_flight_orders) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order_id_for_invalid_order, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "WARNING", + "Buy order amount 0.0001 is lower than the minimum order size 0.01. The order will not be created, " + "increase the amount to be higher than the minimum order size." + ) + ) + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order_id} has failed. 
Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + def test_batch_order_cancel(self): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id="11", + exchange_order_id=self.expected_exchange_order_id + "1", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + self.exchange.start_tracking_order( + order_id="12", + exchange_order_id=self.expected_exchange_order_id + "2", + trading_pair=self.trading_pair, + trade_type=TradeType.SELL, + price=Decimal("11000"), + amount=Decimal("110"), + order_type=OrderType.LIMIT, + ) + + buy_order_to_cancel: GatewayInFlightOrder = self.exchange.in_flight_orders["11"] + sell_order_to_cancel: GatewayInFlightOrder = self.exchange.in_flight_orders["12"] + orders_to_cancel = [buy_order_to_cancel, sell_order_to_cancel] + + transaction_simulation_response = self._msg_exec_simulation_mock_response() + self.exchange._data_source._query_executor._simulate_transaction_responses.put_nowait(transaction_simulation_response) + + response = self._order_cancelation_request_successful_mock_response(order=buy_order_to_cancel) + mock_queue = AsyncMock() + mock_queue.get.side_effect = partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=response + ) + self.exchange._data_source._query_executor._send_transaction_responses = mock_queue + + self.exchange.batch_order_cancel(orders_to_cancel=orders_to_cancel) + + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertIn(buy_order_to_cancel.client_order_id, self.exchange.in_flight_orders) + self.assertIn(sell_order_to_cancel.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(buy_order_to_cancel.is_pending_cancel_confirmation) + self.assertEqual(response["txhash"], buy_order_to_cancel.cancel_tx_hash) + self.assertTrue(sell_order_to_cancel.is_pending_cancel_confirmation) + self.assertEqual(response["txhash"], sell_order_to_cancel.cancel_tx_hash) + + @aioresponses() + def test_cancel_order_not_found_in_the_exchange(self, mock_api): + # This test does not apply to Injective. The batch orders update message used for cancelations will not + detect whether the orders exist or not. That will happen when the transaction is executed. + pass + + @aioresponses() + def test_cancel_two_orders_with_cancel_all_and_one_fails(self, mock_api): + # This test does not apply to Injective. The batch orders update message used for cancelations will not + detect whether the orders exist or not. That will happen when the transaction is executed. 
+ pass + + def test_order_not_found_in_its_creating_transaction_marked_as_failed_during_order_creation_check(self): + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id="0x9f94598b4842ab66037eaa7c64ec10ae16dcf196e61db8522921628522c0f62e", # noqa: mock + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.assertIn(self.client_order_id_prefix + "1", self.exchange.in_flight_orders) + order: GatewayInFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + order.update_creation_transaction_hash(creation_transaction_hash="66A360DA2FD6884B53B5C019F1A2B5BED7C7C8FC07E83A9C36AD3362EDE096AE") # noqa: mock + + transaction_data = (b'\x12\xd1\x01\n8/injective.exchange.v1beta1.MsgBatchUpdateOrdersResponse' + b'\x12\x94\x01\n\x02\x00\x00\x12\x02\x00\x00\x1aB' + b'0xc5d66f56942e1ae407c01eedccd0471deb8e202a514cde3bae56a8307e376cd1' # noqa: mock + b'\x1aB' + b'0x115975551b4f86188eee6b93d789fcc78df6e89e40011b929299b6e142f53515' # noqa: mock + b'"\x00"\x00') + transaction_response = { + "s": "ok", + "data": { + "blockNumber": "13302254", + "blockTimestamp": "2023-07-05 13:55:09.94 +0000 UTC", + "hash": "0x66a360da2fd6884b53b5c019f1a2b5bed7c7c8fc07e83a9c36ad3362ede096ae", # noqa: mock + "data": base64.b64encode(transaction_data).decode(), + "gasWanted": "168306", + "gasUsed": "167769", + "gasFee": { + "amount": [ + { + "denom": "inj", + "amount": "84153000000000" + } + ], + "gasLimit": "168306", + "payer": "inj1hkhdaj2a2clmq5jq6mspsggqs32vynpk228q3r" # noqa: mock + }, + "txType": "injective", + "messages": "W3sidHlwZSI6Ii9pbmplY3RpdmUuZXhjaGFuZ2UudjFiZXRhMS5Nc2dCYXRjaFVwZGF0ZU9yZGVycyIsInZhbHVlIjp7InNlbmRlciI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInN1YmFjY291bnRfaWQiOiIiLCJzcG90X21hcmtldF9pZHNfdG9fY2FuY2VsX2FsbCI6W10sImRlcml2YXRpdmVfbWFya2V0X2lkc190b19jYW5jZWxfYWxsIjpbXSwic3BvdF9vcmRlcnNfdG9fY2FuY2VsIjpbeyJtYXJrZXRfaWQiOiIweDA2MTE3ODBiYTY5NjU2OTQ5NTI1MDEzZDk0NzcxMzMwMGY1NmMzN2I2MTc1ZTAyZjI2YmZmYTQ5NWMzMjA4ZmUiLCJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwib3JkZXJfaGFzaCI6IjB4Mzg3MGZiZGQ5MWYwN2Q1NDQyNTE0N2IxYmI5NjQwNGY0ZjA0M2JhNjMzNWI0MjJhNmQ0OTRkMjg1YjM4N2YyZCIsIm9yZGVyX21hc2siOjJ9LHsibWFya2V0X2lkIjoiMHg3YTU3ZTcwNWJiNGUwOWM4OGFlY2ZjMjk1NTY5NDgxZGJmMmZlMWQ1ZWZlMzY0NjUxZmJlNzIzODU5MzhlOWIwIiwic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsIm9yZGVyX2hhc2giOiIweDIyMmRhYTIyZjYwZmU5ZjA3NWVkMGNhNTgzNDU5ZTEyMWMyM2U2NDQzMWMzZmJmZmRlZGRhMDQ1OThlZGUwZDIiLCJvcmRlcl9tYXNrIjoyfV0sImRlcml2YXRpdmVfb3JkZXJzX3RvX2NhbmNlbCI6W3sibWFya2V0X2lkIjoiMHhkNWU0YjEyYjE5ZWNmMTc2ZTRlMTRiNDI5NDQ3MzFjMjc2Nzc4MTlkMmVkOTNiZTQxMDRhZDcwMjU1MjljN2ZmIiwic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsIm9yZGVyX2hhc2giOiIweDQ4NjkwMDEzYzM4MmQ1ZGJhZmY5OTg5ZGIwNDYyOWExNmE1ODE4ZDc1MjRlMDI3ZDUxN2NjYzg5ZmQwNjgxMDMiLCJvcmRlcl9tYXNrIjoyfSx7Im1hcmtldF9pZCI6IjB4OTBlNjYyMTkzZmEyOWEzYTdlNmMwN2JlNDQwN2M5NDgzM2U3NjJkOWVlODIxMzZhMmNjNzEyZDZiODdkN2RlMyIsInN1YmFjY291bnRfaWQiOiIweGJkYWVkZWM5NWQ1NjNmYjA1MjQwZDZlMDE4MjEwMDg0NTRjMjRjMzYwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAiLCJvcmRlcl9oYXNoIjoiMHg3ZWU3NjI1NWQ3Y2E3NjNjNTZiMGVhYjk4MjhmY2E4OWZkZDM3Mzk2NDU1MDFjOGE4MGY1OGI2MmI0Zjc2ZGE1Iiwib3JkZXJfbWFzayI6Mn1dLCJzcG90X29yZGVyc190b19jcmVhdGUi
Olt7Im1hcmtldF9pZCI6IjB4MDYxMTc4MGJhNjk2NTY5NDk1MjUwMTNkOTQ3NzEzMzAwZjU2YzM3YjYxNzVlMDJmMjZiZmZhNDk1YzMyMDhmZSIsIm9yZGVyX2luZm8iOnsic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsImZlZV9yZWNpcGllbnQiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IiLCJwcmljZSI6IjAuMDAwMDAwMDAwMDAzMDAwMDAwIiwicXVhbnRpdHkiOiI1NTAwMDAwMDAwMDAwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAifSwib3JkZXJfdHlwZSI6IkJVWSIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LHsibWFya2V0X2lkIjoiMHgwNjExNzgwYmE2OTY1Njk0OTUyNTAxM2Q5NDc3MTMzMDBmNTZjMzdiNjE3NWUwMmYyNmJmZmE0OTVjMzIwOGZlIiwib3JkZXJfaW5mbyI6eyJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwiZmVlX3JlY2lwaWVudCI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInByaWNlIjoiMC4wMDAwMDAwMDAzMDAwMDAwMDAiLCJxdWFudGl0eSI6IjU1MDAwMDAwMDAwMDAwMDAwMDAwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LCJvcmRlcl90eXBlIjoiU0VMTCIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9XSwiZGVyaXZhdGl2ZV9vcmRlcnNfdG9fY3JlYXRlIjpbeyJtYXJrZXRfaWQiOiIweDkwZTY2MjE5M2ZhMjlhM2E3ZTZjMDdiZTQ0MDdjOTQ4MzNlNzYyZDllZTgyMTM2YTJjYzcxMmQ2Yjg3ZDdkZTMiLCJvcmRlcl9pbmZvIjp7InN1YmFjY291bnRfaWQiOiIweGJkYWVkZWM5NWQ1NjNmYjA1MjQwZDZlMDE4MjEwMDg0NTRjMjRjMzYwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAiLCJmZWVfcmVjaXBpZW50IjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIiwicHJpY2UiOiIyNTAwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAiLCJxdWFudGl0eSI6IjAuMTAwMDAwMDAwMDAwMDAwMDAwIn0sIm9yZGVyX3R5cGUiOiJCVVkiLCJtYXJnaW4iOiIyNTAwMDAwMDAwLjAwMDAwMDAwMDAwMDAwMDAwMCIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LHsibWFya2V0X2lkIjoiMHg5MGU2NjIxOTNmYTI5YTNhN2U2YzA3YmU0NDA3Yzk0ODMzZTc2MmQ5ZWU4MjEzNmEyY2M3MTJkNmI4N2Q3ZGUzIiwib3JkZXJfaW5mbyI6eyJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwiZmVlX3JlY2lwaWVudCI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInByaWNlIjoiNTAwMDAwMDAwMDAuMDAwMDAwMDAwMDAwMDAwMDAwIiwicXVhbnRpdHkiOiIwLjAxMDAwMDAwMDAwMDAwMDAwMCJ9LCJvcmRlcl90eXBlIjoiU0VMTCIsIm1hcmdpbiI6IjUwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAiLCJ0cmlnZ2VyX3ByaWNlIjoiMC4wMDAwMDAwMDAwMDAwMDAwMDAifV0sImJpbmFyeV9vcHRpb25zX29yZGVyc190b19jYW5jZWwiOltdLCJiaW5hcnlfb3B0aW9uc19tYXJrZXRfaWRzX3RvX2NhbmNlbF9hbGwiOltdLCJiaW5hcnlfb3B0aW9uc19vcmRlcnNfdG9fY3JlYXRlIjpbXX19XQ==", + "signatures": [ + { + "pubkey": "035ddc4d5642b9383e2f087b2ee88b7207f6286ebc9f310e9df1406eccc2c31813", # noqa: mock + "address": "inj1hkhdaj2a2clmq5jq6mspsggqs32vynpk228q3r", # noqa: mock + "sequence": "16450", + "signature": "S9atCwiVg9+8vTpbciuwErh54pJOAry3wHvbHT2fG8IumoE+7vfuoP7mAGDy2w9am+HHa1yv60VSWo3cRhWC9g==" + } + ], + "txNumber": "13182", + "blockUnixTimestamp": "1688565309940", + "logs": 
"W3sibXNnX2luZGV4IjowLCJldmVudHMiOlt7InR5cGUiOiJtZXNzYWdlIiwiYXR0cmlidXRlcyI6W3sia2V5IjoiYWN0aW9uIiwidmFsdWUiOiIvaW5qZWN0aXZlLmV4Y2hhbmdlLnYxYmV0YTEuTXNnQmF0Y2hVcGRhdGVPcmRlcnMifSx7ImtleSI6InNlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJtb2R1bGUiLCJ2YWx1ZSI6ImV4Y2hhbmdlIn1dfSx7InR5cGUiOiJjb2luX3NwZW50IiwiYXR0cmlidXRlcyI6W3sia2V5Ijoic3BlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJhbW91bnQiLCJ2YWx1ZSI6IjE2NTE2NTAwMHBlZ2d5MHg4N2FCM0I0Qzg2NjFlMDdENjM3MjM2MTIxMUI5NmVkNERjMzZCMUI1In1dfSx7InR5cGUiOiJjb2luX3JlY2VpdmVkIiwiYXR0cmlidXRlcyI6W3sia2V5IjoicmVjZWl2ZXIiLCJ2YWx1ZSI6ImluajE0dm5tdzJ3ZWUzeHRyc3FmdnBjcWczNWpnOXY3ajJ2ZHB6eDBrayJ9LHsia2V5IjoiYW1vdW50IiwidmFsdWUiOiIxNjUxNjUwMDBwZWdneTB4ODdhQjNCNEM4NjYxZTA3RDYzNzIzNjEyMTFCOTZlZDREYzM2QjFCNSJ9XX0seyJ0eXBlIjoidHJhbnNmZXIiLCJhdHRyaWJ1dGVzIjpbeyJrZXkiOiJyZWNpcGllbnQiLCJ2YWx1ZSI6ImluajE0dm5tdzJ3ZWUzeHRyc3FmdnBjcWczNWpnOXY3ajJ2ZHB6eDBrayJ9LHsia2V5Ijoic2VuZGVyIiwidmFsdWUiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IifSx7ImtleSI6ImFtb3VudCIsInZhbHVlIjoiMTY1MTY1MDAwcGVnZ3kweDg3YUIzQjRDODY2MWUwN0Q2MzcyMzYxMjExQjk2ZWQ0RGMzNkIxQjUifV19LHsidHlwZSI6Im1lc3NhZ2UiLCJhdHRyaWJ1dGVzIjpbeyJrZXkiOiJzZW5kZXIiLCJ2YWx1ZSI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciJ9XX0seyJ0eXBlIjoiY29pbl9zcGVudCIsImF0dHJpYnV0ZXMiOlt7ImtleSI6InNwZW5kZXIiLCJ2YWx1ZSI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciJ9LHsia2V5IjoiYW1vdW50IiwidmFsdWUiOiI1NTAwMDAwMDAwMDAwMDAwMDAwMGluaiJ9XX0seyJ0eXBlIjoiY29pbl9yZWNlaXZlZCIsImF0dHJpYnV0ZXMiOlt7ImtleSI6InJlY2VpdmVyIiwidmFsdWUiOiJpbmoxNHZubXcyd2VlM3h0cnNxZnZwY3FnMzVqZzl2N2oydmRwengwa2sifSx7ImtleSI6ImFtb3VudCIsInZhbHVlIjoiNTUwMDAwMDAwMDAwMDAwMDAwMDBpbmoifV19LHsidHlwZSI6InRyYW5zZmVyIiwiYXR0cmlidXRlcyI6W3sia2V5IjoicmVjaXBpZW50IiwidmFsdWUiOiJpbmoxNHZubXcyd2VlM3h0cnNxZnZwY3FnMzVqZzl2N2oydmRwengwa2sifSx7ImtleSI6InNlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJhbW91bnQiLCJ2YWx1ZSI6IjU1MDAwMDAwMDAwMDAwMDAwMDAwaW5qIn1dfSx7InR5cGUiOiJtZXNzYWdlIiwiYXR0cmlidXRlcyI6W3sia2V5Ijoic2VuZGVyIiwidmFsdWUiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IifV19XX1d" # noqa: mock + } + } + self.exchange._data_source._query_executor._transaction_by_hash_responses.put_nowait(transaction_response) + + original_order_hash_manager = self.exchange._data_source.order_hash_manager + + self.async_run_with_timeout(self.exchange._check_orders_creation_transactions()) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(order.client_order_id, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Order {order.client_order_id} has failed. 
Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{order.client_order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + self.assertNotEqual(original_order_hash_manager, self.exchange._data_source._order_hash_manager) + + def test_order_creation_check_waits_for_originating_transaction_to_be_mined(self): + request_sent_event = asyncio.Event() + self.exchange._set_current_timestamp(1640780000) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id="hash1", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("100"), + order_type=OrderType.LIMIT, + ) + + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "2", + exchange_order_id="hash2", + trading_pair=self.trading_pair, + trade_type=TradeType.BUY, + price=Decimal("20000"), + amount=Decimal("200"), + order_type=OrderType.LIMIT, + ) + + self.assertIn(self.client_order_id_prefix + "1", self.exchange.in_flight_orders) + self.assertIn(self.client_order_id_prefix + "2", self.exchange.in_flight_orders) + + hash_not_matching_order: GatewayInFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + hash_not_matching_order.update_creation_transaction_hash(creation_transaction_hash="66A360DA2FD6884B53B5C019F1A2B5BED7C7C8FC07E83A9C36AD3362EDE096AE") # noqa: mock + + no_mined_tx_order: GatewayInFlightOrder = self.exchange.in_flight_orders[self.client_order_id_prefix + "2"] + no_mined_tx_order.update_creation_transaction_hash( + creation_transaction_hash="HHHHHHHHHHHHHHH") + + transaction_data = (b'\x12\xd1\x01\n8/injective.exchange.v1beta1.MsgBatchUpdateOrdersResponse' + b'\x12\x94\x01\n\x02\x00\x00\x12\x02\x00\x00\x1aB' + b'0xc5d66f56942e1ae407c01eedccd0471deb8e202a514cde3bae56a8307e376cd1' # noqa: mock + b'\x1aB' + b'0x115975551b4f86188eee6b93d789fcc78df6e89e40011b929299b6e142f53515' # noqa: mock + b'"\x00"\x00') + transaction_response = { + "s": "ok", + "data": { + "blockNumber": "13302254", + "blockTimestamp": "2023-07-05 13:55:09.94 +0000 UTC", + "hash": "0x66a360da2fd6884b53b5c019f1a2b5bed7c7c8fc07e83a9c36ad3362ede096ae", # noqa: mock + "data": base64.b64encode(transaction_data).decode(), + "gasWanted": "168306", + "gasUsed": "167769", + "gasFee": { + "amount": [ + { + "denom": "inj", + "amount": "84153000000000" + } + ], + "gasLimit": "168306", + "payer": "inj1hkhdaj2a2clmq5jq6mspsggqs32vynpk228q3r" # noqa: mock + }, + "txType": "injective", + "messages": 
"W3sidHlwZSI6Ii9pbmplY3RpdmUuZXhjaGFuZ2UudjFiZXRhMS5Nc2dCYXRjaFVwZGF0ZU9yZGVycyIsInZhbHVlIjp7InNlbmRlciI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInN1YmFjY291bnRfaWQiOiIiLCJzcG90X21hcmtldF9pZHNfdG9fY2FuY2VsX2FsbCI6W10sImRlcml2YXRpdmVfbWFya2V0X2lkc190b19jYW5jZWxfYWxsIjpbXSwic3BvdF9vcmRlcnNfdG9fY2FuY2VsIjpbeyJtYXJrZXRfaWQiOiIweDA2MTE3ODBiYTY5NjU2OTQ5NTI1MDEzZDk0NzcxMzMwMGY1NmMzN2I2MTc1ZTAyZjI2YmZmYTQ5NWMzMjA4ZmUiLCJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwib3JkZXJfaGFzaCI6IjB4Mzg3MGZiZGQ5MWYwN2Q1NDQyNTE0N2IxYmI5NjQwNGY0ZjA0M2JhNjMzNWI0MjJhNmQ0OTRkMjg1YjM4N2YyZCIsIm9yZGVyX21hc2siOjJ9LHsibWFya2V0X2lkIjoiMHg3YTU3ZTcwNWJiNGUwOWM4OGFlY2ZjMjk1NTY5NDgxZGJmMmZlMWQ1ZWZlMzY0NjUxZmJlNzIzODU5MzhlOWIwIiwic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsIm9yZGVyX2hhc2giOiIweDIyMmRhYTIyZjYwZmU5ZjA3NWVkMGNhNTgzNDU5ZTEyMWMyM2U2NDQzMWMzZmJmZmRlZGRhMDQ1OThlZGUwZDIiLCJvcmRlcl9tYXNrIjoyfV0sImRlcml2YXRpdmVfb3JkZXJzX3RvX2NhbmNlbCI6W3sibWFya2V0X2lkIjoiMHhkNWU0YjEyYjE5ZWNmMTc2ZTRlMTRiNDI5NDQ3MzFjMjc2Nzc4MTlkMmVkOTNiZTQxMDRhZDcwMjU1MjljN2ZmIiwic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsIm9yZGVyX2hhc2giOiIweDQ4NjkwMDEzYzM4MmQ1ZGJhZmY5OTg5ZGIwNDYyOWExNmE1ODE4ZDc1MjRlMDI3ZDUxN2NjYzg5ZmQwNjgxMDMiLCJvcmRlcl9tYXNrIjoyfSx7Im1hcmtldF9pZCI6IjB4OTBlNjYyMTkzZmEyOWEzYTdlNmMwN2JlNDQwN2M5NDgzM2U3NjJkOWVlODIxMzZhMmNjNzEyZDZiODdkN2RlMyIsInN1YmFjY291bnRfaWQiOiIweGJkYWVkZWM5NWQ1NjNmYjA1MjQwZDZlMDE4MjEwMDg0NTRjMjRjMzYwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAiLCJvcmRlcl9oYXNoIjoiMHg3ZWU3NjI1NWQ3Y2E3NjNjNTZiMGVhYjk4MjhmY2E4OWZkZDM3Mzk2NDU1MDFjOGE4MGY1OGI2MmI0Zjc2ZGE1Iiwib3JkZXJfbWFzayI6Mn1dLCJzcG90X29yZGVyc190b19jcmVhdGUiOlt7Im1hcmtldF9pZCI6IjB4MDYxMTc4MGJhNjk2NTY5NDk1MjUwMTNkOTQ3NzEzMzAwZjU2YzM3YjYxNzVlMDJmMjZiZmZhNDk1YzMyMDhmZSIsIm9yZGVyX2luZm8iOnsic3ViYWNjb3VudF9pZCI6IjB4YmRhZWRlYzk1ZDU2M2ZiMDUyNDBkNmUwMTgyMTAwODQ1NGMyNGMzNjAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMCIsImZlZV9yZWNpcGllbnQiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IiLCJwcmljZSI6IjAuMDAwMDAwMDAwMDAzMDAwMDAwIiwicXVhbnRpdHkiOiI1NTAwMDAwMDAwMDAwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAifSwib3JkZXJfdHlwZSI6IkJVWSIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LHsibWFya2V0X2lkIjoiMHgwNjExNzgwYmE2OTY1Njk0OTUyNTAxM2Q5NDc3MTMzMDBmNTZjMzdiNjE3NWUwMmYyNmJmZmE0OTVjMzIwOGZlIiwib3JkZXJfaW5mbyI6eyJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwiZmVlX3JlY2lwaWVudCI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInByaWNlIjoiMC4wMDAwMDAwMDAzMDAwMDAwMDAiLCJxdWFudGl0eSI6IjU1MDAwMDAwMDAwMDAwMDAwMDAwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LCJvcmRlcl90eXBlIjoiU0VMTCIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9XSwiZGVyaXZhdGl2ZV9vcmRlcnNfdG9fY3JlYXRlIjpbeyJtYXJrZXRfaWQiOiIweDkwZTY2MjE5M2ZhMjlhM2E3ZTZjMDdiZTQ0MDdjOTQ4MzNlNzYyZDllZTgyMTM2YTJjYzcxMmQ2Yjg3ZDdkZTMiLCJvcmRlcl9pbmZvIjp7InN1YmFjY291bnRfaWQiOiIweGJkYWVkZWM5NWQ1NjNmYjA1MjQwZDZlMDE4MjEwMDg0NTRjMjRjMzYwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAiLCJmZWVfcmVjaXBpZW50IjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIiwicHJpY2UiOiIyNTAwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAiLCJxdWFudGl0eSI6IjAuMTAwMDAwMDAwMDAwMDAwMDAwIn0sIm9yZGVyX3R5cGUiOiJCVVkiLCJtYXJnaW4iOiIyNTAwMDAwMDAwLjAwMDAwMDAwMDAwMDAwMDAwMCIsInRyaWdnZXJfcHJpY2UiOiIwLjAwMDAwMDAwMDAwMDAwMDAwMCJ9LHsibWFya2V0X2lkIjoiMHg5MGU2NjIxOTNmYTI5YTNhN2U2YzA3YmU0NDA3Yzk0ODMzZTc2MmQ5ZWU4MjEzNmEyY2M3MTJkNmI4N2Q3ZGUzIi
wib3JkZXJfaW5mbyI6eyJzdWJhY2NvdW50X2lkIjoiMHhiZGFlZGVjOTVkNTYzZmIwNTI0MGQ2ZTAxODIxMDA4NDU0YzI0YzM2MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIiwiZmVlX3JlY2lwaWVudCI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciIsInByaWNlIjoiNTAwMDAwMDAwMDAuMDAwMDAwMDAwMDAwMDAwMDAwIiwicXVhbnRpdHkiOiIwLjAxMDAwMDAwMDAwMDAwMDAwMCJ9LCJvcmRlcl90eXBlIjoiU0VMTCIsIm1hcmdpbiI6IjUwMDAwMDAwMC4wMDAwMDAwMDAwMDAwMDAwMDAiLCJ0cmlnZ2VyX3ByaWNlIjoiMC4wMDAwMDAwMDAwMDAwMDAwMDAifV0sImJpbmFyeV9vcHRpb25zX29yZGVyc190b19jYW5jZWwiOltdLCJiaW5hcnlfb3B0aW9uc19tYXJrZXRfaWRzX3RvX2NhbmNlbF9hbGwiOltdLCJiaW5hcnlfb3B0aW9uc19vcmRlcnNfdG9fY3JlYXRlIjpbXX19XQ==", + "signatures": [ + { + "pubkey": "035ddc4d5642b9383e2f087b2ee88b7207f6286ebc9f310e9df1406eccc2c31813", # noqa: mock + "address": "inj1hkhdaj2a2clmq5jq6mspsggqs32vynpk228q3r", # noqa: mock + "sequence": "16450", + "signature": "S9atCwiVg9+8vTpbciuwErh54pJOAry3wHvbHT2fG8IumoE+7vfuoP7mAGDy2w9am+HHa1yv60VSWo3cRhWC9g==" + } + ], + "txNumber": "13182", + "blockUnixTimestamp": "1688565309940", + "logs": "W3sibXNnX2luZGV4IjowLCJldmVudHMiOlt7InR5cGUiOiJtZXNzYWdlIiwiYXR0cmlidXRlcyI6W3sia2V5IjoiYWN0aW9uIiwidmFsdWUiOiIvaW5qZWN0aXZlLmV4Y2hhbmdlLnYxYmV0YTEuTXNnQmF0Y2hVcGRhdGVPcmRlcnMifSx7ImtleSI6InNlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJtb2R1bGUiLCJ2YWx1ZSI6ImV4Y2hhbmdlIn1dfSx7InR5cGUiOiJjb2luX3NwZW50IiwiYXR0cmlidXRlcyI6W3sia2V5Ijoic3BlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJhbW91bnQiLCJ2YWx1ZSI6IjE2NTE2NTAwMHBlZ2d5MHg4N2FCM0I0Qzg2NjFlMDdENjM3MjM2MTIxMUI5NmVkNERjMzZCMUI1In1dfSx7InR5cGUiOiJjb2luX3JlY2VpdmVkIiwiYXR0cmlidXRlcyI6W3sia2V5IjoicmVjZWl2ZXIiLCJ2YWx1ZSI6ImluajE0dm5tdzJ3ZWUzeHRyc3FmdnBjcWczNWpnOXY3ajJ2ZHB6eDBrayJ9LHsia2V5IjoiYW1vdW50IiwidmFsdWUiOiIxNjUxNjUwMDBwZWdneTB4ODdhQjNCNEM4NjYxZTA3RDYzNzIzNjEyMTFCOTZlZDREYzM2QjFCNSJ9XX0seyJ0eXBlIjoidHJhbnNmZXIiLCJhdHRyaWJ1dGVzIjpbeyJrZXkiOiJyZWNpcGllbnQiLCJ2YWx1ZSI6ImluajE0dm5tdzJ3ZWUzeHRyc3FmdnBjcWczNWpnOXY3ajJ2ZHB6eDBrayJ9LHsia2V5Ijoic2VuZGVyIiwidmFsdWUiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IifSx7ImtleSI6ImFtb3VudCIsInZhbHVlIjoiMTY1MTY1MDAwcGVnZ3kweDg3YUIzQjRDODY2MWUwN0Q2MzcyMzYxMjExQjk2ZWQ0RGMzNkIxQjUifV19LHsidHlwZSI6Im1lc3NhZ2UiLCJhdHRyaWJ1dGVzIjpbeyJrZXkiOiJzZW5kZXIiLCJ2YWx1ZSI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciJ9XX0seyJ0eXBlIjoiY29pbl9zcGVudCIsImF0dHJpYnV0ZXMiOlt7ImtleSI6InNwZW5kZXIiLCJ2YWx1ZSI6ImluajFoa2hkYWoyYTJjbG1xNWpxNm1zcHNnZ3FzMzJ2eW5wazIyOHEzciJ9LHsia2V5IjoiYW1vdW50IiwidmFsdWUiOiI1NTAwMDAwMDAwMDAwMDAwMDAwMGluaiJ9XX0seyJ0eXBlIjoiY29pbl9yZWNlaXZlZCIsImF0dHJpYnV0ZXMiOlt7ImtleSI6InJlY2VpdmVyIiwidmFsdWUiOiJpbmoxNHZubXcyd2VlM3h0cnNxZnZwY3FnMzVqZzl2N2oydmRwengwa2sifSx7ImtleSI6ImFtb3VudCIsInZhbHVlIjoiNTUwMDAwMDAwMDAwMDAwMDAwMDBpbmoifV19LHsidHlwZSI6InRyYW5zZmVyIiwiYXR0cmlidXRlcyI6W3sia2V5IjoicmVjaXBpZW50IiwidmFsdWUiOiJpbmoxNHZubXcyd2VlM3h0cnNxZnZwY3FnMzVqZzl2N2oydmRwengwa2sifSx7ImtleSI6InNlbmRlciIsInZhbHVlIjoiaW5qMWhraGRhajJhMmNsbXE1anE2bXNwc2dncXMzMnZ5bnBrMjI4cTNyIn0seyJrZXkiOiJhbW91bnQiLCJ2YWx1ZSI6IjU1MDAwMDAwMDAwMDAwMDAwMDAwaW5qIn1dfSx7InR5cGUiOiJtZXNzYWdlIiwiYXR0cmlidXRlcyI6W3sia2V5Ijoic2VuZGVyIiwidmFsdWUiOiJpbmoxaGtoZGFqMmEyY2xtcTVqcTZtc3BzZ2dxczMydnlucGsyMjhxM3IifV19XX1d" # noqa: mock + } + } + mock_tx_by_hash_queue = AsyncMock() + mock_tx_by_hash_queue.get.side_effect = [transaction_response, ValueError("Transaction not found in a block")] + self.exchange._data_source._query_executor._transaction_by_hash_responses = mock_tx_by_hash_queue + + mock_queue = AsyncMock() + mock_queue.get.side_effect 
= partial( + self._callback_wrapper_with_response, + callback=lambda args, kwargs: request_sent_event.set(), + response=13302254 + ) + self.exchange._data_source._query_executor._transaction_block_height_responses = mock_queue + + original_order_hash_manager = self.exchange._data_source.order_hash_manager + + self.async_tasks.append( + asyncio.get_event_loop().create_task( + self.exchange._check_orders_creation_transactions() + ) + ) + + self.async_run_with_timeout(request_sent_event.wait()) + + self.assertEquals(0, len(self.buy_order_created_logger.event_log)) + failure_event: MarketOrderFailureEvent = self.order_failure_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, failure_event.timestamp) + self.assertEqual(OrderType.LIMIT, failure_event.order_type) + self.assertEqual(hash_not_matching_order.client_order_id, failure_event.order_id) + + self.assertTrue( + self.is_logged( + "INFO", + f"Order {hash_not_matching_order.client_order_id} has failed. Order Update: OrderUpdate(trading_pair='{self.trading_pair}', " + f"update_timestamp={self.exchange.current_timestamp}, new_state={repr(OrderState.FAILED)}, " + f"client_order_id='{hash_not_matching_order.client_order_id}', exchange_order_id=None, misc_updates=None)" + ) + ) + + self.assertNotEqual(original_order_hash_manager, self.exchange._data_source._order_hash_manager) + + mock_queue.get.assert_called() + + def test_user_stream_balance_update(self): + client_config_map = ClientConfigAdapter(ClientConfigMap()) + exchange_with_non_default_subaccount = InjectiveV2Exchange( + client_config_map=client_config_map, + injective_private_key=self.trading_account_private_key, + injective_subaccount_index=self.trading_account_subaccount_index, + injective_granter_address=self.portfolio_account_injective_address, + injective_granter_subaccount_index=1, + trading_pairs=[self.trading_pair], + domain=CONSTANTS.TESTNET_DOMAIN, + ) + + exchange_with_non_default_subaccount._data_source._query_executor = self.exchange._data_source._query_executor + self.exchange = exchange_with_non_default_subaccount + self.configure_all_symbols_response(mock_api=None) + self.exchange._set_current_timestamp(1640780000) + + balance_event = self.balance_event_websocket_update + + mock_queue = AsyncMock() + mock_queue.get.side_effect = [balance_event, asyncio.CancelledError] + self.exchange._data_source._query_executor._subaccount_balance_events = mock_queue + + try: + self.async_run_with_timeout(self.exchange._data_source._listen_to_account_balance_updates()) + except asyncio.CancelledError: + pass + + self.assertEqual(Decimal("10"), self.exchange.available_balances[self.base_asset]) + self.assertEqual(Decimal("15"), self.exchange.get_balance(self.base_asset)) + + def test_user_stream_update_for_new_order(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + order_event = self.order_event_for_new_order_websocket_update(order=order) + + mock_queue = AsyncMock() + event_messages = [order_event, asyncio.CancelledError] + mock_queue.get.side_effect = event_messages + self.exchange._data_source._query_executor._historical_spot_order_events = mock_queue + + try: + self.async_run_with_timeout( + 
self.exchange._data_source._listen_to_subaccount_order_updates(market_id=self.market_id) + ) + except asyncio.CancelledError: + pass + + event: BuyOrderCreatedEvent = self.buy_order_created_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, event.timestamp) + self.assertEqual(order.order_type, event.type) + self.assertEqual(order.trading_pair, event.trading_pair) + self.assertEqual(order.amount, event.amount) + self.assertEqual(order.price, event.price) + self.assertEqual(order.client_order_id, event.order_id) + self.assertEqual(order.exchange_order_id, event.exchange_order_id) + self.assertTrue(order.is_open) + + tracked_order: InFlightOrder = list(self.exchange.in_flight_orders.values())[0] + + self.assertTrue(self.is_logged("INFO", tracked_order.build_order_created_message())) + + def test_user_stream_update_for_canceled_order(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + order_event = self.order_event_for_canceled_order_websocket_update(order=order) + + mock_queue = AsyncMock() + event_messages = [order_event, asyncio.CancelledError] + mock_queue.get.side_effect = event_messages + self.exchange._data_source._query_executor._historical_spot_order_events = mock_queue + + try: + self.async_run_with_timeout( + self.exchange._data_source._listen_to_subaccount_order_updates(market_id=self.market_id) + ) + except asyncio.CancelledError: + pass + + cancel_event: OrderCancelledEvent = self.order_cancelled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, cancel_event.timestamp) + self.assertEqual(order.client_order_id, cancel_event.order_id) + self.assertEqual(order.exchange_order_id, cancel_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_cancelled) + self.assertTrue(order.is_done) + + self.assertTrue( + self.is_logged("INFO", f"Successfully canceled order {order.client_order_id}.") + ) + + @aioresponses() + def test_user_stream_update_for_order_full_fill(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + self.configure_all_symbols_response(mock_api=None) + order_event = self.order_event_for_full_fill_websocket_update(order=order) + trade_event = self.trade_event_for_full_fill_websocket_update(order=order) + + orders_queue_mock = AsyncMock() + trades_queue_mock = AsyncMock() + orders_messages = [] + trades_messages = [] + if trade_event: + trades_messages.append(trade_event) + if order_event: + orders_messages.append(order_event) + orders_messages.append(asyncio.CancelledError) + trades_messages.append(asyncio.CancelledError) + + orders_queue_mock.get.side_effect = orders_messages + trades_queue_mock.get.side_effect = trades_messages + self.exchange._data_source._query_executor._historical_spot_order_events = orders_queue_mock 
+ self.exchange._data_source._query_executor._public_spot_trade_updates = trades_queue_mock + + tasks = [ + asyncio.get_event_loop().create_task( + self.exchange._data_source._listen_to_public_trades(market_ids=[self.market_id]) + ), + asyncio.get_event_loop().create_task( + self.exchange._data_source._listen_to_subaccount_order_updates(market_id=self.market_id) + ) + ] + try: + self.async_run_with_timeout(safe_gather(*tasks)) + except asyncio.CancelledError: + pass + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(order.wait_until_completely_filled()) + + fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(order.price, fill_event.price) + self.assertEqual(order.amount, fill_event.amount) + expected_fee = self.expected_fill_fee + self.assertEqual(expected_fee, fill_event.trade_fee) + + buy_event: BuyOrderCompletedEvent = self.buy_order_completed_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, buy_event.timestamp) + self.assertEqual(order.client_order_id, buy_event.order_id) + self.assertEqual(order.base_asset, buy_event.base_asset) + self.assertEqual(order.quote_asset, buy_event.quote_asset) + self.assertEqual(order.amount, buy_event.base_asset_amount) + self.assertEqual(order.amount * fill_event.price, buy_event.quote_asset_amount) + self.assertEqual(order.order_type, buy_event.order_type) + self.assertEqual(order.exchange_order_id, buy_event.exchange_order_id) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertTrue(order.is_filled) + self.assertTrue(order.is_done) + + self.assertTrue( + self.is_logged( + "INFO", + f"BUY order {order.client_order_id} completely filled." 
+ ) + ) + + def test_user_stream_logs_errors(self): + # This test does not apply to Injective because it handles private events in its own data source + pass + + def test_user_stream_raises_cancel_exception(self): + # This test does not apply to Injective because it handles private events in its own data source + pass + + def test_lost_order_removed_after_cancel_status_user_event_received(self): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + for _ in range(self.exchange._order_tracker._lost_order_count_limit + 1): + self.async_run_with_timeout( + self.exchange._order_tracker.process_order_not_found(client_order_id=order.client_order_id)) + + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + + order_event = self.order_event_for_canceled_order_websocket_update(order=order) + + mock_queue = AsyncMock() + event_messages = [order_event, asyncio.CancelledError] + mock_queue.get.side_effect = event_messages + self.exchange._data_source._query_executor._historical_spot_order_events = mock_queue + + try: + self.async_run_with_timeout( + self.exchange._data_source._listen_to_subaccount_order_updates(market_id=self.market_id) + ) + except asyncio.CancelledError: + pass + + self.assertNotIn(order.client_order_id, self.exchange._order_tracker.lost_orders) + self.assertEqual(0, len(self.order_cancelled_logger.event_log)) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertFalse(order.is_cancelled) + self.assertTrue(order.is_failure) + + @aioresponses() + def test_lost_order_user_stream_full_fill_events_are_processed(self, mock_api): + self.exchange._set_current_timestamp(1640780000) + self.exchange.start_tracking_order( + order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + price=Decimal("10000"), + amount=Decimal("1"), + ) + order = self.exchange.in_flight_orders[self.client_order_id_prefix + "1"] + + for _ in range(self.exchange._order_tracker._lost_order_count_limit + 1): + self.async_run_with_timeout( + self.exchange._order_tracker.process_order_not_found(client_order_id=order.client_order_id)) + + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + + self.configure_all_symbols_response(mock_api=None) + order_event = self.order_event_for_full_fill_websocket_update(order=order) + trade_event = self.trade_event_for_full_fill_websocket_update(order=order) + + orders_queue_mock = AsyncMock() + trades_queue_mock = AsyncMock() + orders_messages = [] + trades_messages = [] + if trade_event: + trades_messages.append(trade_event) + if order_event: + orders_messages.append(order_event) + orders_messages.append(asyncio.CancelledError) + trades_messages.append(asyncio.CancelledError) + + orders_queue_mock.get.side_effect = orders_messages + trades_queue_mock.get.side_effect = trades_messages + self.exchange._data_source._query_executor._historical_spot_order_events = orders_queue_mock + self.exchange._data_source._query_executor._public_spot_trade_updates = trades_queue_mock + + tasks = [ + asyncio.get_event_loop().create_task( 
+ self.exchange._data_source._listen_to_public_trades(market_ids=[self.market_id]) + ), + asyncio.get_event_loop().create_task( + self.exchange._data_source._listen_to_subaccount_order_updates(market_id=self.market_id) + ) + ] + try: + self.async_run_with_timeout(safe_gather(*tasks)) + except asyncio.CancelledError: + pass + # Execute one more synchronization to ensure the async task that processes the update is finished + self.async_run_with_timeout(order.wait_until_completely_filled()) + + fill_event: OrderFilledEvent = self.order_filled_logger.event_log[0] + self.assertEqual(self.exchange.current_timestamp, fill_event.timestamp) + self.assertEqual(order.client_order_id, fill_event.order_id) + self.assertEqual(order.trading_pair, fill_event.trading_pair) + self.assertEqual(order.trade_type, fill_event.trade_type) + self.assertEqual(order.order_type, fill_event.order_type) + self.assertEqual(order.price, fill_event.price) + self.assertEqual(order.amount, fill_event.amount) + expected_fee = self.expected_fill_fee + self.assertEqual(expected_fee, fill_event.trade_fee) + + self.assertEqual(0, len(self.buy_order_completed_logger.event_log)) + self.assertNotIn(order.client_order_id, self.exchange.in_flight_orders) + self.assertNotIn(order.client_order_id, self.exchange._order_tracker.lost_orders) + self.assertTrue(order.is_filled) + self.assertTrue(order.is_failure) + + @aioresponses() + def test_invalid_trading_pair_not_in_all_trading_pairs(self, mock_api): + self.exchange._set_trading_pair_symbol_map(None) + + invalid_pair, response = self.all_symbols_including_invalid_pair_mock_response + self.exchange._data_source._query_executor._spot_markets_responses.put_nowait(response) + + all_trading_pairs = self.async_run_with_timeout(coroutine=self.exchange.all_trading_pairs()) + + self.assertNotIn(invalid_pair, all_trading_pairs) + + @aioresponses() + def test_check_network_success(self, mock_api): + response = self.network_status_request_successful_mock_response + self.exchange._data_source._query_executor._ping_responses.put_nowait(response) + + network_status = self.async_run_with_timeout(coroutine=self.exchange.check_network(), timeout=10) + + self.assertEqual(NetworkStatus.CONNECTED, network_status) + + @aioresponses() + def test_check_network_failure(self, mock_api): + mock_queue = AsyncMock() + mock_queue.get.side_effect = RpcError("Test Error") + self.exchange._data_source._query_executor._ping_responses = mock_queue + + ret = self.async_run_with_timeout(coroutine=self.exchange.check_network()) + + self.assertEqual(ret, NetworkStatus.NOT_CONNECTED) + + @aioresponses() + def test_check_network_raises_cancel_exception(self, mock_api): + mock_queue = AsyncMock() + mock_queue.get.side_effect = asyncio.CancelledError() + self.exchange._data_source._query_executor._ping_responses = mock_queue + + self.assertRaises(asyncio.CancelledError, self.async_run_with_timeout, self.exchange.check_network()) + + @aioresponses() + def test_get_last_trade_prices(self, mock_api): + self.configure_all_symbols_response(mock_api=mock_api) + response = self.latest_prices_request_mock_response + self.exchange._data_source._query_executor._spot_trades_responses.put_nowait(response) + + latest_prices: Dict[str, float] = self.async_run_with_timeout( + self.exchange.get_last_traded_prices(trading_pairs=[self.trading_pair]) + ) + + self.assertEqual(1, len(latest_prices)) + self.assertEqual(self.expected_latest_price, latest_prices[self.trading_pair]) + + def test_get_fee(self): + 
self.configure_all_symbols_response(mock_api=None) + self.async_run_with_timeout(self.exchange._update_trading_fees()) + + maker_fee_rate = Decimal(self.all_markets_mock_response[0]["makerFeeRate"]) + taker_fee_rate = Decimal(self.all_markets_mock_response[0]["takerFeeRate"]) + + maker_fee = self.exchange.get_fee( + base_currency=self.base_asset, + quote_currency=self.quote_asset, + order_type=OrderType.LIMIT, + order_side=TradeType.BUY, + amount=Decimal("1000"), + price=Decimal("5"), + is_maker=True + ) + + self.assertEqual(maker_fee_rate, maker_fee.percent) + self.assertEqual(self.quote_asset, maker_fee.percent_token) + + taker_fee = self.exchange.get_fee( + base_currency=self.base_asset, + quote_currency=self.quote_asset, + order_type=OrderType.LIMIT, + order_side=TradeType.BUY, + amount=Decimal("1000"), + price=Decimal("5"), + is_maker=False, + ) + + self.assertEqual(taker_fee_rate, taker_fee.percent) + self.assertEqual(self.quote_asset, maker_fee.percent_token) + + def test_restore_tracking_states_only_registers_open_orders(self): + orders = [] + orders.append(GatewayInFlightOrder( + client_order_id=self.client_order_id_prefix + "1", + exchange_order_id=str(self.expected_exchange_order_id), + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + )) + orders.append(GatewayInFlightOrder( + client_order_id=self.client_order_id_prefix + "2", + exchange_order_id=self.exchange_order_id_prefix + "2", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.CANCELED + )) + orders.append(GatewayInFlightOrder( + client_order_id=self.client_order_id_prefix + "3", + exchange_order_id=self.exchange_order_id_prefix + "3", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.FILLED + )) + orders.append(GatewayInFlightOrder( + client_order_id=self.client_order_id_prefix + "4", + exchange_order_id=self.exchange_order_id_prefix + "4", + trading_pair=self.trading_pair, + order_type=OrderType.LIMIT, + trade_type=TradeType.BUY, + amount=Decimal("1000.0"), + price=Decimal("1.0"), + creation_timestamp=1640001112.223, + initial_state=OrderState.FAILED + )) + + tracking_states = {order.client_order_id: order.to_json() for order in orders} + + self.exchange.restore_tracking_states(tracking_states) + + self.assertIn(self.client_order_id_prefix + "1", self.exchange.in_flight_orders) + self.assertNotIn(self.client_order_id_prefix + "2", self.exchange.in_flight_orders) + self.assertNotIn(self.client_order_id_prefix + "3", self.exchange.in_flight_orders) + self.assertNotIn(self.client_order_id_prefix + "4", self.exchange.in_flight_orders) + + def _expected_initial_status_dict(self) -> Dict[str, bool]: + status_dict = super()._expected_initial_status_dict() + status_dict["data_source_initialized"] = False + return status_dict + + @staticmethod + def _callback_wrapper_with_response(callback: Callable, response: Any, *args, **kwargs): + callback(args, kwargs) + if isinstance(response, Exception): + raise response + else: + return response + + def _configure_balance_response( + self, + response: Dict[str, Any], + mock_api: aioresponses, + callback: Optional[Callable] = lambda *args, **kwargs: 
None, + ) -> str: + all_markets_mock_response = self.all_markets_mock_response + self.exchange._data_source._query_executor._spot_markets_responses.put_nowait(all_markets_mock_response) + self.exchange._data_source._query_executor._account_portfolio_responses.put_nowait(response) + return "" + + def _msg_exec_simulation_mock_response(self) -> Any: + return { + "gasInfo": { + "gasWanted": "50000000", + "gasUsed": "90749" + }, + "result": { + "data": "Em8KJS9jb3Ntb3MuYXV0aHoudjFiZXRhMS5Nc2dFeGVjUmVzcG9uc2USRgpECkIweGYxNGU5NGMxZmQ0MjE0M2I3ZGRhZjA4ZDE3ZWMxNzAzZGMzNzZlOWU2YWI0YjY0MjBhMzNkZTBhZmFlYzJjMTA=", + "log": "", + "events": [], + "msgResponses": [ + OrderedDict([ + ("@type", "/cosmos.authz.v1beta1.MsgExecResponse"), + ("results", [ + "CkIweGYxNGU5NGMxZmQ0MjE0M2I3ZGRhZjA4ZDE3ZWMxNzAzZGMzNzZlOWU2YWI0YjY0MjBhMzNkZTBhZmFlYzJjMTA="]) + ]) + ] + } + } + + def _order_cancelation_request_successful_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "[]"} # noqa: mock + + def _order_cancelation_request_erroneous_mock_response(self, order: InFlightOrder) -> Dict[str, Any]: + return {"txhash": "79DBF373DE9C534EE2DC9D009F32B850DA8D0C73833FAA0FD52C6AE8989EC659", "rawLog": "Error"} # noqa: mock + + def _order_status_request_open_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "orders": [ + { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": "0", + "state": "booked", + "createdAt": "1688476825015", + "updatedAt": "1688476825015", + "direction": order.trade_type.name.lower(), + "txHash": order.creation_transaction_hash + }, + ], + "paging": { + "total": "1" + }, + } + + def _order_status_request_partially_filled_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "orders": [ + { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": str(self.expected_partial_fill_amount * Decimal(f"1e{self.base_decimals}")), + "state": "partial_filled", + "createdAt": "1688476825015", + "updatedAt": "1688476825015", + "direction": order.trade_type.name.lower(), + "txHash": order.creation_transaction_hash + }, + ], + "paging": { + "total": "1" + }, + } + + def _order_status_request_completely_filled_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "orders": [ + { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - 
self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "state": "filled", + "createdAt": "1688476825015", + "updatedAt": "1688476825015", + "direction": order.trade_type.name.lower(), + "txHash": order.creation_transaction_hash + }, + ], + "paging": { + "total": "1" + }, + } + + def _order_status_request_canceled_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "orders": [ + { + "orderHash": order.exchange_order_id, + "marketId": self.market_id, + "isActive": True, + "subaccountId": self.portfolio_account_subaccount_id, + "executionType": "market" if order.order_type == OrderType.MARKET else "limit", + "orderType": order.trade_type.name.lower(), + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "triggerPrice": "0", + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "filledQuantity": "0", + "state": "canceled", + "createdAt": "1688476825015", + "updatedAt": "1688476825015", + "direction": order.trade_type.name.lower(), + "txHash": order.creation_transaction_hash + }, + ], + "paging": { + "total": "1" + }, + } + + def _order_status_request_not_found_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "orders": [], + "paging": { + "total": "0" + }, + } + + def _order_fills_request_partial_fill_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "trades": [ + { + "orderHash": order.exchange_order_id, + "subaccountId": self.portfolio_account_subaccount_id, + "marketId": self.market_id, + "tradeExecutionType": "limitFill", + "tradeDirection": order.trade_type.name.lower(), + "price": { + "price": str(self.expected_partial_fill_price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "quantity": str(self.expected_partial_fill_amount * Decimal(f"1e{self.base_decimals}")), + "timestamp": "1681735786785" + }, + "fee": str(self.expected_fill_fee.flat_fees[0].amount * Decimal(f"1e{self.quote_decimals}")), + "executedAt": "1681735786785", + "feeRecipient": self.portfolio_account_injective_address, + "tradeId": self.expected_fill_trade_id, + "executionSide": "maker" + }, + ], + "paging": { + "total": "1", + "from": 1, + "to": 1 + } + } + + def _order_fills_request_full_fill_mock_response(self, order: GatewayInFlightOrder) -> Dict[str, Any]: + return { + "trades": [ + { + "orderHash": order.exchange_order_id, + "subaccountId": self.portfolio_account_subaccount_id, + "marketId": self.market_id, + "tradeExecutionType": "limitFill", + "tradeDirection": order.trade_type.name.lower(), + "price": { + "price": str(order.price * Decimal(f"1e{self.quote_decimals - self.base_decimals}")), + "quantity": str(order.amount * Decimal(f"1e{self.base_decimals}")), + "timestamp": "1681735786785" + }, + "fee": str(self.expected_fill_fee.flat_fees[0].amount * Decimal(f"1e{self.quote_decimals}")), + "executedAt": "1681735786785", + "feeRecipient": self.portfolio_account_injective_address, + "tradeId": self.expected_fill_trade_id, + "executionSide": "maker" + }, + ], + "paging": { + "total": "1", + "from": 1, + "to": 1 + } + } diff --git a/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py b/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py index 4a8fba7826a..5dfe3e7a748 100644 --- a/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py +++ 
b/test/hummingbot/connector/exchange/kucoin/test_kucoin_exchange.py @@ -737,7 +737,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel trading_pair=self.trading_pair, amount=Decimal("0.0001"), order_type=OrderType.LIMIT, - price=Decimal("0.0000001"))) + price=Decimal("0.0001"))) # The second order is used only to have the event triggered and avoid using timeouts for tests asyncio.get_event_loop().create_task( self.exchange._create_order(trade_type=TradeType.BUY, @@ -759,7 +759,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self._is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." ) ) self.assertTrue( diff --git a/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py b/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py index f99397fe9cd..2f741e6920a 100644 --- a/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py +++ b/test/hummingbot/connector/exchange/polkadex/test_polkadex_exchange.py @@ -766,7 +766,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel ) order_id_for_invalid_order = self.place_buy_order( - amount=Decimal("0.0001"), price=Decimal("0.0000001") + amount=Decimal("0.0001"), price=Decimal("0.0001") ) # The second order is used only to have the event triggered and avoid using timeouts for tests order_id = self.place_buy_order() @@ -784,7 +784,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self.is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." ) ) self.assertTrue( diff --git a/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py b/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py index 91805d131ff..7e6e7d3b048 100644 --- a/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py +++ b/test/hummingbot/connector/exchange/vertex/test_vertex_exchange.py @@ -934,7 +934,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel trading_pair=self.trading_pair, amount=Decimal("0.0001"), order_type=OrderType.LIMIT, - price=Decimal("0.0000001"), + price=Decimal("0.0001"), ) ) # The second order is used only to have the event triggered and avoid using timeouts for tests @@ -961,7 +961,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self._is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.01. The order will not be created.", + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.01. The order will not be created, increase the " + "amount to be higher than the minimum order size." 
) ) self.assertTrue( diff --git a/test/hummingbot/connector/gateway/clob_perp/test_gateway_clob_perp.py b/test/hummingbot/connector/gateway/clob_perp/test_gateway_clob_perp.py index 14a7eea3ebb..70b4e0a853a 100644 --- a/test/hummingbot/connector/gateway/clob_perp/test_gateway_clob_perp.py +++ b/test/hummingbot/connector/gateway/clob_perp/test_gateway_clob_perp.py @@ -630,7 +630,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self.is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.001. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.001. The order will not be created, increase the " + "amount to be higher than the minimum order size." ) ) self.assertTrue( diff --git a/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py b/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py index 814abd74ae6..8b2f7c8d1d1 100644 --- a/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py +++ b/test/hummingbot/connector/gateway/clob_spot/test_gateway_clob_spot.py @@ -592,7 +592,7 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.clob_data_source_mock.configure_place_order_fails_response(exception=RuntimeError("some error")) order_id_for_invalid_order = self.place_buy_order( - size=Decimal("0.0001"), price=Decimal("0.0000001") + size=Decimal("0.0001"), price=Decimal("0.0001") ) # The second order is used only to have the event triggered and avoid using timeouts for tests order_id = self.place_buy_order() @@ -610,7 +610,9 @@ def test_create_order_fails_when_trading_rule_error_and_raises_failure_event(sel self.assertTrue( self.is_logged( "WARNING", - "Buy order amount 0 is lower than the minimum order size 0.001. The order will not be created." + "Buy order amount 0.0001 is lower than the minimum order " + "size 0.001. The order will not be created, increase the " + "amount to be higher than the minimum order size." 
) ) self.assertTrue(
diff --git a/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/__init__.py b/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py b/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py
new file mode 100644
index 00000000000..d68e57b8da6
--- /dev/null
+++ b/test/hummingbot/data_feed/candles_feed/ascend_ex_spot_candles/test_ascend_ex_spot_candles.py
@@ -0,0 +1,311 @@
+import asyncio
+import json
+import re
+import unittest
+from typing import Awaitable
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from aioresponses import aioresponses
+
+from hummingbot.connector.test_support.network_mocking_assistant import NetworkMockingAssistant
+from hummingbot.data_feed.candles_feed.ascend_ex_spot_candles import AscendExSpotCandles, constants as CONSTANTS
+
+
+class TestAscendExSpotCandles(unittest.TestCase):
+    # the level is required to receive logs from the data source logger
+    level = 0
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        super().setUpClass()
+        cls.ev_loop = asyncio.get_event_loop()
+        cls.base_asset = "BTC"
+        cls.quote_asset = "USDT"
+        cls.interval = "1h"
+        cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
+        cls.ex_trading_pair = cls.base_asset + "/" + cls.quote_asset
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.mocking_assistant = NetworkMockingAssistant()
+        self.data_feed = AscendExSpotCandles(trading_pair=self.trading_pair, interval=self.interval)
+
+        self.log_records = []
+        self.data_feed.logger().setLevel(1)
+        self.data_feed.logger().addHandler(self)
+        self.resume_test_event = asyncio.Event()
+
+    def handle(self, record):
+        self.log_records.append(record)
+
+    def is_logged(self, log_level: str, message: str) -> bool:
+        return any(
+            record.levelname == log_level and record.getMessage() == message for
+            record in self.log_records)
+
+    def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 2):
+        ret = asyncio.get_event_loop().run_until_complete(asyncio.wait_for(coroutine, timeout))
+        return ret
+
+    def get_candles_rest_data_mock(self):
+        data = {
+            "code": 0,
+            "data": [
+                {
+                    "m": "bar",
+                    "s": "BTC/USDT",
+                    "data": {
+                        "i": "1",
+                        "ts": 1688973840000,
+                        "o": "30105.52",
+                        "c": "30099.41",
+                        "h": "30115.58",
+                        "l": "30098.19",
+                        "v": "0.13736"
+                    }
+                },
+                {
+                    "m": "bar",
+                    "s": "BTC/USDT",
+                    "data": {
+                        "i": "1",
+                        "ts": 1688973900000,
+                        "o": "30096.84",
+                        "c": "30097.88",
+                        "h": "30115.67",
+                        "l": "30096.84",
+                        "v": "0.16625"
+                    }
+                },
+                {
+                    "m": "bar",
+                    "s": "BTC/USDT",
+                    "data": {
+                        "i": "1",
+                        "ts": 1688973960000,
+                        "o": "30092.53",
+                        "c": "30087.11",
+                        "h": "30115.97",
+                        "l": "30087.11",
+                        "v": "0.06992"
+                    }
+                },
+                {
+                    "m": "bar",
+                    "s": "BTC/USDT",
+                    "data": {
+                        "i": "1",
+                        "ts": 1688974020000,
+                        "o": "30086.51",
+                        "c": "30102.34",
+                        "h": "30102.34",
+                        "l": "30082.68",
+                        "v": "0.14145"
+                    }
+                },
+                {
+                    "m": "bar",
+                    "s": "BTC/USDT",
+                    "data": {
+                        "i": "1",
+                        "ts": 1688974080000,
+                        "o": "30095.93",
+                        "c": "30085.25",
+                        "h": "30103.04",
+                        "l": "30077.94",
+                        "v": "0.15819"
+                    }
+                }
+            ]
+        }
+        return data
+
+    def get_candles_ws_data_mock_1(self):
+        data = {
+            "m": "bar",
+            "s": "BTC/USDT",
+            "data": {
+                "i": "1",
+                "ts": 1575398940000,
+                "o": "0.04993",
+                "c": "0.04970",
+                "h": "0.04993",
+                "l": "0.04970",
+                "v": "8052"
+            }
+        }
+        return data
+
+    def get_candles_ws_data_mock_2(self):
+        data = {
+            "m": "bar",
+            "s": "BTC/USDT",
+            "data": {
+                "i": "1",
+                "ts": 1575398950000,
+                "o": "0.04993",
+                "c": "0.04970",
+                "h": "0.04993",
+                "l": "0.04970",
+                "v": "8052"
+            }
+        }
+        return data
+
+    @aioresponses()
+    def test_fetch_candles(self, mock_api: aioresponses):
+        start_time = 1685167200
+        end_time = 1685172600
+        url = f"{CONSTANTS.REST_URL}{CONSTANTS.CANDLES_ENDPOINT}"
+        regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
+        data_mock = self.get_candles_rest_data_mock()
+        mock_api.get(url=regex_url, body=json.dumps(data_mock))
+
+        resp = self.async_run_with_timeout(self.data_feed.fetch_candles(start_time=start_time, end_time=end_time))
+
+        self.assertEqual(resp.shape[0], len(data_mock['data']))
+        self.assertEqual(resp.shape[1], 10)
+
+    def test_candles_empty(self):
+        self.assertTrue(self.data_feed.candles_df.empty)
+
+    @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
+    def test_listen_for_subscriptions_subscribes_to_klines(self, ws_connect_mock):
+        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
+
+        result_subscribe_klines = {
+            "result": None,
+            "id": 1
+        }
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(result_subscribe_klines))
+
+        self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+
+        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)
+
+        sent_subscription_messages = self.mocking_assistant.json_messages_sent_through_websocket(
+            websocket_mock=ws_connect_mock.return_value)
+
+        self.assertEqual(1, len(sent_subscription_messages))
+        expected_kline_subscription = {
+            "op": CONSTANTS.SUB_ENDPOINT_NAME,
+            "ch": f"bar:{CONSTANTS.INTERVALS[self.interval]}:{self.ex_trading_pair}"
+        }
+        self.assertEqual(expected_kline_subscription["ch"], sent_subscription_messages[0]["ch"])
+
+        self.assertTrue(self.is_logged(
+            "INFO",
+            "Subscribed to public klines..."
+        ))
+
+    @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles._sleep")
+    @patch("aiohttp.ClientSession.ws_connect")
+    def test_listen_for_subscriptions_raises_cancel_exception(self, mock_ws, _: AsyncMock):
+        mock_ws.side_effect = asyncio.CancelledError
+
+        with self.assertRaises(asyncio.CancelledError):
+            self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+            self.async_run_with_timeout(self.listening_task)
+
+    @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles._sleep")
+    @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
+    def test_listen_for_subscriptions_logs_exception_details(self, mock_ws, sleep_mock: AsyncMock):
+        mock_ws.side_effect = Exception("TEST ERROR.")
+        sleep_mock.side_effect = lambda _: self._create_exception_and_unlock_test_with_event(
+            asyncio.CancelledError())
+
+        self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+
+        self.async_run_with_timeout(self.resume_test_event.wait())
+
+        self.assertTrue(
+            self.is_logged(
+                "ERROR",
+                "Unexpected error occurred when listening to public klines. Retrying in 1 seconds..."))
+
+    def test_subscribe_channels_raises_cancel_exception(self):
+        mock_ws = MagicMock()
+        mock_ws.send.side_effect = asyncio.CancelledError
+
+        with self.assertRaises(asyncio.CancelledError):
+            self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws))
+            self.async_run_with_timeout(self.listening_task)
+
+    def test_subscribe_channels_raises_exception_and_logs_error(self):
+        mock_ws = MagicMock()
+        mock_ws.send.side_effect = Exception("Test Error")
+
+        with self.assertRaises(Exception):
+            self.listening_task = self.ev_loop.create_task(self.data_feed._subscribe_channels(mock_ws))
+            self.async_run_with_timeout(self.listening_task)
+
+        self.assertTrue(
+            self.is_logged("ERROR", "Unexpected error occurred subscribing to public klines...")
+        )
+
+    @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles",
+           new_callable=AsyncMock)
+    @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
+    def test_process_websocket_messages_empty_candle(self, ws_connect_mock, fill_historical_candles_mock):
+        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(self.get_candles_ws_data_mock_1()))
+
+        self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+
+        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)
+
+        self.assertEqual(self.data_feed.candles_df.shape[0], 1)
+        self.assertEqual(self.data_feed.candles_df.shape[1], 10)
+        fill_historical_candles_mock.assert_called_once()
+
+    @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles",
+           new_callable=AsyncMock)
+    @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
+    def test_process_websocket_messages_duplicated_candle_not_included(self, ws_connect_mock, fill_historical_candles):
+        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
+        fill_historical_candles.return_value = None
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(self.get_candles_ws_data_mock_1()))
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(self.get_candles_ws_data_mock_1()))
+
+        self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+
+        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)
+
+        self.assertEqual(self.data_feed.candles_df.shape[0], 1)
+        self.assertEqual(self.data_feed.candles_df.shape[1], 10)
+
+    @patch("hummingbot.data_feed.candles_feed.ascend_ex_spot_candles.AscendExSpotCandles.fill_historical_candles")
+    @patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
+    def test_process_websocket_messages_with_two_valid_messages(self, ws_connect_mock, _):
+        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(self.get_candles_ws_data_mock_1()))
+
+        self.mocking_assistant.add_websocket_aiohttp_message(
+            websocket_mock=ws_connect_mock.return_value,
+            message=json.dumps(self.get_candles_ws_data_mock_2()))
+
+        self.listening_task = self.ev_loop.create_task(self.data_feed.listen_for_subscriptions())
+
+        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value, timeout=2)
+
+        self.assertEqual(self.data_feed.candles_df.shape[0], 2)
+        self.assertEqual(self.data_feed.candles_df.shape[1], 10)
+
+    def _create_exception_and_unlock_test_with_event(self, exception):
+        self.resume_test_event.set()
+        raise exception
diff --git a/test/hummingbot/data_feed/test_amm_gateway_data_feed.py b/test/hummingbot/data_feed/test_amm_gateway_data_feed.py
new file mode 100644
index 00000000000..ba0f831c946
--- /dev/null
+++ b/test/hummingbot/data_feed/test_amm_gateway_data_feed.py
@@ -0,0 +1,60 @@
+import asyncio
+from decimal import Decimal
+from test.isolated_asyncio_wrapper_test_case import IsolatedAsyncioWrapperTestCase
+from test.logger_mxin import LogLevel, TestLoggerMixin
+from unittest.mock import AsyncMock, patch
+
+from hummingbot.core.network_iterator import NetworkStatus
+from hummingbot.data_feed.amm_gateway_data_feed import AmmGatewayDataFeed
+
+
+class TestAmmGatewayDataFeed(IsolatedAsyncioWrapperTestCase, TestLoggerMixin):
+
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+        cls.data_feed = AmmGatewayDataFeed(
+            connector_chain_network="connector_chain_network",
+            trading_pairs={"HBOT-USDT"},
+            order_amount_in_base=Decimal("1"),
+        )
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.set_loggers(loggers=[self.data_feed.logger()])
+
+    @patch("hummingbot.data_feed.amm_gateway_data_feed.AmmGatewayDataFeed.gateway_client", new_callable=AsyncMock)
+    async def test_check_network_connected(self, gateway_client_mock: AsyncMock):
+        gateway_client_mock.ping_gateway.return_value = True
+        self.assertEqual(NetworkStatus.CONNECTED, await self.data_feed.check_network())
+
+    @patch("hummingbot.data_feed.amm_gateway_data_feed.AmmGatewayDataFeed.gateway_client", new_callable=AsyncMock)
+    async def test_check_network_not_connected(self, gateway_client_mock: AsyncMock):
+        gateway_client_mock.ping_gateway.return_value = False
+        self.assertEqual(NetworkStatus.NOT_CONNECTED, await self.data_feed.check_network())
+        self.assertTrue(self.is_logged(log_level=LogLevel.WARNING,
+                                       message="Gateway is not online. Please check your gateway connection.", ))
+
+    @patch("hummingbot.data_feed.amm_gateway_data_feed.AmmGatewayDataFeed._fetch_data", new_callable=AsyncMock)
+    async def test_fetch_data_loop_exception(self, fetch_data_mock: AsyncMock):
+        fetch_data_mock.side_effect = [Exception("test exception"), asyncio.CancelledError()]
+        try:
+            await self.data_feed._fetch_data_loop()
+        except asyncio.CancelledError:
+            pass
+        self.assertEqual(2, fetch_data_mock.call_count)
+        self.assertTrue(
+            self.is_logged(log_level=LogLevel.ERROR,
+                           message="Error getting data from AmmDataFeed[connector_chain_network]Check network "
+                                   "connection. Error: test exception"))
+
+    @patch("hummingbot.data_feed.amm_gateway_data_feed.AmmGatewayDataFeed.gateway_client", new_callable=AsyncMock)
+    async def test_fetch_data_successful(self, gateway_client_mock: AsyncMock):
+        gateway_client_mock.get_price.side_effect = [{"price": "1"}, {"price": "2"}]
+        try:
+            await self.data_feed._fetch_data()
+        except asyncio.CancelledError:
+            pass
+        self.assertEqual(2, gateway_client_mock.get_price.call_count)
+        self.assertEqual(Decimal("1"), self.data_feed.price_dict["HBOT-USDT"].buy_price)
+        self.assertEqual(Decimal("2"), self.data_feed.price_dict["HBOT-USDT"].sell_price)
diff --git a/test/hummingbot/strategy/amm_arb/test_amm_arb.py b/test/hummingbot/strategy/amm_arb/test_amm_arb.py
index 1c2246e14b8..3bb63926192 100644
--- a/test/hummingbot/strategy/amm_arb/test_amm_arb.py
+++ b/test/hummingbot/strategy/amm_arb/test_amm_arb.py
@@ -26,7 +26,6 @@
 from hummingbot.core.utils.fixed_rate_source import FixedRateSource
 from hummingbot.core.utils.tracking_nonce import get_tracking_nonce
 from hummingbot.strategy.amm_arb.amm_arb import AmmArbStrategy
-from hummingbot.strategy.amm_arb.data_types import ArbProposal, ArbProposalSide
 from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple
 
 TRADING_PAIR: str = "HBOT-USDT"
@@ -323,52 +322,6 @@ async def test_non_concurrent_orders_submission(self):
         # Check if new order is submitted when arb opportunity still presents
         self.assertNotEqual(amm_1_order.client_order_id, new_amm_1_order.client_order_id)
 
-    @async_test(loop=ev_loop)
-    async def test_format_status(self):
-        first_side = ArbProposalSide(
-            self.market_info_1,
-            True,
-            Decimal(101),
-            Decimal(100),
-            Decimal(50),
-            []
-        )
-        second_side = ArbProposalSide(
-            self.market_info_2,
-            False,
-            Decimal(105),
-            Decimal(104),
-            Decimal(50),
-            []
-        )
-        self.strategy._all_arb_proposals = [ArbProposal(first_side, second_side)]
-
-        expected_status = """ Markets:
- Exchange Market Sell Price Buy Price Mid Price
- onion HBOT-USDT 100.00000000 101.00000000 100.50000000
- garlic HBOT-USDT 104.00000000 105.00000000 104.50000000
-
- Network Fees:
- Exchange Gas Fees
- onion 0 ETH
- garlic 0 ETH
-
- Assets:
- Exchange Asset Total Balance Available Balance
- 0 onion HBOT 500 500
- 1 onion USDT 500 500
- 2 garlic HBOT 500 500
- 3 garlic USDT 500 500
-
- Profitability:
- buy at onion, sell at garlic: 3.96%
-
- Quotes Rates (fixed rates)
- Quotes pair Rate
- 0 USDT-USDT 1"""
-        current_status = await self.strategy.format_status()
-        self.assertTrue(expected_status in current_status)
-
     @async_test(loop=ev_loop)
     async def test_arb_not_profitable_from_gas_prices(self):
         self.amm_1.set_prices(TRADING_PAIR, True, 101)