diff --git a/fastlane_bot/events/managers/contracts.py b/fastlane_bot/events/managers/contracts.py index 9963aabee..63044e375 100644 --- a/fastlane_bot/events/managers/contracts.py +++ b/fastlane_bot/events/managers/contracts.py @@ -12,7 +12,7 @@ from web3 import Web3 from web3.contract import Contract -from fastlane_bot.data.abi import BANCOR_V3_NETWORK_INFO_ABI, ERC20_ABI +from fastlane_bot.data.abi import BANCOR_V3_NETWORK_INFO_ABI, ERC20_ABI, BANCOR_POL_ABI from fastlane_bot.events.managers.base import BaseManager @@ -53,6 +53,13 @@ def init_exchange_contracts(self): abi=BANCOR_V3_NETWORK_INFO_ABI, name="BancorNetwork", ) + elif exchange_name == "bancor_pol": + self.pool_contracts[exchange_name][ + self.cfg.BANCOR_POL_ADDRESS + ] = self.web3.eth.contract( + address=self.cfg.BANCOR_POL_ADDRESS, + abi=BANCOR_POL_ABI, + ) @staticmethod def get_or_create_token_contracts( diff --git a/fastlane_bot/events/managers/manager.py b/fastlane_bot/events/managers/manager.py index eb9903930..78098e64b 100644 --- a/fastlane_bot/events/managers/manager.py +++ b/fastlane_bot/events/managers/manager.py @@ -121,7 +121,7 @@ def update_from_pool_info( ) pool = self.get_or_init_pool(pool_info) params = pool.update_from_contract( - contract, self.tenderly_fork_id, self.w3_tenderly + contract, self.tenderly_fork_id, self.w3_tenderly, self.web3 ) for key, value in params.items(): pool_info[key] = value @@ -182,6 +182,7 @@ def update_from_contract( contract, tenderly_fork_id=self.tenderly_fork_id, w3_tenderly=self.w3_tenderly, + w3=self.web3, ) for key, value in params.items(): pool_info[key] = value @@ -216,6 +217,7 @@ def update_from_erc20_balance( contract=pool_contract, tenderly_fork_id=self.tenderly_fork_id, w3_tenderly=self.w3_tenderly, + w3=self.web3, ) params["last_updated_block"] = current_block diff --git a/fastlane_bot/events/managers/pools.py b/fastlane_bot/events/managers/pools.py index f5bc1ff6c..596e2ecc0 100644 --- a/fastlane_bot/events/managers/pools.py +++ b/fastlane_bot/events/managers/pools.py @@ -273,7 +273,7 @@ def add_pool_info( if contract: pool_info.update( pool.update_from_contract( - contract, self.tenderly_fork_id, self.w3_tenderly + contract, self.tenderly_fork_id, self.w3_tenderly, self.web3 ) ) diff --git a/fastlane_bot/events/pools/bancor_pol.py b/fastlane_bot/events/pools/bancor_pol.py index 6676a32e7..0d6f32886 100644 --- a/fastlane_bot/events/pools/bancor_pol.py +++ b/fastlane_bot/events/pools/bancor_pol.py @@ -25,6 +25,7 @@ class BancorPolPool(Pool): exchange_name: str = "bancor_pol" ONE = 2**48 + contract: Contract = None @staticmethod def unique_key() -> str: @@ -83,7 +84,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. 
@@ -96,13 +97,13 @@ def update_from_contract( p0 = 0 p1 = 0 - tkn_balance = self.get_erc20_tkn_balance(contract, tkn0, w3_tenderly) + tkn_balance = self.get_erc20_tkn_balance(contract, tkn0, w3_tenderly, w3) if tenderly_fork_id: contract = w3_tenderly.eth.contract( abi=BANCOR_POL_ABI, address=contract.address ) - + try: p0, p1 = contract.functions.tokenPrice(tkn0).call() except web3.exceptions.BadFunctionCallOutput: @@ -128,7 +129,7 @@ def update_from_contract( @staticmethod def get_erc20_tkn_balance( - contract: Contract, tkn0: str, w3_tenderly: Web3 = None + contract: Contract, tkn0: str, w3_tenderly: Web3 = None, w3: Web3 = None ) -> int: """ Get the ERC20 token balance of the POL contract @@ -141,6 +142,8 @@ def get_erc20_tkn_balance( The token address w3_tenderly: Web3 The tenderly web3 object + w3: Web3 + The web3 object Returns ------- @@ -148,7 +151,10 @@ def get_erc20_tkn_balance( The token balance """ - erc20_contract = w3_tenderly.eth.contract(abi=ERC20_ABI, address=tkn0) + if w3_tenderly: + erc20_contract = w3_tenderly.eth.contract(abi=ERC20_ABI, address=tkn0) + else: + erc20_contract = w3.eth.contract(abi=ERC20_ABI, address=tkn0) return erc20_contract.functions.balanceOf(contract.address).call() @staticmethod diff --git a/fastlane_bot/events/pools/bancor_v2.py b/fastlane_bot/events/pools/bancor_v2.py index 486fdff87..0bebe2a47 100644 --- a/fastlane_bot/events/pools/bancor_v2.py +++ b/fastlane_bot/events/pools/bancor_v2.py @@ -85,7 +85,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/pools/bancor_v3.py b/fastlane_bot/events/pools/bancor_v3.py index 01fcbd9ee..0806ac900 100644 --- a/fastlane_bot/events/pools/bancor_v3.py +++ b/fastlane_bot/events/pools/bancor_v3.py @@ -70,7 +70,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/pools/base.py b/fastlane_bot/events/pools/base.py index 4018f3e59..2060c48c8 100644 --- a/fastlane_bot/events/pools/base.py +++ b/fastlane_bot/events/pools/base.py @@ -76,7 +76,7 @@ def update_from_event( @abstractmethod def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ Update the pool state from a contract. 
@@ -89,6 +89,8 @@ def update_from_contract( The tenderly fork id, by default None w3_tenderly : Web3, optional The tenderly web3 instance, by default None + w3 : Web3, optional + The web3 instance, by default None Returns ------- diff --git a/fastlane_bot/events/pools/carbon_v1.py b/fastlane_bot/events/pools/carbon_v1.py index a67231efd..5d2fc8efa 100644 --- a/fastlane_bot/events/pools/carbon_v1.py +++ b/fastlane_bot/events/pools/carbon_v1.py @@ -113,7 +113,7 @@ def parse_orders( return order0, order1 def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/pools/sushiswap_v2.py b/fastlane_bot/events/pools/sushiswap_v2.py index bfe4171cc..f826cb916 100644 --- a/fastlane_bot/events/pools/sushiswap_v2.py +++ b/fastlane_bot/events/pools/sushiswap_v2.py @@ -57,7 +57,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/pools/uniswap_v2.py b/fastlane_bot/events/pools/uniswap_v2.py index 16472d33e..42fd2c6a5 100644 --- a/fastlane_bot/events/pools/uniswap_v2.py +++ b/fastlane_bot/events/pools/uniswap_v2.py @@ -57,7 +57,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Web3 = None, w3: Web3 = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/pools/uniswap_v3.py b/fastlane_bot/events/pools/uniswap_v3.py index 7962dddbe..368b12140 100644 --- a/fastlane_bot/events/pools/uniswap_v3.py +++ b/fastlane_bot/events/pools/uniswap_v3.py @@ -64,7 +64,7 @@ def update_from_event( return data def update_from_contract( - self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Any = None + self, contract: Contract, tenderly_fork_id: str = None, w3_tenderly: Any = None, w3: Any = None ) -> Dict[str, Any]: """ See base class. diff --git a/fastlane_bot/events/utils.py b/fastlane_bot/events/utils.py index 917557aba..5f736e09f 100644 --- a/fastlane_bot/events/utils.py +++ b/fastlane_bot/events/utils.py @@ -967,13 +967,14 @@ def multicall_every_iteration( The number of jobs to run in parallel. 
""" + multicallable_exchanges = [exchange for exchange in mgr.cfg.MULTICALLABLE_EXCHANGES if exchange in mgr.exchanges] multicallable_pool_rows = [ list(set(get_pools_for_exchange(mgr=mgr, exchange=ex_name))) for ex_name in mgr.cfg.MULTICALLABLE_EXCHANGES if ex_name in mgr.exchanges ] - for idx, exchange in enumerate(mgr.cfg.MULTICALLABLE_EXCHANGES): + for idx, exchange in enumerate(multicallable_exchanges): update_pools_from_contracts( n_jobs=n_jobs, current_block=current_block, diff --git a/main.py b/main.py index d890aa0b6..5f29ba401 100644 --- a/main.py +++ b/main.py @@ -462,169 +462,169 @@ def run( mainnet_uri = mgr.cfg.w3.provider.endpoint_uri forks_to_cleanup = [] while True: - # try: - - # Save initial state of pool data to assert whether it has changed - initial_state = mgr.pool_data.copy() - - # Get current block number, then adjust to the block number reorg_delay blocks ago to avoid reorgs - start_block, replay_from_block = get_start_block( - alchemy_max_block_fetch, last_block, mgr, reorg_delay, replay_from_block - ) - - # Get all events from the last block to the current block - current_block = get_current_block(last_block, mgr, reorg_delay, replay_from_block, tenderly_fork_id) - - # Log the current start, end and last block - mgr.cfg.logger.info( - f"Fetching events from {start_block} to {current_block}... {last_block}" - ) + try: + + # Save initial state of pool data to assert whether it has changed + initial_state = mgr.pool_data.copy() + + # Get current block number, then adjust to the block number reorg_delay blocks ago to avoid reorgs + start_block, replay_from_block = get_start_block( + alchemy_max_block_fetch, last_block, mgr, reorg_delay, replay_from_block + ) - # Set the network connection to Mainnet if replaying from a block - mgr = set_network_to_mainnet_if_replay( - last_block, - loop_idx, - mainnet_uri, - mgr, - replay_from_block, - use_cached_events, - ) + # Get all events from the last block to the current block + current_block = get_current_block(last_block, mgr, reorg_delay, replay_from_block, tenderly_fork_id) - # Get the events - latest_events = ( - get_cached_events(mgr, logging_path) - if use_cached_events - else get_latest_events( - current_block, - mgr, - n_jobs, - start_block, - cache_latest_only, - logging_path, + # Log the current start, end and last block + mgr.cfg.logger.info( + f"Fetching events from {start_block} to {current_block}... {last_block}" ) - ) - if mgr.cfg.BANCOR_POL_NAME in mgr.exchanges: - update_pools_from_contracts( + # Set the network connection to Mainnet if replaying from a block + mgr = set_network_to_mainnet_if_replay( + last_block, + loop_idx, + mainnet_uri, mgr, - n_jobs=n_jobs, - rows_to_update=[ - i - for i, pool_info in enumerate(mgr.pool_data) - if pool_info["exchange_name"] == mgr.cfg.BANCOR_POL_NAME - ], - current_block=current_block, - token_address=True, + replay_from_block, + use_cached_events, ) - # Update the pools from the latest events - update_pools_from_events(n_jobs, mgr, latest_events) - - # Set the network connection to Tenderly if replaying from a block - mgr, tenderly_uri, forked_from_block = set_network_to_tenderly_if_replay( - last_block=last_block, - loop_idx=loop_idx, - mgr=mgr, - replay_from_block=replay_from_block, - tenderly_uri=tenderly_uri, - use_cached_events=use_cached_events, - tenderly_fork_id=tenderly_fork_id, - ) - - # Handle the initial iteration (backdate pools, update pools from contracts, etc.) 
- handle_initial_iteration( - backdate_pools=backdate_pools, - current_block=current_block, - last_block=last_block, - mgr=mgr, - n_jobs=n_jobs, - start_block=start_block - ) - - # Run multicall every iteration - multicall_every_iteration(current_block=current_block, mgr=mgr, n_jobs=n_jobs) - - # Update the last block number - last_block = current_block - - # Write the pool data to disk - write_pool_data_to_disk(cache_latest_only=cache_latest_only, logging_path=logging_path, mgr=mgr, current_block=current_block) - - # Handle/remove duplicates in the pool data - handle_duplicates(mgr) - - # Delete the bot (if it exists) to avoid memory leaks - del bot - - # Re-initialize the bot - bot = init_bot(mgr) - - # Verify that the state has changed - verify_state_changed(bot=bot, initial_state=initial_state, mgr=mgr) - - # Verify that the minimum profit in BNT is respected - verify_min_bnt_is_respected(bot=bot, mgr=mgr) - - # Handle subsequent iterations - handle_subsequent_iterations( - arb_mode=arb_mode, - bot=bot, - flashloan_tokens=flashloan_tokens, - polling_interval=polling_interval, - randomizer=randomizer, - run_data_validator=run_data_validator, - target_tokens=target_tokens, - loop_idx=loop_idx, - logging_path=logging_path, - replay_from_block=replay_from_block, - tenderly_uri=tenderly_uri, - forks_to_cleanup=forks_to_cleanup, - mgr=mgr, - forked_from_block=forked_from_block, - ) - - # Increment the loop index - loop_idx += 1 - - # Sleep for the polling interval - if not replay_from_block: - time.sleep(polling_interval) - - # Check if timeout has been hit, and if so, break the loop for tests - if timeout is not None and time.time() - start_timeout > timeout: - mgr.cfg.logger.info("Timeout hit... stopping bot") - break - - # Delete all Tenderly forks except the most recent one - if replay_from_block and not tenderly_fork_id: - break - - if loop_idx == 1: - mgr.cfg.logger.info( - """ - +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - - Finished first iteration of data sync. Now starting main loop arbitrage search. 
- - +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - """ + # Get the events + latest_events = ( + get_cached_events(mgr, logging_path) + if use_cached_events + else get_latest_events( + current_block, + mgr, + n_jobs, + start_block, + cache_latest_only, + logging_path, + ) ) - if tenderly_fork_id: - w3 = Web3(HTTPProvider(tenderly_uri)) + if mgr.cfg.BANCOR_POL_NAME in mgr.exchanges: + update_pools_from_contracts( + mgr, + n_jobs=n_jobs, + rows_to_update=[ + i + for i, pool_info in enumerate(mgr.pool_data) + if pool_info["exchange_name"] == mgr.cfg.BANCOR_POL_NAME + ], + current_block=current_block, + token_address=True, + ) + + # Update the pools from the latest events + update_pools_from_events(n_jobs, mgr, latest_events) + + # Set the network connection to Tenderly if replaying from a block + mgr, tenderly_uri, forked_from_block = set_network_to_tenderly_if_replay( + last_block=last_block, + loop_idx=loop_idx, + mgr=mgr, + replay_from_block=replay_from_block, + tenderly_uri=tenderly_uri, + use_cached_events=use_cached_events, + tenderly_fork_id=tenderly_fork_id, + ) - # Increase time and blocks - params = [w3.toHex(increment_time)] # number of seconds - w3.provider.make_request(method="evm_increaseTime", params=params) + # Handle the initial iteration (backdate pools, update pools from contracts, etc.) + handle_initial_iteration( + backdate_pools=backdate_pools, + current_block=current_block, + last_block=last_block, + mgr=mgr, + n_jobs=n_jobs, + start_block=start_block + ) - params = [w3.toHex(increment_blocks)] # number of blocks - w3.provider.make_request(method="evm_increaseBlocks", params=params) + # Run multicall every iteration + multicall_every_iteration(current_block=current_block, mgr=mgr, n_jobs=n_jobs) + + # Update the last block number + last_block = current_block + + # Write the pool data to disk + write_pool_data_to_disk(cache_latest_only=cache_latest_only, logging_path=logging_path, mgr=mgr, current_block=current_block) + + # Handle/remove duplicates in the pool data + handle_duplicates(mgr) + + # Delete the bot (if it exists) to avoid memory leaks + del bot + + # Re-initialize the bot + bot = init_bot(mgr) + + # Verify that the state has changed + verify_state_changed(bot=bot, initial_state=initial_state, mgr=mgr) + + # Verify that the minimum profit in BNT is respected + verify_min_bnt_is_respected(bot=bot, mgr=mgr) + + # Handle subsequent iterations + handle_subsequent_iterations( + arb_mode=arb_mode, + bot=bot, + flashloan_tokens=flashloan_tokens, + polling_interval=polling_interval, + randomizer=randomizer, + run_data_validator=run_data_validator, + target_tokens=target_tokens, + loop_idx=loop_idx, + logging_path=logging_path, + replay_from_block=replay_from_block, + tenderly_uri=tenderly_uri, + forks_to_cleanup=forks_to_cleanup, + mgr=mgr, + forked_from_block=forked_from_block, + ) - # except Exception as e: - # mgr.cfg.logger.error(f"Error in main loop: {e}") - # time.sleep(polling_interval) + # Increment the loop index + loop_idx += 1 + + # Sleep for the polling interval + if not replay_from_block: + time.sleep(polling_interval) + + # Check if timeout has been hit, and if so, break the loop for tests + if timeout is not None and time.time() - start_timeout > timeout: + mgr.cfg.logger.info("Timeout hit... 
stopping bot") + break + + # Delete all Tenderly forks except the most recent one + if replay_from_block and not tenderly_fork_id: + break + + if loop_idx == 1: + mgr.cfg.logger.info( + """ + +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + Finished first iteration of data sync. Now starting main loop arbitrage search. + + +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + """ + ) + + if tenderly_fork_id: + w3 = Web3(HTTPProvider(tenderly_uri)) + + # Increase time and blocks + params = [w3.toHex(increment_time)] # number of seconds + w3.provider.make_request(method="evm_increaseTime", params=params) + + params = [w3.toHex(increment_blocks)] # number of blocks + w3.provider.make_request(method="evm_increaseBlocks", params=params) + + except Exception as e: + mgr.cfg.logger.error(f"Error in main loop: {e}") + time.sleep(polling_interval) if __name__ == "__main__":
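
The thread running through most of these hunks is a new optional `w3` argument on `update_from_contract` and `get_erc20_tkn_balance`: `BancorPolPool` previously could only read ERC20 balances through a Tenderly fork web3, and it now falls back to the mainnet web3 that the managers pass down as `self.web3`. A minimal sketch of that selection logic, with a stand-in ABI fragment (the real `ERC20_ABI` comes from `fastlane_bot.data.abi`):

```python
from typing import Optional

from web3 import Web3
from web3.contract import Contract

# Stand-in for fastlane_bot.data.abi.ERC20_ABI; only balanceOf is needed here.
ERC20_ABI = [
    {
        "constant": True,
        "inputs": [{"name": "owner", "type": "address"}],
        "name": "balanceOf",
        "outputs": [{"name": "", "type": "uint256"}],
        "type": "function",
    }
]


def get_erc20_tkn_balance(
    contract: Contract,
    tkn0: str,
    w3_tenderly: Optional[Web3] = None,
    w3: Optional[Web3] = None,
) -> int:
    """Return the POL contract's balance of tkn0, preferring the Tenderly fork's web3."""
    # Use the Tenderly fork when one is configured; otherwise fall back to mainnet w3.
    provider = w3_tenderly if w3_tenderly is not None else w3
    erc20_contract = provider.eth.contract(abi=ERC20_ABI, address=tkn0)
    return erc20_contract.functions.balanceOf(contract.address).call()
```

This is also why every pool subclass's `update_from_contract` signature gains `w3: Web3 = None`: the managers now call it with `self.web3` as the fourth argument, so the base-class signature and all overrides have to accept it.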
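
In `main.py`, the per-iteration body of `run()` is wrapped in `try`/`except` again (the handler had been commented out), so a single failed iteration logs the error and sleeps for one polling interval instead of crashing the bot. A hypothetical reduction of that control flow, with `do_one_iteration` standing in for the event-fetch, pool-update, and arbitrage steps shown above:

```python
import time


def run_forever(mgr, polling_interval: float, do_one_iteration) -> None:
    """Hypothetical reduction of main.run(): keep looping even when an iteration fails."""
    while True:
        try:
            # Stand-in for: fetch events, update pools, rebuild the bot, search for arbs.
            do_one_iteration()
        except Exception as e:
            # Restored behaviour: log and back off rather than letting the process die.
            mgr.cfg.logger.error(f"Error in main loop: {e}")
            time.sleep(polling_interval)
```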