diff --git a/CHANGELOG.md b/CHANGELOG.md
index e5f8ecdee..d96bd7869 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,9 +2,17 @@
 
 ## [Unreleased](https://github.com/bancorprotocol/fastlane-bot/tree/HEAD)
 
-[Full Changelog](https://github.com/bancorprotocol/fastlane-bot/compare/v2.7.45...HEAD)
+[Full Changelog](https://github.com/bancorprotocol/fastlane-bot/compare/v2.7.46...HEAD)
 
 - bug when identifying wrong-direction Carbon curves in Balancer multi pair mode [\#164](https://github.com/bancorprotocol/fastlane-bot/issues/164)
+
+- Rate limiter for free alchemy accounts [\#166](https://github.com/bancorprotocol/fastlane-bot/issues/166)
+- adds rate limiter and removes unnecessary alchemy calls for chain\_id [\#167](https://github.com/bancorprotocol/fastlane-bot/pull/167) ([mikewcasale](https://github.com/mikewcasale))
+
+## [v2.7.46](https://github.com/bancorprotocol/fastlane-bot/tree/v2.7.46) (2023-10-09)
+
+[Full Changelog](https://github.com/bancorprotocol/fastlane-bot/compare/v2.7.45...v2.7.46)
+
 - Update pairwise\_multi\_bal.py [\#165](https://github.com/bancorprotocol/fastlane-bot/pull/165) ([Lesigh-3100](https://github.com/Lesigh-3100))
 
 - Add Support for Balancer Exchange [\#70](https://github.com/bancorprotocol/fastlane-bot/issues/70)
diff --git a/fastlane_bot/__init__.py b/fastlane_bot/__init__.py
index d5dd70947..e02deb5bc 100644
--- a/fastlane_bot/__init__.py
+++ b/fastlane_bot/__init__.py
@@ -1,7 +1,7 @@
 from .bot import CarbonBot as Bot, __VERSION__, __DATE__
 from .config import Config, ConfigNetwork, ConfigDB, ConfigLogger, ConfigProvider
 
 
-__version__ = '2.7.46'
+__version__ = '2.7.47'
 
 
diff --git a/fastlane_bot/data/tokens.csv b/fastlane_bot/data/tokens.csv
index 6b3887204..ce8528a78 100644
--- a/fastlane_bot/data/tokens.csv
+++ b/fastlane_bot/data/tokens.csv
@@ -4183,3 +4183,4 @@
 4181,NEAR-f6a4,NEAR,NEAR,0x85F17Cf997934a597031b2E18a9aB6ebD4B9f6a4,24
 4182,BENT-C375,BENT,BENT,0x01597E397605Bf280674Bf292623460b4204C375,18
 4183,BRO-77c6,BRO,BRO,0x6e08B5D1169765f94d5ACe5524F56E8ac75B77c6,18
+4184,EWTB-6054,EWTB,EWTB,0x178c820f862B14f316509ec36b13123DA19A6054,18
diff --git a/fastlane_bot/events/managers/manager.py b/fastlane_bot/events/managers/manager.py
index aa704bae9..3f4655fd3 100644
--- a/fastlane_bot/events/managers/manager.py
+++ b/fastlane_bot/events/managers/manager.py
@@ -137,13 +137,9 @@ def update_from_pool_info(
             ),
         )
         pool = self.get_or_init_pool(pool_info)
-        try:
-            params = pool.update_from_contract(
-                contract, self.tenderly_fork_id, self.w3_tenderly, self.web3
-            )
-        except Exception as e:
-            self.cfg.logger.error(f"Error updating pool: {e} {pool_info}")
-            raise e
+        params = pool.update_from_contract(
+            contract, self.tenderly_fork_id, self.w3_tenderly, self.web3
+        )
         for key, value in params.items():
             pool_info[key] = value
         return pool_info
@@ -264,13 +260,17 @@ def update(
                     break
                 break
             except Exception as e:
-                if all(
-                    err_msg not in str(e)
-                    for err_msg in [
-                        "Too Many Requests for url",
-                        "format_name",
-                    ]
-                ):
+                if 'Too Many Requests for url' in str(e):
+                    time.sleep(random.random())
+                    return self.update(
+                        event,
+                        address,
+                        token_address,
+                        pool_info,
+                        contract,
+                        block_number,
+                    )
+                elif "format_name" not in str(e):
                     self.cfg.logger.error(f"Error updating pool: {e} {address} {event}")
                     if "ERC721:" not in str(e):
                         raise e
@@ -279,6 +279,7 @@
                 rate_limiter = 0.1 + 0.9 * random.random()
                 time.sleep(random.random())
 
+
     def handle_pair_trading_fee_updated(
         self,
         event: Dict[str, Any] = None,
diff --git a/fastlane_bot/helpers/submithandler.py b/fastlane_bot/helpers/submithandler.py
index 78aefa011..4a33c5cff 100644
--- a/fastlane_bot/helpers/submithandler.py
+++ b/fastlane_bot/helpers/submithandler.py
@@ -349,7 +349,7 @@ def _get_transaction_details(
             "gasPrice": self._get_gas_price(),
             "value": tx_params.get("value", 0),
             "nonce": tx_params.get("nonce", None),
-            "chainId": self.w3.eth.chain_id,
+            "chainId": 1,
         }
 
     def _get_transaction_hash(self, tx_details: TxParams, key: str) -> str:
diff --git a/main.py b/main.py
index 1d5d70ecc..67cfe5373 100644
--- a/main.py
+++ b/main.py
@@ -460,156 +460,150 @@ def run(
     while True:
         try:
-            # Save initial state of pool data to assert whether it has changed
-            initial_state = mgr.pool_data.copy()
-
-            # Get current block number, then adjust to the block number reorg_delay blocks ago to avoid reorgs
-            start_block, replay_from_block = get_start_block(
-                alchemy_max_block_fetch, last_block, mgr, reorg_delay, replay_from_block
-            )
-
-            # Get all events from the last block to the current block
-            current_block = get_current_block(
-                last_block, mgr, reorg_delay, replay_from_block, tenderly_fork_id
-            )
-
-            # Log the current start, end and last block
-            mgr.cfg.logger.info(
-                f"Fetching events from {start_block} to {current_block}... {last_block}"
-            )
-
-            # Set the network connection to Mainnet if replaying from a block
-            mgr = set_network_to_mainnet_if_replay(
-                last_block,
-                loop_idx,
-                mainnet_uri,
-                mgr,
-                replay_from_block,
-                use_cached_events,
-            )
-
-            # Get the events
-            latest_events = (
-                get_cached_events(mgr, logging_path)
-                if use_cached_events
-                else get_latest_events(
-                    current_block,
-                    mgr,
-                    n_jobs,
-                    start_block,
-                    cache_latest_only,
-                    logging_path,
-                )
-            )
-
-            # Update the pools from the latest events
-            update_pools_from_events(n_jobs, mgr, latest_events)
-
-            # Set the network connection to Tenderly if replaying from a block
-            mgr, tenderly_uri, forked_from_block = set_network_to_tenderly_if_replay(
-                last_block=last_block,
-                loop_idx=loop_idx,
-                mgr=mgr,
-                replay_from_block=replay_from_block,
-                tenderly_uri=tenderly_uri,
-                use_cached_events=use_cached_events,
-                tenderly_fork_id=tenderly_fork_id,
-            )
-
-            # Handle the initial iteration (backdate pools, update pools from contracts, etc.)
-            handle_initial_iteration(
-                backdate_pools=backdate_pools,
-                current_block=current_block,
-                last_block=last_block,
-                mgr=mgr,
-                n_jobs=n_jobs,
-                start_block=start_block,
-            )
-
-            # Run multicall every iteration
-            multicall_every_iteration(current_block=current_block, mgr=mgr)
-
-            # Update the last block number
-            last_block = current_block
-
-            # Write the pool data to disk
-            write_pool_data_to_disk(
-                cache_latest_only=cache_latest_only,
-                logging_path=logging_path,
-                mgr=mgr,
-                current_block=current_block,
-            )
-
-            # Handle/remove duplicates in the pool data
-            handle_duplicates(mgr)
-
-            # Delete the bot (if it exists) to avoid memory leaks
-            del bot
-
-            # Re-initialize the bot
-            bot = init_bot(mgr)
-
-            # Verify that the state has changed
-            verify_state_changed(bot=bot, initial_state=initial_state, mgr=mgr)
-
-            # Verify that the minimum profit in BNT is respected
-            verify_min_bnt_is_respected(bot=bot, mgr=mgr)
-
-            # Handle subsequent iterations
-            handle_subsequent_iterations(
-                arb_mode=arb_mode,
-                bot=bot,
-                flashloan_tokens=flashloan_tokens,
-                polling_interval=polling_interval,
-                randomizer=randomizer,
-                run_data_validator=run_data_validator,
-                target_tokens=target_tokens,
-                loop_idx=loop_idx,
-                logging_path=logging_path,
-                replay_from_block=replay_from_block,
-                tenderly_uri=tenderly_uri,
-                mgr=mgr,
-                forked_from_block=forked_from_block,
-            )
-
-            # Increment the loop index
-            loop_idx += 1
-
-            # Sleep for the polling interval
-            if not replay_from_block:
-                time.sleep(polling_interval)
-
-            # Check if timeout has been hit, and if so, break the loop for tests
-            if timeout is not None and time.time() - start_timeout > timeout:
-                mgr.cfg.logger.info("Timeout hit... stopping bot")
-                break
-
-            # Delete all Tenderly forks except the most recent one
-            if replay_from_block and not tenderly_fork_id:
-                break
-
-            if loop_idx == 1:
-                mgr.cfg.logger.info(
-                    """
-                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-
-                  Finished first iteration of data sync. Now starting main loop arbitrage search.
-
-                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-                  """
-                )
-
-            if tenderly_fork_id:
-                w3 = Web3(HTTPProvider(tenderly_uri))
-
-                # Increase time and blocks
-                params = [w3.toHex(increment_time)]  # number of seconds
-                w3.provider.make_request(method="evm_increaseTime", params=params)
-
-                params = [w3.toHex(increment_blocks)]  # number of blocks
-                w3.provider.make_request(method="evm_increaseBlocks", params=params)
+            # Save initial state of pool data to assert whether it has changed
+            initial_state = mgr.pool_data.copy()
+
+            # Get current block number, then adjust to the block number reorg_delay blocks ago to avoid reorgs
+            start_block, replay_from_block = get_start_block(
+                alchemy_max_block_fetch, last_block, mgr, reorg_delay, replay_from_block
+            )
+
+
+            # Get all events from the last block to the current block
+            current_block = get_current_block(last_block, mgr, reorg_delay, replay_from_block, tenderly_fork_id)
+
+            # Log the current start, end and last block
+            mgr.cfg.logger.info(
+                f"Fetching events from {start_block} to {current_block}... {last_block}"
+            )
+
+            # Set the network connection to Mainnet if replaying from a block
+            mgr = set_network_to_mainnet_if_replay(
+                last_block,
+                loop_idx,
+                mainnet_uri,
+                mgr,
+                replay_from_block,
+                use_cached_events,
+            )
+
+            # Get the events
+            latest_events = (
+                get_cached_events(mgr, logging_path)
+                if use_cached_events
+                else get_latest_events(
+                    current_block,
+                    mgr,
+                    n_jobs,
+                    start_block,
+                    cache_latest_only,
+                    logging_path,
+                )
+            )
+
+            # Update the pools from the latest events
+            update_pools_from_events(n_jobs, mgr, latest_events)
+
+            # Set the network connection to Tenderly if replaying from a block
+            mgr, tenderly_uri, forked_from_block = set_network_to_tenderly_if_replay(
+                last_block=last_block,
+                loop_idx=loop_idx,
+                mgr=mgr,
+                replay_from_block=replay_from_block,
+                tenderly_uri=tenderly_uri,
+                use_cached_events=use_cached_events,
+                tenderly_fork_id=tenderly_fork_id,
+            )
+
+            # Handle the initial iteration (backdate pools, update pools from contracts, etc.)
+            handle_initial_iteration(
+                backdate_pools=backdate_pools,
+                current_block=current_block,
+                last_block=last_block,
+                mgr=mgr,
+                n_jobs=n_jobs,
+                start_block=start_block
+            )
+
+            # Run multicall every iteration
+            multicall_every_iteration(current_block=current_block, mgr=mgr)
+
+            # Update the last block number
+            last_block = current_block
+
+            # Write the pool data to disk
+            write_pool_data_to_disk(cache_latest_only=cache_latest_only, logging_path=logging_path, mgr=mgr, current_block=current_block)
+
+            # Handle/remove duplicates in the pool data
+            handle_duplicates(mgr)
+
+            # Delete the bot (if it exists) to avoid memory leaks
+            del bot
+
+            # Re-initialize the bot
+            bot = init_bot(mgr)
+
+            # Verify that the state has changed
+            verify_state_changed(bot=bot, initial_state=initial_state, mgr=mgr)
+
+            # Verify that the minimum profit in BNT is respected
+            verify_min_bnt_is_respected(bot=bot, mgr=mgr)
+
+            # Handle subsequent iterations
+            handle_subsequent_iterations(
+                arb_mode=arb_mode,
+                bot=bot,
+                flashloan_tokens=flashloan_tokens,
+                polling_interval=polling_interval,
+                randomizer=randomizer,
+                run_data_validator=run_data_validator,
+                target_tokens=target_tokens,
+                loop_idx=loop_idx,
+                logging_path=logging_path,
+                replay_from_block=replay_from_block,
+                tenderly_uri=tenderly_uri,
+                mgr=mgr,
+                forked_from_block=forked_from_block,
+            )
+
+            # Increment the loop index
+            loop_idx += 1
+
+            # Sleep for the polling interval
+            if not replay_from_block:
+                time.sleep(polling_interval)
+
+            # Check if timeout has been hit, and if so, break the loop for tests
+            if timeout is not None and time.time() - start_timeout > timeout:
+                mgr.cfg.logger.info("Timeout hit... stopping bot")
+                break
+
+            # Delete all Tenderly forks except the most recent one
+            if replay_from_block and not tenderly_fork_id:
+                break
+
+            if loop_idx == 1:
+                mgr.cfg.logger.info(
+                    """
+                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+                  Finished first iteration of data sync. Now starting main loop arbitrage search.
+
+                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+                  +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+                  """
+                )
+
+            if tenderly_fork_id:
+                w3 = Web3(HTTPProvider(tenderly_uri))
+
+                # Increase time and blocks
+                params = [w3.toHex(increment_time)]  # number of seconds
+                w3.provider.make_request(method="evm_increaseTime", params=params)
+
+                params = [w3.toHex(increment_blocks)]  # number of blocks
+                w3.provider.make_request(method="evm_increaseBlocks", params=params)
         except Exception as e:
             mgr.cfg.logger.error(f"Error in main loop: {e}")