From bb784cf2d03416dda2210deb899b28d1bc05c928 Mon Sep 17 00:00:00 2001 From: Earle Lowe Date: Mon, 9 Dec 2024 10:13:36 -0800 Subject: [PATCH 01/25] Update GUI pin to 'b2caac88dc2ced4d4e2a904456506bc7318a5434' --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 7e6f90ed1bbd..b2caac88dc2c 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 7e6f90ed1bbda7dcc9181a7b424a82e4fb473901 +Subproject commit b2caac88dc2ced4d4e2a904456506bc7318a5434 From bc065b5d525601de27fb1451fd1b8140cc1e3b63 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 10 Dec 2024 18:30:43 +0100 Subject: [PATCH 02/25] add some validation of `fork_info` in Blockchain.add_block() (#18981) * add some validation of fork_info in Blockchain.add_block(). This would have caught the bug introduced in main a while ago * simplify add_blocks_in_batches --- chia/_tests/blockchain/test_blockchain.py | 123 +++++++++++------- .../test_blockchain_transactions.py | 91 ++++++------- .../core/full_node/stores/test_block_store.py | 13 +- .../core/full_node/stores/test_coin_store.py | 14 +- .../full_node/stores/test_full_node_store.py | 15 ++- chia/_tests/core/full_node/test_full_node.py | 14 +- chia/_tests/core/mempool/test_mempool.py | 31 ++--- chia/_tests/pools/test_pool_rpc.py | 16 +-- chia/_tests/wallet/sync/test_wallet_sync.py | 8 +- chia/consensus/blockchain.py | 15 +++ chia/full_node/full_node.py | 4 + chia/simulator/add_blocks_in_batches.py | 13 +- chia/simulator/full_node_simulator.py | 2 +- 13 files changed, 199 insertions(+), 160 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 5f3e3ea78e4a..89b015cc524f 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -155,6 +155,7 @@ class TestBlockHeaderValidation: @pytest.mark.anyio async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_blocks: list[FullBlock]) -> None: blocks = default_1000_blocks + fork_info = ForkInfo(blocks[0].height - 1, blocks[0].height - 1, blocks[0].prev_header_hash) for block in blocks: if ( len(block.finished_sub_slots) > 0 @@ -181,7 +182,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block assert error.code == Err.INVALID_NEW_SUB_SLOT_ITERS # Also fails calling the outer methods, but potentially with a different error - await _validate_and_add_block(empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK) + await _validate_and_add_block( + empty_blockchain, block_bad, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info + ) new_finished_ss_2 = recursive_replace( block.finished_sub_slots[0], @@ -205,7 +208,7 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_2, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_2, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info ) # 3c @@ -235,7 +238,7 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_3, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_3, expected_result=AddBlockResult.INVALID_BLOCK, 
fork_info=fork_info ) # 3d @@ -264,9 +267,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # Also fails calling the outer methods, but potentially with a different error await _validate_and_add_block( - empty_blockchain, block_bad_4, expected_result=AddBlockResult.INVALID_BLOCK + empty_blockchain, block_bad_4, expected_result=AddBlockResult.INVALID_BLOCK, fork_info=fork_info ) - await _validate_and_add_block(empty_blockchain, block) + await _validate_and_add_block(empty_blockchain, block, fork_info=fork_info) log.info( f"Added block {block.height} total iters {block.total_iters} " f"new slot? {len(block.finished_sub_slots)}" @@ -3041,8 +3044,13 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo await _validate_and_add_block(b, block) blocks_reorg = bt.get_consecutive_blocks(2, block_list_input=blocks[:-7], guarantee_transaction_block=True) - await _validate_and_add_block(b, blocks_reorg[-2], expected_result=AddBlockResult.ADDED_AS_ORPHAN) - await _validate_and_add_block(b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN) + fork_info = ForkInfo(blocks[-8].height, blocks[-8].height, blocks[-8].header_hash) + await _validate_and_add_block( + b, blocks_reorg[-2], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) + await _validate_and_add_block( + b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) # Coin does not exist in reorg blocks_reorg = bt.get_consecutive_blocks( @@ -3050,7 +3058,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo ) peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.UNKNOWN_UNSPENT, fork_info=fork_info) # Finally add the block to the fork (spending both in same bundle, this is ephemeral) @@ -3061,7 +3068,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block( b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info ) @@ -3071,7 +3077,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo ) peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-1], peak) await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.DOUBLE_SPEND_IN_FORK, fork_info=fork_info) rewards_ph = wt.get_new_puzzlehash() @@ -3084,7 +3089,6 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo peak = b.get_peak() assert peak is not None - fork_info = await get_fork_info(b, blocks_reorg[-10], peak) for block in blocks_reorg[-10:]: await _validate_and_add_block_multi_result( b, block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.NEW_PEAK], fork_info=fork_info @@ -3264,13 +3268,18 @@ async def test_basic_reorg(self, empty_blockchain: Blockchain, bt: BlockTools) - assert peak.height == 14 blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < 10: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) + await _validate_and_add_block( + b, reorg_block, 
expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) elif reorg_block.height < 15: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) elif reorg_block.height >= 15: - await _validate_and_add_block(b, reorg_block) + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) peak = b.get_peak() assert peak is not None assert peak.height == 16 @@ -3463,7 +3472,7 @@ async def test_long_reorg( # start the fork point a few blocks back, to test that the blockchain # can catch up - fork_block = default_10000_blocks[num_blocks_chain_2_start - 200] + fork_block = default_10000_blocks[num_blocks_chain_2_start - 101] fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) await b.warmup(fork_block.height) for block in blocks: @@ -3514,22 +3523,34 @@ async def test_reorg_from_genesis(self, empty_blockchain: Blockchain, bt: BlockT # Reorg to alternate chain that is 1 height longer blocks_reorg_chain = bt.get_consecutive_blocks(16, [], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < 15: await _validate_and_add_block_multi_result( b, reorg_block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.ALREADY_HAVE_BLOCK], + fork_info=fork_info, ) elif reorg_block.height >= 15: - await _validate_and_add_block(b, reorg_block) + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) # Back to original chain blocks_reorg_chain_2 = bt.get_consecutive_blocks(3, blocks, seed=b"3") - await _validate_and_add_block(b, blocks_reorg_chain_2[-3], expected_result=AddBlockResult.ADDED_AS_ORPHAN) - await _validate_and_add_block(b, blocks_reorg_chain_2[-2]) - await _validate_and_add_block(b, blocks_reorg_chain_2[-1]) + # we start from the beginning to make sure fork_info is built correctly + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) + for reorg_block in blocks_reorg_chain_2: + if reorg_block.height < 15: + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) + elif reorg_block.height < 16: + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) + else: + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) peak = b.get_peak() assert peak is not None @@ -3579,7 +3600,7 @@ async def test_reorg_transaction(self, empty_blockchain: Blockchain, bt: BlockTo await _validate_and_add_block(b, block) fork_block = blocks[11] fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) - for block in blocks_fork: + for block in blocks_fork[12:]: await _validate_and_add_block_no_error(b, block, fork_info=fork_info) @pytest.mark.anyio @@ -3694,8 +3715,8 @@ async def test_reorg_new_ref(empty_blockchain: Blockchain, bt: BlockTools) -> No ) blocks_reorg_chain = bt.get_consecutive_blocks(4, blocks_reorg_chain, seed=b"2") + fork_info = ForkInfo(-1, -1, b.constants.GENESIS_CHALLENGE) for i, block in enumerate(blocks_reorg_chain): - fork_info: Optional[ForkInfo] = None if i < 10: expected = AddBlockResult.ALREADY_HAVE_BLOCK elif i < 19: @@ -3709,8 +3730,6 @@ async def test_reorg_new_ref(empty_blockchain: Blockchain, bt: BlockTools) -> No expected = AddBlockResult.NEW_PEAK else: expected = AddBlockResult.NEW_PEAK - if fork_info 
is None: - fork_info = ForkInfo(blocks[1].height, blocks[1].height, blocks[1].header_hash) await _validate_and_add_block(b, block, expected_result=expected, fork_info=fork_info) peak = b.get_peak() assert peak is not None @@ -3762,7 +3781,7 @@ async def test_reorg_stale_fork_height(empty_blockchain: Blockchain, bt: BlockTo await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK) # fake the fork_info to make every new block look like a reorg - fork_info = ForkInfo(blocks[1].height, blocks[1].height, blocks[1].header_hash) + fork_info = ForkInfo(blocks[4].height, blocks[4].height, blocks[4].header_hash) for block in blocks[5:]: await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info) peak = b.get_peak() @@ -3812,8 +3831,10 @@ async def test_chain_failed_rollback(empty_blockchain: Blockchain, bt: BlockTool guarantee_transaction_block=True, ) + fork_block = blocks_reorg_chain[9] + fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) for block in blocks_reorg_chain[10:-1]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # Incorrectly set the height as spent in DB to trigger an error print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}") @@ -3823,7 +3844,7 @@ async def test_chain_failed_rollback(empty_blockchain: Blockchain, bt: BlockTool print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}") fork_block = blocks_reorg_chain[10 - 1] - fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) + # fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) with pytest.raises(ValueError, match="Invalid operation to set spent"): await _validate_and_add_block(b, blocks_reorg_chain[-1], fork_info=fork_info) @@ -3924,28 +3945,36 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> block1, block2 = b1, b2 counter += 1 - future = await pre_validate_block( - b.constants, - AugmentedBlockchain(b), - block1, - b.pool, - None, - ValidationState(ssi, diff, None), + preval = await ( + await pre_validate_block( + b.constants, + AugmentedBlockchain(b), + block1, + b.pool, + None, + ValidationState(ssi, diff, None), + ) ) - preval = await future - fork_info = ForkInfo(block1.height - 1, block1.height - 1, block1.prev_header_hash) + peak = b.get_peak() + if peak is None: + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) + else: + fork_info = await get_fork_info(b, block1, peak) _, err, _ = await b.add_block(block1, preval, sub_slot_iters=ssi, fork_info=fork_info) assert err is None - future = await pre_validate_block( - b.constants, - AugmentedBlockchain(b), - block2, - b.pool, - None, - ValidationState(ssi, diff, None), + preval = await ( + await pre_validate_block( + b.constants, + AugmentedBlockchain(b), + block2, + b.pool, + None, + ValidationState(ssi, diff, None), + ) ) - preval = await future - fork_info = ForkInfo(block2.height - 1, block2.height - 1, block2.prev_header_hash) + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, block2, peak) _, err, _ = await b.add_block(block2, preval, sub_slot_iters=ssi, fork_info=fork_info) assert err is None @@ -4042,11 +4071,13 @@ async def test_lookup_block_generators( # 507, 516, 527, 535, 539, 543, 547 # start with adding some blocks 
to test lookups from the mainchain + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for block in blocks_2[:550]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info) + fork_info = ForkInfo(blocks_1[500].height - 1, blocks_1[500].height - 1, blocks_1[500].prev_header_hash) for block in blocks_1[500:550]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # now we have a blockchain with two forks, the peak is at blocks_2[550] and # the leight weight peak is at blocks_1[550] diff --git a/chia/_tests/blockchain/test_blockchain_transactions.py b/chia/_tests/blockchain/test_blockchain_transactions.py index 8ac88bc03571..95d0a7b2b01a 100644 --- a/chia/_tests/blockchain/test_blockchain_transactions.py +++ b/chia/_tests/blockchain/test_blockchain_transactions.py @@ -7,6 +7,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.util.generator_tools_testing import run_and_get_removals_and_additions +from chia.consensus.blockchain import AddBlockResult from chia.full_node.full_node_api import FullNodeAPI from chia.protocols import wallet_protocol from chia.server.server import ChiaServer @@ -17,7 +18,7 @@ from chia.types.condition_opcodes import ConditionOpcode from chia.types.condition_with_args import ConditionWithArgs from chia.types.spend_bundle import SpendBundle, estimate_fees -from chia.util.errors import ConsensusError, Err +from chia.util.errors import Err from chia.util.ints import uint32, uint64 from chia.wallet.conditions import AssertCoinAnnouncement, AssertPuzzleAnnouncement @@ -44,8 +45,7 @@ async def test_basic_blockchain_tx( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block, None) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] spend_coin = None @@ -110,8 +110,7 @@ async def test_validate_blockchain_with_double_spend( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] spend_coin = None @@ -150,8 +149,7 @@ async def test_validate_blockchain_duplicate_output( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -189,8 +187,7 @@ async def test_validate_blockchain_with_reorg_double_spend( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -209,8 +206,7 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, ) # Move chain to height 10, with a spend at height 10 - for block in blocks_spend: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks_spend, full_node_api_1.full_node) # Reorg at height 5, add up to and 
including height 12 new_blocks = bt.get_consecutive_blocks( @@ -221,8 +217,7 @@ async def test_validate_blockchain_with_reorg_double_spend( seed=b"another seed", ) - for block in new_blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(new_blocks[-7:], full_node_api_1.full_node) # Spend the same coin in the new reorg chain at height 13 new_blocks = bt.get_consecutive_blocks( @@ -257,8 +252,9 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 12 is ok", ) - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, new_blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN + ) # Spend at height 13 is also OK (same height) new_blocks_reorg = bt.get_consecutive_blocks( @@ -269,8 +265,9 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 13 is ok", ) - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, new_blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN + ) # Spend at height 14 is not OK (already spend) new_blocks_reorg = bt.get_consecutive_blocks( @@ -281,9 +278,12 @@ async def test_validate_blockchain_with_reorg_double_spend( transaction_data=spend_bundle, seed=b"spend at 14 is double spend", ) - with pytest.raises(ConsensusError): - for block in new_blocks_reorg: - await full_node_api_1.full_node.add_block(block) + await _validate_and_add_block( + full_node_api_1.full_node.blockchain, + new_blocks_reorg[-1], + expected_result=AddBlockResult.INVALID_BLOCK, + expected_error=Err.DOUBLE_SPEND, + ) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_coin( @@ -300,8 +300,7 @@ async def test_validate_blockchain_spend_reorg_coin( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[2] @@ -321,7 +320,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node) coin_2 = None for coin in run_and_get_removals_and_additions( @@ -345,7 +344,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) coin_3 = None for coin in run_and_get_removals_and_additions( @@ -369,7 +368,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_cb_coin( @@ -392,7 +391,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( guarantee_transaction_block=True, ) - await add_blocks_in_batches(new_blocks, full_node_api_1.full_node, blocks[6].prev_header_hash) + 
await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) spend_block = new_blocks[-1] spend_coin = None @@ -410,7 +409,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[6].prev_header_hash) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_since_genesis( @@ -425,8 +424,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) spend_block = blocks[-1] spend_coin = None @@ -439,7 +437,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( new_blocks = bt.get_consecutive_blocks( 1, blocks, seed=b"", farmer_reward_puzzle_hash=coinbase_puzzlehash, transaction_data=spend_bundle ) - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await _validate_and_add_block(full_node_api_1.full_node.blockchain, new_blocks[-1]) # Spends a coin in a genesis reorg, that was already spent new_blocks = bt.get_consecutive_blocks( @@ -450,9 +448,6 @@ async def test_validate_blockchain_spend_reorg_since_genesis( guarantee_transaction_block=True, ) - for block in new_blocks: - await full_node_api_1.full_node.add_block(block) - new_blocks = bt.get_consecutive_blocks( 1, new_blocks, @@ -461,7 +456,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( transaction_data=spend_bundle, ) - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node) @pytest.mark.anyio async def test_assert_my_coin_id( @@ -478,8 +473,7 @@ async def test_assert_my_coin_id( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent @@ -550,8 +544,7 @@ async def test_assert_coin_announcement_consumed( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -634,8 +627,7 @@ async def test_assert_puzzle_announcement_consumed( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -718,8 +710,7 @@ async def test_assert_height_absolute( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -784,8 +775,7 @@ async def test_assert_height_relative( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets 
spent block1 = blocks[2] @@ -852,8 +842,7 @@ async def test_assert_seconds_relative( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -897,7 +886,7 @@ async def test_assert_seconds_relative( time_per_block=301, ) ) - await full_node_api_1.full_node.add_block(blocks[-1]) + await _validate_and_add_block(full_node_1.blockchain, blocks[-1]) valid_new_blocks = bt.get_consecutive_blocks( 1, @@ -924,8 +913,7 @@ async def test_assert_seconds_absolute( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] @@ -971,7 +959,7 @@ async def test_assert_seconds_absolute( time_per_block=30, ) ) - await full_node_api_1.full_node.add_block(blocks[-1]) + await _validate_and_add_block(full_node_1.blockchain, blocks[-1]) valid_new_blocks = bt.get_consecutive_blocks( 1, @@ -998,8 +986,7 @@ async def test_assert_fee_condition( num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True ) - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Coinbase that gets spent block1 = blocks[2] diff --git a/chia/_tests/core/full_node/stores/test_block_store.py b/chia/_tests/core/full_node/stores/test_block_store.py index 199c852cde34..aefa8607782b 100644 --- a/chia/_tests/core/full_node/stores/test_block_store.py +++ b/chia/_tests/core/full_node/stores/test_block_store.py @@ -15,6 +15,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.util.db_connection import DBConnection, PathDBConnection +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.full_block_to_block_record import header_block_to_sub_block_record @@ -148,9 +149,10 @@ async def test_get_full_blocks_at( bc = await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) ret = await block_store.get_full_blocks_at([uint32(count)]) assert set(ret) == set([b1, b2]) count += 1 @@ -174,9 +176,10 @@ async def test_get_block_records_in_range( bc = await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # the range is inclusive ret = await block_store.get_block_records_in_range(count, count) assert len(ret) == 1 @@ -202,9 +205,10 @@ async def test_get_block_bytes_in_range_in_main_chain( bc = 
await Blockchain.create(coin_store, block_store, bt.constants, tmp_dir, 2) count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) # the range is inclusive ret = await block_store.get_block_bytes_in_range(count, count) assert ret == [bytes(b1)] @@ -261,9 +265,10 @@ async def test_rollback(bt: BlockTools, tmp_dir: Path, use_cache: bool, default_ # insert all blocks count = 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for b1, b2 in zip(blocks, alt_blocks): await _validate_and_add_block(bc, b1) - await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block(bc, b2, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info) count += 1 ret = await block_store.get_random_not_compactified(count) assert len(ret) == count diff --git a/chia/_tests/core/full_node/stores/test_coin_store.py b/chia/_tests/core/full_node/stores/test_coin_store.py index a9a5f47c9c86..440e2ce2d4d2 100644 --- a/chia/_tests/core/full_node/stores/test_coin_store.py +++ b/chia/_tests/core/full_node/stores/test_coin_store.py @@ -12,6 +12,7 @@ from chia._tests.util.db_connection import DBConnection from chia._tests.util.get_name_puzzle_conditions import get_name_puzzle_conditions from chia._tests.util.misc import Marks, datacases +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.coinbase import create_farmer_coin, create_pool_coin @@ -364,13 +365,20 @@ async def test_basic_reorg(tmp_dir: Path, db_version: int, bt: BlockTools) -> No blocks_reorg_chain = bt.get_consecutive_blocks(reorg_length, blocks[: initial_block_count - 10], seed=b"2") + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in blocks_reorg_chain: if reorg_block.height < initial_block_count - 10: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK, fork_info=fork_info + ) elif reorg_block.height < initial_block_count: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) elif reorg_block.height >= initial_block_count: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.NEW_PEAK) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info + ) if reorg_block.is_transaction_block(): coins = reorg_block.get_included_reward_coins() records = [await coin_store.get_coin_record(coin.name()) for coin in coins] diff --git a/chia/_tests/core/full_node/stores/test_full_node_store.py b/chia/_tests/core/full_node/stores/test_full_node_store.py index 328a1ad50649..802939c31a43 100644 --- a/chia/_tests/core/full_node/stores/test_full_node_store.py +++ b/chia/_tests/core/full_node/stores/test_full_node_store.py @@ -452,8 +452,9 @@ async def test_basic_store( normalized_to_identity_cc_sp=normalized_to_identity, ) + 
fork_info = ForkInfo(blocks[0].height - 1, blocks[0].height - 1, blocks[0].prev_header_hash) for block in blocks: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) sb = blockchain.block_record(block.header_hash) next_sub_slot_iters, next_difficulty = get_next_sub_slot_iters_and_difficulty( blockchain.constants, False, sb, blockchain @@ -834,6 +835,7 @@ async def test_basic_store( # Test future EOS cache store.initialize_genesis_sub_slot() + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) blocks = custom_block_tools.get_consecutive_blocks( 1, normalized_to_identity_cc_eos=normalized_to_identity, @@ -841,7 +843,7 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) while True: blocks = custom_block_tools.get_consecutive_blocks( 1, @@ -851,7 +853,7 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) sb = blockchain.block_record(blocks[-1].header_hash) if sb.first_in_sub_slot: break @@ -982,6 +984,7 @@ async def test_basic_store( # i2 ......... i1 # Then do a reorg up to B2, removing all signage points after B2, but not before log.warning(f"Adding blocks up to {blocks[-1]}") + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) for block in blocks: await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) @@ -1042,7 +1045,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp) # Adding a new peak clears all SPs after that peak - await _validate_and_add_block_no_error(blockchain, blocks[-2]) + await _validate_and_add_block_no_error(blockchain, blocks[-2], fork_info=fork_info) peak = blockchain.get_peak() assert peak is not None result = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash) @@ -1090,7 +1093,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: assert_sp_none(i1 + 1, False) assert_sp_none(i1 + 4, False) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await _validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) peak = blockchain.get_peak() assert peak is not None result = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash) @@ -1120,7 +1123,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None: break else: for block in blocks[-2:]: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) @pytest.mark.limit_consensus_modes(reason="save time") diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index de6da463f30e..aad87ec8067d 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -549,8 +549,9 @@ async def test_basic_chain(self, wallet_nodes, self_hostname): assert full_node_1.full_node.blockchain.get_peak().height == 0 + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for block in 
bt.get_consecutive_blocks(30): - await full_node_1.full_node.add_block(block, peer) + await full_node_1.full_node.add_block(block, peer, fork_info=fork_info) assert full_node_1.full_node.blockchain.get_peak().height == 29 @@ -1018,7 +1019,7 @@ async def test_new_transaction_and_mempool(self, wallet_nodes, self_hostname, se block_list_input=blocks[:-1], guarantee_transaction_block=True, ) - await add_blocks_in_batches(blocks[-2:], full_node_1.full_node, blocks[-2].prev_header_hash) + await add_blocks_in_batches(blocks[-2:], full_node_1.full_node) # Can now resubmit a transaction after the reorg status, err = await full_node_1.full_node.add_transaction( successful_bundle, successful_bundle.name(), peer, test=True @@ -2602,13 +2603,13 @@ def check_nodes_in_sync(): assert chain_b[-1].total_iters < chain_a[-1].total_iters - await add_blocks_in_batches(chain_a[-1:], full_node_1.full_node, chain[-1].header_hash) + await add_blocks_in_batches(chain_a[-1:], full_node_1.full_node) await time_out_assert(10, check_nodes_in_sync) await validate_coin_set(full_node_1.full_node.blockchain.coin_store, chain_a) await validate_coin_set(full_node_2.full_node.blockchain.coin_store, chain_a) - await add_blocks_in_batches(chain_b[-1:], full_node_1.full_node, chain[-1].header_hash) + await add_blocks_in_batches(chain_b[-1:], full_node_1.full_node) # make sure node 1 reorged onto chain B assert full_node_1.full_node.blockchain.get_peak().header_hash == chain_b[-1].header_hash @@ -2648,7 +2649,7 @@ def check_nodes_in_sync(): all_coins.append(coin) spend_bundle = wallet_a.generate_signed_transaction(uint64(1_000), receiver_puzzlehash, all_coins.pop()) - await add_blocks_in_batches(chain[-4:], full_node_1.full_node, chain[-5].header_hash) + await add_blocks_in_batches(chain[-4:], full_node_1.full_node) await time_out_assert(10, check_nodes_in_sync) await validate_coin_set(full_node_1.full_node.blockchain.coin_store, chain) await validate_coin_set(full_node_2.full_node.blockchain.coin_store, chain) @@ -2665,8 +2666,7 @@ async def test_eviction_from_bls_cache(one_node_one_block: tuple[FullNodeSimulat blocks = bt.get_consecutive_blocks( 3, guarantee_transaction_block=True, farmer_reward_puzzle_hash=bt.pool_ph, pool_reward_puzzle_hash=bt.pool_ph ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) wt = bt.get_pool_wallet_tool() reward_coins = blocks[-1].get_included_reward_coins() # Setup a test block with two pk msg pairs diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 7c1580d9d70e..ebf260357db8 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -43,6 +43,7 @@ from chia.server.outbound_message import Message from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, test_constants from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -373,8 +374,7 @@ async def next_block(full_node_1: FullNodeSimulator, wallet_a: WalletTool, bt: B time_per_block=10, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 1) return 
blocks[-1].get_included_reward_coins()[0] @@ -569,8 +569,7 @@ async def test_double_spend( ) peer = await connect_and_get_peer(server_1, server_2, self_hostname) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) spend_bundle1 = generate_test_spend_bundle(wallet_a, blocks[-1].get_included_reward_coins()[0]) @@ -615,8 +614,7 @@ async def test_double_spend_with_higher_fee( ) invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) coins = iter(blocks[-1].get_included_reward_coins()) @@ -698,8 +696,7 @@ async def test_invalid_signature( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) coins = iter(blocks[-1].get_included_reward_coins()) @@ -743,8 +740,7 @@ async def condition_tester( else: raise Exception("dummy peer not found") - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + num_blocks) @@ -786,8 +782,7 @@ async def condition_tester2( else: raise Exception("dummy peer not found") - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) @@ -1740,8 +1735,7 @@ async def test_stealing_fee( peer = await connect_and_get_peer(server_1, server_2, bt.config["self_hostname"]) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 5) @@ -1799,8 +1793,7 @@ async def test_double_spend_same_bundle( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) # coin = blocks[-1].get_included_reward_coins()[0] @@ -1848,8 +1841,7 @@ async def test_agg_sig_condition( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, start_height + 3) @@ -2854,8 +2846,7 @@ async def test_invalid_coin_spend_coin( pool_reward_puzzle_hash=reward_ph, ) - for block in blocks: - await full_node_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_1.full_node) await time_out_assert(60, node_height_at_least, True, full_node_1, blocks[-1].height) diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index e8be4bf8f71c..dd30f7d3a0cf 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -22,6 +22,7 @@ from chia.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH from chia.pools.pool_wallet_info import 
PoolSingletonState, PoolWalletInfo from chia.rpc.wallet_rpc_client import WalletRpcClient +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, get_plot_dir from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import ReorgProtocol @@ -432,8 +433,7 @@ async def test_absorb_self( guarantee_transaction_block=True, ) - for block in blocks[-3:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-3:], full_node_api.full_node) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) bal = await client.get_wallet_balance(2) @@ -532,8 +532,7 @@ async def test_absorb_self_multiple_coins( ) block_count = 3 - for block in blocks[-block_count:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-block_count:], full_node_api.full_node) await full_node_api.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) @@ -604,8 +603,7 @@ async def farming_to_pool() -> bool: ) block_count = 3 - for block in blocks[-block_count:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-block_count:], full_node_api.full_node) await full_node_api.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Pooled plots don't have balance @@ -664,8 +662,7 @@ async def status_updated() -> bool: block_list_input=blocks, guarantee_transaction_block=True, ) - for block in blocks[-2:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(blocks[-2:], full_node_api.full_node) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Absorb the farmed reward @@ -1007,8 +1004,7 @@ async def status_is_leaving_no_blocks() -> bool: transaction_data=next(tx.spend_bundle for tx in join_pool_txs if tx.spend_bundle is not None), ) - for block in more_blocks[-3:]: - await full_node_api.full_node.add_block(block) + await add_blocks_in_batches(more_blocks[-3:], full_node_api.full_node) await time_out_assert(timeout=WAIT_SECS, function=status_is_leaving_no_blocks) diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 8e16c7d71831..10c09aa7d4f0 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -188,7 +188,7 @@ async def test_basic_sync_wallet( blocks_reorg = bt.get_consecutive_blocks(num_blocks - 1, block_list_input=default_400_blocks[:-5]) blocks_reorg = bt.get_consecutive_blocks(1, blocks_reorg, guarantee_transaction_block=True, current_time=True) - await add_blocks_in_batches(blocks_reorg[1:], full_node, blocks_reorg[0].header_hash) + await add_blocks_in_batches(blocks_reorg[1:], full_node) for wallet_node, wallet_server in wallets: await time_out_assert( @@ -245,9 +245,7 @@ async def test_almost_recent( blockchain_constants.WEIGHT_PROOF_RECENT_BLOCKS + 10, block_list_input=all_blocks ) - await add_blocks_in_batches( - new_blocks[base_num_blocks + 20 :], full_node, new_blocks[base_num_blocks + 19].header_hash - ) + await add_blocks_in_batches(new_blocks[base_num_blocks + 20 :], full_node) for wallet_node, wallet_server in wallets: wallet = wallet_node.wallet_state_manager.main_wallet @@ -434,7 +432,7 @@ async def test_wallet_reorg_sync( num_blocks 
= 30 blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_400_blocks[:-5]) - await add_blocks_in_batches(blocks_reorg[-30:], full_node, blocks_reorg[-30].prev_header_hash) + await add_blocks_in_batches(blocks_reorg[-30:], full_node) for wallet_node, wallet_server in wallets: wallet = wallet_node.wallet_state_manager.main_wallet diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index f989bfe96344..71e5d49c58a9 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -337,6 +337,21 @@ async def add_block( header_hash: bytes32 = block.header_hash + # passing in correct fork_info is critical for performing reorgs + # correctly, so we perform some validation of it here + assert block.height - 1 == fork_info.peak_height + assert len(fork_info.block_hashes) == fork_info.peak_height - fork_info.fork_height + if fork_info.peak_height == fork_info.fork_height: + # if fork_info is saying we're not on a fork, the previous block better + # be part of the main chain + assert block.prev_header_hash == fork_info.peak_hash + if fork_info.fork_height == -1: + assert fork_info.peak_hash == self.constants.GENESIS_CHALLENGE + else: + assert self.height_to_hash(uint32(fork_info.fork_height)) == block.prev_header_hash + else: + assert fork_info.peak_hash == block.prev_header_hash + if extending_main_chain: fork_info.reset(block.height - 1, block.prev_header_hash) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 3cc37b37c22d..434fd7524e94 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2006,6 +2006,8 @@ async def add_block( # Adds the block to seen, and check if it's seen before (which means header is in memory) header_hash = block.header_hash if self.blockchain.contains_block(header_hash): + if fork_info is not None: + await self.blockchain.run_single_block(block, fork_info) return None pre_validation_result: Optional[PreValidationResult] = None @@ -2078,6 +2080,8 @@ async def add_block( ): # After acquiring the lock, check again, because another asyncio thread might have added it if self.blockchain.contains_block(header_hash): + if fork_info is not None: + await self.blockchain.run_single_block(block, fork_info) return None validation_start = time.monotonic() # Tries to add the block to the blockchain, if we already validated transactions, don't do it again diff --git a/chia/simulator/add_blocks_in_batches.py b/chia/simulator/add_blocks_in_batches.py index 712fe9d804d3..dc0a1910060b 100644 --- a/chia/simulator/add_blocks_in_batches.py +++ b/chia/simulator/add_blocks_in_batches.py @@ -5,7 +5,6 @@ from chia.consensus.block_body_validation import ForkInfo from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty from chia.full_node.full_node import FullNode, PeakPostProcessingResult -from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState @@ -16,21 +15,23 @@ async def add_blocks_in_batches( blocks: list[FullBlock], full_node: FullNode, - header_hash: Optional[bytes32] = None, ) -> None: - if header_hash is None: + peak_hash = blocks[0].prev_header_hash + if blocks[0].height == 0: + assert peak_hash == full_node.constants.GENESIS_CHALLENGE diff = full_node.constants.DIFFICULTY_STARTING ssi = full_node.constants.SUB_SLOT_ITERS_STARTING fork_height = -1 - fork_info = ForkInfo(-1, fork_height, 
full_node.constants.GENESIS_CHALLENGE) else: - block_record = await full_node.blockchain.get_block_record_from_db(header_hash) + # assume the fork point is immediately before the + # batch of block we're about to add + block_record = await full_node.blockchain.get_block_record_from_db(peak_hash) assert block_record is not None ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) fork_height = block_record.height - fork_info = ForkInfo(block_record.height, fork_height, block_record.header_hash) + fork_info = ForkInfo(fork_height, blocks[0].height - 1, peak_hash) vs = ValidationState(ssi, diff, None) diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index a3334fab3e3a..17cec22dfd5a 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -300,7 +300,7 @@ async def reorg_from_index_to_new_index(self, request: ReorgProtocol): guarantee_transaction_block=True, seed=seed, ) - await add_blocks_in_batches(more_blocks, self.full_node, current_blocks[old_index].header_hash) + await add_blocks_in_batches(more_blocks[old_index + 1 :], self.full_node) async def farm_blocks_to_puzzlehash( self, From 214ecf79dbaac1e983f854e8c85880f8ccb65a21 Mon Sep 17 00:00:00 2001 From: ChiaAutomation <85647627+ChiaAutomation@users.noreply.github.com> Date: Tue, 10 Dec 2024 13:43:35 -0600 Subject: [PATCH 03/25] Update Managed Files (#19012) Update dependabot --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 910400b75710..135d2e989164 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -26,7 +26,7 @@ updates: interval: "weekly" day: "tuesday" open-pull-requests-limit: 10 - rebase-strategy: disabled + rebase-strategy: auto labels: - dependencies - python From c4e714eeab4d804613a8f508ad1cd12b18e489af Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Wed, 11 Dec 2024 17:09:09 +0100 Subject: [PATCH 04/25] [CHIA-1829] don't drop outgoing response messages (#18990) never rate limit outgoing response messages (RespondBlock, RespondBlocks, RejectBlock, RejectBlocks). 
Instead, disconnect any peer sending unsolicited response blocks --- chia/_tests/core/server/test_rate_limits.py | 121 +++++++++++++++++++- chia/full_node/full_node_api.py | 37 ++++-- chia/protocols/protocol_timing.py | 1 + chia/server/rate_limit_numbers.py | 18 ++- chia/server/rate_limits.py | 94 ++++++++------- chia/server/ws_connection.py | 5 +- 6 files changed, 213 insertions(+), 63 deletions(-) diff --git a/chia/_tests/core/server/test_rate_limits.py b/chia/_tests/core/server/test_rate_limits.py index 8dc646f8074f..196892cbf9d0 100644 --- a/chia/_tests/core/server/test_rate_limits.py +++ b/chia/_tests/core/server/test_rate_limits.py @@ -5,6 +5,8 @@ import pytest from chia._tests.conftest import node_with_params +from chia._tests.util.time_out_assert import time_out_assert +from chia.protocols.full_node_protocol import RejectBlock, RejectBlocks, RespondBlock, RespondBlocks from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability from chia.server.outbound_message import make_msg @@ -13,7 +15,9 @@ from chia.server.rate_limits import RateLimiter from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection +from chia.simulator.block_tools import BlockTools from chia.types.peer_info import PeerInfo +from chia.util.ints import uint32 rl_v2 = [Capability.BASE, Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2] rl_v1 = [Capability.BASE] @@ -68,10 +72,13 @@ async def test_large_message(self): small_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 5 * 1024)) large_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 600 * 1024)) + large_blocks_message = make_msg(ProtocolMessageTypes.respond_blocks, bytes([1] * 51 * 1024 * 1024)) r = RateLimiter(incoming=True) assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None assert r.process_msg_and_check(large_vdf_message, rl_v2, rl_v2) is not None + # this limit applies even though this message type is unlimited + assert r.process_msg_and_check(large_blocks_message, rl_v2, rl_v2) is not None @pytest.mark.anyio async def test_too_much_data(self): @@ -89,7 +96,7 @@ async def test_too_much_data(self): assert saw_disconnect r = RateLimiter(incoming=True) - block_message = make_msg(ProtocolMessageTypes.respond_block, bytes([1] * 1024 * 1024)) + block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) for i in range(10): assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None @@ -124,7 +131,7 @@ async def test_non_tx_aggregate_limits(self): # Size limits r = RateLimiter(incoming=True) message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 49 * 1024 * 1024)) - message_5 = make_msg(ProtocolMessageTypes.respond_blocks, bytes([1] * 49 * 1024 * 1024)) + message_5 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 49 * 1024 * 1024)) for i in range(2): assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None @@ -183,7 +190,7 @@ async def test_percentage_limits(self): assert saw_disconnect r = RateLimiter(True, 60, 40) - block_message = make_msg(ProtocolMessageTypes.respond_block, bytes([1] * 1024 * 1024)) + block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) for i in range(5): assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None @@ -215,7 +222,7 @@ async def 
test_percentage_limits(self): # Aggregate percentage limit max total size r = RateLimiter(True, 60, 40) message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 18 * 1024 * 1024)) - message_5 = make_msg(ProtocolMessageTypes.respond_blocks, bytes([1] * 24 * 1024 * 1024)) + message_5 = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 24 * 1024 * 1024)) for i in range(2): assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None @@ -367,3 +374,109 @@ async def test_compose(self): # Otherwise, fall back to v1 assert ProtocolMessageTypes.request_block in rl_1["rate_limits_other"] assert ProtocolMessageTypes.request_block not in rl_1["rate_limits_tx"] + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "msg_type, size", + [ + (ProtocolMessageTypes.respond_blocks, 10 * 1024 * 1024), + (ProtocolMessageTypes.reject_blocks, 90), + (ProtocolMessageTypes.respond_block, 1024 * 1024), + (ProtocolMessageTypes.reject_block, 90), + ], +) +async def test_unlimited(msg_type: ProtocolMessageTypes, size: int): + r = RateLimiter(incoming=False) + + message = make_msg(msg_type, bytes([1] * size)) + + for i in range(1000): + # since this is a backwards compatible change, it also affects V1 + assert r.process_msg_and_check(message, rl_v1, rl_v1) is None + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "msg_type", + [ + ProtocolMessageTypes.respond_blocks, + ProtocolMessageTypes.reject_blocks, + ProtocolMessageTypes.respond_block, + ProtocolMessageTypes.reject_block, + ], +) +@pytest.mark.parametrize( + "node_with_params", + [ + pytest.param( + dict( + disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], + ), + id="V1", + ), + pytest.param( + dict( + disable_capabilities=[], + ), + id="V2", + ), + ], + indirect=True, +) +@pytest.mark.parametrize( + "node_with_params_b", + [ + pytest.param( + dict( + disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], + ), + id="V1", + ), + pytest.param( + dict( + disable_capabilities=[], + ), + id="V2", + ), + ], + indirect=True, +) +async def test_unsolicited_responses( + node_with_params, node_with_params_b, self_hostname: str, msg_type: ProtocolMessageTypes, bt: BlockTools +): + node_a = node_with_params + node_b = node_with_params_b + + msg = { + ProtocolMessageTypes.respond_blocks: make_msg( + ProtocolMessageTypes.respond_blocks, bytes(RespondBlocks(uint32(1), uint32(2), [])) + ), + ProtocolMessageTypes.reject_blocks: make_msg( + ProtocolMessageTypes.reject_blocks, bytes(RejectBlocks(uint32(1), uint32(2))) + ), + ProtocolMessageTypes.respond_block: make_msg( + ProtocolMessageTypes.respond_block, bytes(RespondBlock(bt.get_consecutive_blocks(1)[0])) + ), + ProtocolMessageTypes.reject_block: make_msg(ProtocolMessageTypes.reject_block, bytes(RejectBlock(uint32(0)))), + }[msg_type] + + full_node_server_a: ChiaServer = node_a.full_node.server + full_node_server_b: ChiaServer = node_b.full_node.server + + await full_node_server_b.start_client(PeerInfo(self_hostname, full_node_server_a.get_port()), None) + + assert len(full_node_server_b.get_connections()) == 1 + assert len(full_node_server_a.get_connections()) == 1 + + a_con: WSChiaConnection = full_node_server_a.get_connections()[0] + b_con: WSChiaConnection = full_node_server_b.get_connections()[0] + + assert not a_con.closed + assert not b_con.closed + + await a_con.send_message(msg) + + # make sure the connection is closed because of the unsolicited response + # message + await time_out_assert(5, lambda: a_con.closed) diff --git 
a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index 6bf09d867d1d..aaa50715946c 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -34,6 +34,7 @@ from chia.protocols import farmer_protocol, full_node_protocol, introducer_protocol, timelord_protocol, wallet_protocol from chia.protocols.full_node_protocol import RejectBlock, RejectBlocks from chia.protocols.protocol_message_types import ProtocolMessageTypes +from chia.protocols.protocol_timing import RATE_LIMITER_BAN_SECONDS from chia.protocols.shared_protocol import Capability from chia.protocols.wallet_protocol import ( CoinState, @@ -402,17 +403,32 @@ async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Opt return msg - @metadata.request() - async def reject_block(self, request: full_node_protocol.RejectBlock) -> None: - self.log.debug(f"reject_block {request.height}") + @metadata.request(peer_required=True) + async def reject_block( + self, + request: full_node_protocol.RejectBlock, + peer: WSChiaConnection, + ) -> None: + self.log.warning(f"unsolicited reject_block {request.height}") + await peer.close(RATE_LIMITER_BAN_SECONDS) - @metadata.request() - async def reject_blocks(self, request: full_node_protocol.RejectBlocks) -> None: - self.log.debug(f"reject_blocks {request.start_height} {request.end_height}") + @metadata.request(peer_required=True) + async def reject_blocks( + self, + request: full_node_protocol.RejectBlocks, + peer: WSChiaConnection, + ) -> None: + self.log.warning(f"reject_blocks {request.start_height} {request.end_height}") + await peer.close(RATE_LIMITER_BAN_SECONDS) - @metadata.request() - async def respond_blocks(self, request: full_node_protocol.RespondBlocks) -> None: + @metadata.request(peer_required=True) + async def respond_blocks( + self, + request: full_node_protocol.RespondBlocks, + peer: WSChiaConnection, + ) -> None: self.log.warning("Received unsolicited/late blocks") + await peer.close(RATE_LIMITER_BAN_SECONDS) @metadata.request(peer_required=True) async def respond_block( @@ -420,11 +436,8 @@ async def respond_block( respond_block: full_node_protocol.RespondBlock, peer: WSChiaConnection, ) -> Optional[Message]: - """ - Receive a full block from a peer full node (or ourselves). - """ - self.log.warning(f"Received unsolicited/late block from peer {peer.get_peer_logging()}") + await peer.close(RATE_LIMITER_BAN_SECONDS) return None @metadata.request() diff --git a/chia/protocols/protocol_timing.py b/chia/protocols/protocol_timing.py index 215d4e2a1859..1015da5ba0a2 100644 --- a/chia/protocols/protocol_timing.py +++ b/chia/protocols/protocol_timing.py @@ -5,3 +5,4 @@ API_EXCEPTION_BAN_SECONDS = 10 INTERNAL_PROTOCOL_ERROR_BAN_SECONDS = 10 # Don't flap if our client is at fault CONSENSUS_ERROR_BAN_SECONDS = 600 +RATE_LIMITER_BAN_SECONDS = 300 diff --git a/chia/server/rate_limit_numbers.py b/chia/server/rate_limit_numbers.py index 521cf73ac134..9d7c8123a0eb 100644 --- a/chia/server/rate_limit_numbers.py +++ b/chia/server/rate_limit_numbers.py @@ -12,6 +12,8 @@ compose_rate_limits_cache: dict[int, dict[str, Any]] = {} +# this class is used to configure the *rate* limit for a message type. The +# limits are counts and size per 60 seconds. 
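
Editorial note: the hunk that continues below distinguishes two limit shapes in `rate_limit_numbers.py` — the existing `RLSettings` (message count, per-message size, and cumulative size per 60-second window) and a new `Unlimited` marker that keeps only a per-message size cap, relying on the unsolicited-response bans added earlier in this patch to prevent abuse. The following is a minimal, self-contained sketch of that distinction; the dataclass and field names mirror the patch, but the `over_limit` helper and the check logic are simplified illustrations, not the actual `rate_limits.py` code.

```python
# Illustration only: simplified model of the two limit shapes in this patch.
import dataclasses
from typing import Optional, Union


@dataclasses.dataclass(frozen=True)
class RLSettings:
    frequency: int  # max messages per 60-second window
    max_size: int  # max size of a single message
    max_total_size: Optional[int] = None  # max cumulative size per window


@dataclasses.dataclass(frozen=True)
class Unlimited:
    max_size: int  # only a per-message size cap; no count or cumulative cap


def over_limit(limit: Union[RLSettings, Unlimited], count: int, size: int, total: int) -> bool:
    # Unlimited: reject only if a single message is too large
    if isinstance(limit, Unlimited):
        return size > limit.max_size
    # RLSettings: reject on count, per-message size, or cumulative size
    cap = limit.max_total_size if limit.max_total_size is not None else limit.frequency * limit.max_size
    return count > limit.frequency or size > limit.max_size or total > cap


# e.g. respond_blocks becomes Unlimited(50 MiB): any number of 10 MiB responses
# pass, but a single 51 MiB message is rejected.
assert not over_limit(Unlimited(50 * 1024 * 1024), count=1000, size=10 * 1024 * 1024, total=10**10)
assert over_limit(Unlimited(50 * 1024 * 1024), count=1, size=51 * 1024 * 1024, total=0)
```
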
@dataclasses.dataclass(frozen=True) class RLSettings: frequency: int # Max request per time period (ie 1 min) @@ -19,6 +21,14 @@ class RLSettings: max_total_size: Optional[int] = None # Max cumulative size of all requests in that period +# this class is used to indicate that a message type is not subject to a rate +# limit, but just a per-message size limit. This may be appropriate for response +# messages that are implicitly limited by their corresponding request message +@dataclasses.dataclass(frozen=True) +class Unlimited: + max_size: int # Max size of each request + + def get_rate_limits_to_use(our_capabilities: list[Capability], peer_capabilities: list[Capability]) -> dict[str, Any]: # This will use the newest possible rate limits that both peers support. At this time there are only two # options, v1 and v2. @@ -94,11 +104,11 @@ def compose_rate_limits(old_rate_limits: dict[str, Any], new_rate_limits: dict[s ProtocolMessageTypes.request_proof_of_weight: RLSettings(5, 100), ProtocolMessageTypes.respond_proof_of_weight: RLSettings(5, 50 * 1024 * 1024, 100 * 1024 * 1024), ProtocolMessageTypes.request_block: RLSettings(200, 100), - ProtocolMessageTypes.reject_block: RLSettings(200, 100), + ProtocolMessageTypes.reject_block: Unlimited(100), ProtocolMessageTypes.request_blocks: RLSettings(500, 100), - ProtocolMessageTypes.respond_blocks: RLSettings(100, 50 * 1024 * 1024, 5 * 50 * 1024 * 1024), - ProtocolMessageTypes.reject_blocks: RLSettings(100, 100), - ProtocolMessageTypes.respond_block: RLSettings(200, 2 * 1024 * 1024, 10 * 2 * 1024 * 1024), + ProtocolMessageTypes.respond_blocks: Unlimited(50 * 1024 * 1024), + ProtocolMessageTypes.reject_blocks: Unlimited(100), + ProtocolMessageTypes.respond_block: Unlimited(2 * 1024 * 1024), ProtocolMessageTypes.new_unfinished_block: RLSettings(200, 100), ProtocolMessageTypes.request_unfinished_block: RLSettings(200, 100), ProtocolMessageTypes.new_unfinished_block2: RLSettings(200, 100), diff --git a/chia/server/rate_limits.py b/chia/server/rate_limits.py index ab39b0d34afb..02e0e7cddabe 100644 --- a/chia/server/rate_limits.py +++ b/chia/server/rate_limits.py @@ -9,7 +9,7 @@ from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability from chia.server.outbound_message import Message -from chia.server.rate_limit_numbers import RLSettings, get_rate_limits_to_use +from chia.server.rate_limit_numbers import RLSettings, Unlimited, get_rate_limits_to_use log = logging.getLogger(__name__) @@ -79,56 +79,68 @@ def process_msg_and_check( limits = rate_limits["rate_limits_tx"][message_type] elif message_type in rate_limits["rate_limits_other"]: limits = rate_limits["rate_limits_other"][message_type] - non_tx_freq = rate_limits["non_tx_freq"] - non_tx_max_total_size = rate_limits["non_tx_max_total_size"] - new_non_tx_count = self.non_tx_message_counts + 1 - new_non_tx_size = self.non_tx_cumulative_size + len(message.data) - if new_non_tx_count > non_tx_freq * proportion_of_limit: + if isinstance(limits, RLSettings): + non_tx_freq = rate_limits["non_tx_freq"] + non_tx_max_total_size = rate_limits["non_tx_max_total_size"] + new_non_tx_count = self.non_tx_message_counts + 1 + new_non_tx_size = self.non_tx_cumulative_size + len(message.data) + if new_non_tx_count > non_tx_freq * proportion_of_limit: + return " ".join( + [ + f"non-tx count: {new_non_tx_count}", + f"> {non_tx_freq * proportion_of_limit}", + f"(scale factor: {proportion_of_limit})", + ] + ) + if new_non_tx_size > non_tx_max_total_size * 
proportion_of_limit: + return " ".join( + [ + f"non-tx size: {new_non_tx_size}", + f"> {non_tx_max_total_size * proportion_of_limit}", + f"(scale factor: {proportion_of_limit})", + ] + ) + else: # pragma: no cover + log.warning( + f"Message type {message_type} not found in rate limits (scale factor: {proportion_of_limit})", + ) + + if isinstance(limits, Unlimited): + # this message type is not rate limited. This is used for + # response messages and must be combined with banning peers + # sending unsolicited responses of this type + if len(message.data) > limits.max_size: + return f"message size: {len(message.data)} > {limits.max_size}" + ret = True + return None + elif isinstance(limits, RLSettings): + if limits.max_total_size is None: + limits = dataclasses.replace(limits, max_total_size=limits.frequency * limits.max_size) + assert limits.max_total_size is not None + + if new_message_counts > limits.frequency * proportion_of_limit: return " ".join( [ - f"non-tx count: {new_non_tx_count}", - f"> {non_tx_freq * proportion_of_limit}", - f"(scale factor: {proportion_of_limit})", + f"message count: {new_message_counts}" + f"> {limits.frequency * proportion_of_limit}" + f"(scale factor: {proportion_of_limit})" ] ) - if new_non_tx_size > non_tx_max_total_size * proportion_of_limit: + if len(message.data) > limits.max_size: + return f"message size: {len(message.data)} > {limits.max_size}" + if new_cumulative_size > limits.max_total_size * proportion_of_limit: return " ".join( [ - f"non-tx size: {new_non_tx_size}", - f"> {non_tx_max_total_size * proportion_of_limit}", + f"cumulative size: {new_cumulative_size}", + f"> {limits.max_total_size * proportion_of_limit}", f"(scale factor: {proportion_of_limit})", ] ) - else: - log.warning( - f"Message type {message_type} not found in rate limits (scale factor: {proportion_of_limit})", - ) - if limits.max_total_size is None: - limits = dataclasses.replace(limits, max_total_size=limits.frequency * limits.max_size) - assert limits.max_total_size is not None - - if new_message_counts > limits.frequency * proportion_of_limit: - return " ".join( - [ - f"message count: {new_message_counts}" - f"> {limits.frequency * proportion_of_limit}" - f"(scale factor: {proportion_of_limit})" - ] - ) - if len(message.data) > limits.max_size: - return f"message size: {len(message.data)} > {limits.max_size}" - if new_cumulative_size > limits.max_total_size * proportion_of_limit: - return " ".join( - [ - f"cumulative size: {new_cumulative_size}", - f"> {limits.max_total_size * proportion_of_limit}", - f"(scale factor: {proportion_of_limit})", - ] - ) - - ret = True - return None + ret = True + return None + else: # pragma: no cover + return f"Internal Error, unknown rate limit for message: {message_type}, limit: {limits}" finally: if self.incoming or ret: # now that we determined that it's OK to send the message, commit the diff --git a/chia/server/ws_connection.py b/chia/server/ws_connection.py index 093c5efa7cfb..5528c1eba178 100644 --- a/chia/server/ws_connection.py +++ b/chia/server/ws_connection.py @@ -22,6 +22,7 @@ API_EXCEPTION_BAN_SECONDS, CONSENSUS_ERROR_BAN_SECONDS, INTERNAL_PROTOCOL_ERROR_BAN_SECONDS, + RATE_LIMITER_BAN_SECONDS, ) from chia.protocols.shared_protocol import Capability, Error, Handshake, protocol_version from chia.server.api_protocol import ApiMetadata, ApiProtocol @@ -713,7 +714,7 @@ async def _read_one_message(self) -> Optional[Message]: self.log.error(f"Peer has been rate limited and will be disconnected: {details}") # Only full node disconnects 
peers, to prevent abuse and crashing timelords, farmers, etc # TODO: stop dropping tasks on the floor - asyncio.create_task(self.close(300)) # noqa: RUF006 + asyncio.create_task(self.close(RATE_LIMITER_BAN_SECONDS)) # noqa: RUF006 await asyncio.sleep(3) return None else: @@ -727,7 +728,7 @@ async def _read_one_message(self) -> Optional[Message]: self.log.error(f"WebSocket Error: {message}") if isinstance(message.data, WebSocketError) and message.data.code == WSCloseCode.MESSAGE_TOO_BIG: # TODO: stop dropping tasks on the floor - asyncio.create_task(self.close(300)) # noqa: RUF006 + asyncio.create_task(self.close(RATE_LIMITER_BAN_SECONDS)) # noqa: RUF006 else: # TODO: stop dropping tasks on the floor asyncio.create_task(self.close()) # noqa: RUF006 From a4a4be3bda6bec2aad0a237c9958b29964b98cce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:13:00 -0700 Subject: [PATCH 05/25] build(deps): bump aiohttp from 3.11.9 to 3.11.10 (#19015) Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.11.9 to 3.11.10. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.11.9...v3.11.10) --- updated-dependencies: - dependency-name: aiohttp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 154 ++++++++++++++++++++++++++-------------------------- 1 file changed, 77 insertions(+), 77 deletions(-) diff --git a/poetry.lock b/poetry.lock index a8d581a7673e..bb2aece0344f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,87 +24,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.11.9" +version = "3.11.10" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" files = [ - {file = "aiohttp-3.11.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0411777249f25d11bd2964a230b3ffafcbed6cd65d0f2b132bc2b8f5b8c347c7"}, - {file = "aiohttp-3.11.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:499368eb904566fbdf1a3836a1532000ef1308f34a1bcbf36e6351904cced771"}, - {file = "aiohttp-3.11.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b5a5009b0159a8f707879dc102b139466d8ec6db05103ec1520394fdd8ea02c"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:176f8bb8931da0613bb0ed16326d01330066bb1e172dd97e1e02b1c27383277b"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6435a66957cdba1a0b16f368bde03ce9c79c57306b39510da6ae5312a1a5b2c1"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:202f40fb686e5f93908eee0c75d1e6fbe50a43e9bd4909bf3bf4a56b560ca180"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39625703540feb50b6b7f938b3856d1f4886d2e585d88274e62b1bd273fae09b"}, - {file = "aiohttp-3.11.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6beeac698671baa558e82fa160be9761cf0eb25861943f4689ecf9000f8ebd0"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:96726839a42429318017e67a42cca75d4f0d5248a809b3cc2e125445edd7d50d"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_i686.whl", 
hash = "sha256:3f5461c77649358610fb9694e790956b4238ac5d9e697a17f63619c096469afe"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4313f3bc901255b22f01663eeeae167468264fdae0d32c25fc631d5d6e15b502"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:d6e274661c74195708fc4380a4ef64298926c5a50bb10fbae3d01627d7a075b7"}, - {file = "aiohttp-3.11.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db2914de2559809fdbcf3e48f41b17a493b58cb7988d3e211f6b63126c55fe82"}, - {file = "aiohttp-3.11.9-cp310-cp310-win32.whl", hash = "sha256:27935716f8d62c1c73010428db310fd10136002cfc6d52b0ba7bdfa752d26066"}, - {file = "aiohttp-3.11.9-cp310-cp310-win_amd64.whl", hash = "sha256:afbe85b50ade42ddff5669947afde9e8a610e64d2c80be046d67ec4368e555fa"}, - {file = "aiohttp-3.11.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:afcda759a69c6a8be3aae764ec6733155aa4a5ad9aad4f398b52ba4037942fe3"}, - {file = "aiohttp-3.11.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5bba6b83fde4ca233cfda04cbd4685ab88696b0c8eaf76f7148969eab5e248a"}, - {file = "aiohttp-3.11.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:442356e8924fe1a121f8c87866b0ecdc785757fd28924b17c20493961b3d6697"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f737fef6e117856400afee4f17774cdea392b28ecf058833f5eca368a18cf1bf"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea142255d4901b03f89cb6a94411ecec117786a76fc9ab043af8f51dd50b5313"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1e9e447856e9b7b3d38e1316ae9a8c92e7536ef48373de758ea055edfd5db5"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7f6173302f8a329ca5d1ee592af9e628d3ade87816e9958dcf7cdae2841def7"}, - {file = "aiohttp-3.11.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c6147c6306f537cff59409609508a1d2eff81199f0302dd456bb9e7ea50c39"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e9d036a9a41fc78e8a3f10a86c2fc1098fca8fab8715ba9eb999ce4788d35df0"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2ac9fd83096df36728da8e2f4488ac3b5602238f602706606f3702f07a13a409"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3108f0ad5c6b6d78eec5273219a5bbd884b4aacec17883ceefaac988850ce6e"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:96bbec47beb131bbf4bae05d8ef99ad9e5738f12717cfbbf16648b78b0232e87"}, - {file = "aiohttp-3.11.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fc726c3fa8f606d07bd2b500e5dc4c0fd664c59be7788a16b9e34352c50b6b6b"}, - {file = "aiohttp-3.11.9-cp311-cp311-win32.whl", hash = "sha256:5720ebbc7a1b46c33a42d489d25d36c64c419f52159485e55589fbec648ea49a"}, - {file = "aiohttp-3.11.9-cp311-cp311-win_amd64.whl", hash = "sha256:17af09d963fa1acd7e4c280e9354aeafd9e3d47eaa4a6bfbd2171ad7da49f0c5"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c1f2d7fd583fc79c240094b3e7237d88493814d4b300d013a42726c35a734bc9"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4b8a1b6c7a68c73191f2ebd3bf66f7ce02f9c374e309bdb68ba886bbbf1b938"}, - {file = "aiohttp-3.11.9-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:bd3f711f4c99da0091ced41dccdc1bcf8be0281dc314d6d9c6b6cf5df66f37a9"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cb1a1326a0264480a789e6100dc3e07122eb8cd1ad6b784a3d47d13ed1d89c"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a7ddf981a0b953ade1c2379052d47ccda2f58ab678fca0671c7c7ca2f67aac2"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ffa45cc55b18d4ac1396d1ddb029f139b1d3480f1594130e62bceadf2e1a838"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cca505829cdab58c2495ff418c96092d225a1bbd486f79017f6de915580d3c44"}, - {file = "aiohttp-3.11.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44d323aa80a867cb6db6bebb4bbec677c6478e38128847f2c6b0f70eae984d72"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b2fab23003c4bb2249729a7290a76c1dda38c438300fdf97d4e42bf78b19c810"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:be0c7c98e38a1e3ad7a6ff64af8b6d6db34bf5a41b1478e24c3c74d9e7f8ed42"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cc5e0d069c56645446c45a4b5010d4b33ac6c5ebfd369a791b5f097e46a3c08"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9bcf97b971289be69638d8b1b616f7e557e1342debc7fc86cf89d3f08960e411"}, - {file = "aiohttp-3.11.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c7333e7239415076d1418dbfb7fa4df48f3a5b00f8fdf854fca549080455bc14"}, - {file = "aiohttp-3.11.9-cp312-cp312-win32.whl", hash = "sha256:9384b07cfd3045b37b05ed002d1c255db02fb96506ad65f0f9b776b762a7572e"}, - {file = "aiohttp-3.11.9-cp312-cp312-win_amd64.whl", hash = "sha256:f5252ba8b43906f206048fa569debf2cd0da0316e8d5b4d25abe53307f573941"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:282e0a7ddd36ebc411f156aeaa0491e8fe7f030e2a95da532cf0c84b0b70bc66"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebd3e6b0c7d4954cca59d241970011f8d3327633d555051c430bd09ff49dc494"}, - {file = "aiohttp-3.11.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30f9f89ae625d412043f12ca3771b2ccec227cc93b93bb1f994db6e1af40a7d3"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a3b5b2c012d70c63d9d13c57ed1603709a4d9d7d473e4a9dfece0e4ea3d5f51"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ef1550bb5f55f71b97a6a395286db07f7f2c01c8890e613556df9a51da91e8d"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317251b9c9a2f1a9ff9cd093775b34c6861d1d7df9439ce3d32a88c275c995cd"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cbe97839b009826a61b143d3ca4964c8590d7aed33d6118125e5b71691ca46"}, - {file = "aiohttp-3.11.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:618b18c3a2360ac940a5503da14fa4f880c5b9bc315ec20a830357bcc62e6bae"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0cf4d814689e58f57ecd5d8c523e6538417ca2e72ff52c007c64065cef50fb2"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:15c4e489942d987d5dac0ba39e5772dcbed4cc9ae3710d1025d5ba95e4a5349c"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ec8df0ff5a911c6d21957a9182402aad7bf060eaeffd77c9ea1c16aecab5adbf"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ed95d66745f53e129e935ad726167d3a6cb18c5d33df3165974d54742c373868"}, - {file = "aiohttp-3.11.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:647ec5bee7e4ec9f1034ab48173b5fa970d9a991e565549b965e93331f1328fe"}, - {file = "aiohttp-3.11.9-cp313-cp313-win32.whl", hash = "sha256:ef2c9499b7bd1e24e473dc1a85de55d72fd084eea3d8bdeec7ee0720decb54fa"}, - {file = "aiohttp-3.11.9-cp313-cp313-win_amd64.whl", hash = "sha256:84de955314aa5e8d469b00b14d6d714b008087a0222b0f743e7ffac34ef56aff"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e738aabff3586091221044b7a584865ddc4d6120346d12e28e788307cd731043"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28f29bce89c3b401a53d6fd4bee401ee943083bf2bdc12ef297c1d63155070b0"}, - {file = "aiohttp-3.11.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31de2f10f63f96cc19e04bd2df9549559beadd0b2ee2da24a17e7ed877ca8c60"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f31cebd8c27a36af6c7346055ac564946e562080ee1a838da724585c67474f"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bcb7f6976dc0b6b56efde13294862adf68dd48854111b422a336fa729a82ea6"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8b13b9950d8b2f8f58b6e5842c4b842b5887e2c32e3f4644d6642f1659a530"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c23e62f3545c2216100603614f9e019e41b9403c47dd85b8e7e5015bf1bde0"}, - {file = "aiohttp-3.11.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec656680fc53a13f849c71afd0c84a55c536206d524cbc831cde80abbe80489e"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:36df00e0541f264ce42d62280281541a47474dfda500bc5b7f24f70a7f87be7a"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8dcfd14c712aa9dd18049280bfb2f95700ff6a8bde645e09f17c3ed3f05a0130"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14624d96f0d69cf451deed3173079a68c322279be6030208b045ab77e1e8d550"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4b01d9cfcb616eeb6d40f02e66bebfe7b06d9f2ef81641fdd50b8dd981166e0b"}, - {file = "aiohttp-3.11.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:928f92f80e2e8d6567b87d3316c1fd9860ccfe36e87a9a7f5237d4cda8baa1ba"}, - {file = "aiohttp-3.11.9-cp39-cp39-win32.whl", hash = "sha256:c8a02f74ae419e3955af60f570d83187423e42e672a6433c5e292f1d23619269"}, - {file = "aiohttp-3.11.9-cp39-cp39-win_amd64.whl", hash = "sha256:0a97d657f6cf8782a830bb476c13f7d777cfcab8428ac49dde15c22babceb361"}, - {file = "aiohttp-3.11.9.tar.gz", hash = "sha256:a9266644064779840feec0e34f10a89b3ff1d2d6b751fe90017abcad1864fa7c"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d"}, + {file = "aiohttp-3.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f"}, + {file = 
"aiohttp-3.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df"}, + {file = "aiohttp-3.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767"}, + {file = "aiohttp-3.11.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d"}, + {file = "aiohttp-3.11.10-cp310-cp310-win32.whl", hash = "sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91"}, + {file = "aiohttp-3.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1"}, + {file = "aiohttp-3.11.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0"}, + {file = "aiohttp-3.11.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46"}, + {file = 
"aiohttp-3.11.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52"}, + {file = "aiohttp-3.11.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3"}, + {file = "aiohttp-3.11.10-cp311-cp311-win32.whl", hash = "sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4"}, + {file = "aiohttp-3.11.10-cp311-cp311-win_amd64.whl", hash = "sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138"}, + {file = "aiohttp-3.11.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b"}, + {file = "aiohttp-3.11.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9"}, + {file = "aiohttp-3.11.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc"}, + {file = "aiohttp-3.11.10-cp312-cp312-win32.whl", hash = "sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985"}, + {file = "aiohttp-3.11.10-cp312-cp312-win_amd64.whl", hash = "sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816"}, + {file = "aiohttp-3.11.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf"}, + {file = 
"aiohttp-3.11.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99"}, + {file = "aiohttp-3.11.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60"}, + {file = "aiohttp-3.11.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836"}, + {file = "aiohttp-3.11.10-cp313-cp313-win32.whl", hash = "sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c"}, + {file = "aiohttp-3.11.10-cp313-cp313-win_amd64.whl", hash = "sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f"}, + {file = "aiohttp-3.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf"}, + {file = "aiohttp-3.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82"}, + {file = 
"aiohttp-3.11.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1"}, + {file = "aiohttp-3.11.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4"}, + {file = "aiohttp-3.11.10-cp39-cp39-win32.whl", hash = "sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be"}, + {file = "aiohttp-3.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74"}, + {file = "aiohttp-3.11.10.tar.gz", hash = "sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e"}, ] [package.dependencies] From f6e08ba7e4535112cf31c67b4a7b36d3099676ca Mon Sep 17 00:00:00 2001 From: Almog De Paz Date: Wed, 11 Dec 2024 18:16:24 +0200 Subject: [PATCH 06/25] Track weight proof tasks (#18896) * keep track of wp tasks * use cancel_task_safe * await at shutdown * await when done * Update chia/full_node/full_node.py Co-authored-by: Kyle Altendorf --------- Co-authored-by: Kyle Altendorf --- chia/full_node/full_node.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 434fd7524e94..623abfa6f367 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -127,7 +127,7 @@ class FullNode: log: logging.Logger db_path: Path wallet_sync_queue: asyncio.Queue[WalletUpdate] - _segment_task: Optional[asyncio.Task[None]] = None + _segment_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list) initialized: bool = False _server: Optional[ChiaServer] = None _shut_down: bool = False @@ -373,7 +373,8 @@ async def manage(self) -> AsyncIterator[None]: for one_sync_task in self._sync_task_list: if not one_sync_task.done(): cancel_task_safe(task=one_sync_task, log=self.log) - + for segment_task in self._segment_task_list: + cancel_task_safe(segment_task, self.log) for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()): cancel_task_safe(task, self.log) if self._init_weight_proof is not None: @@ -392,6 +393,7 @@ async def manage(self) -> AsyncIterator[None]: with contextlib.suppress(asyncio.CancelledError): self.log.info(f"Awaiting long sync task {one_sync_task.get_name()}") await one_sync_task + await asyncio.gather(*self._segment_task_list, return_exceptions=True) @property def block_store(self) -> BlockStore: @@ -602,12 +604,11 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t return False batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS - if self._segment_task is not None and (not self._segment_task.done()): - try: - self._segment_task.cancel() - except Exception as e: - self.log.warning(f"failed to cancel segment task {e}") - self._segment_task = None + for task in self._segment_task_list[:]: + if task.done(): + self._segment_task_list.remove(task) + else: + cancel_task_safe(task=task, log=self.log) try: peer_info = peer.get_peer_logging() @@ -2246,8 +2247,12 @@ async def add_block( record = self.blockchain.block_record(block.header_hash) if self.weight_proof_handler is not None and record.sub_epoch_summary_included is not None: - if 
self._segment_task is None or self._segment_task.done(): - self._segment_task = asyncio.create_task(self.weight_proof_handler.create_prev_sub_epoch_segments()) + self._segment_task_list.append( + asyncio.create_task(self.weight_proof_handler.create_prev_sub_epoch_segments()) + ) + for task in self._segment_task_list[:]: + if task.done(): + self._segment_task_list.remove(task) return None async def add_unfinished_block( From de7a74ecc31a6299073a46559a5427684225e9d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:16:42 -0700 Subject: [PATCH 07/25] build(deps): bump anyio from 4.6.2.post1 to 4.7.0 (#19016) Bumps [anyio](https://github.com/agronholm/anyio) from 4.6.2.post1 to 4.7.0. - [Release notes](https://github.com/agronholm/anyio/releases) - [Changelog](https://github.com/agronholm/anyio/blob/master/docs/versionhistory.rst) - [Commits](https://github.com/agronholm/anyio/compare/4.6.2.post1...4.7.0) --- updated-dependencies: - dependency-name: anyio dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index bb2aece0344f..407928271620 100644 --- a/poetry.lock +++ b/poetry.lock @@ -179,24 +179,24 @@ files = [ [[package]] name = "anyio" -version = "4.6.2.post1" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] From 3308c85714043b3c0fa811d2bc180f1c8490f58c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:16:56 -0700 Subject: [PATCH 08/25] build(deps): bump pytest-cov from 5.0.0 to 6.0.0 (#19018) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 5.0.0 to 6.0.0. 
- [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v5.0.0...v6.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 407928271620..da30bcd670ad 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2464,17 +2464,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-cov" -version = "5.0.0" +version = "6.0.0" description = "Pytest plugin for measuring coverage." optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, ] [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] From 4d70c7653eeac44c5199eb2e7da7ee8c80e625db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:18:05 -0700 Subject: [PATCH 09/25] build(deps): bump boto3 from 1.35.73 to 1.35.77 (#19019) Bumps [boto3](https://github.com/boto/boto3) from 1.35.73 to 1.35.77. - [Release notes](https://github.com/boto/boto3/releases) - [Commits](https://github.com/boto/boto3/compare/1.35.73...1.35.77) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index da30bcd670ad..9afc3fab94a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -458,17 +458,17 @@ bitarray = ">=2.8.0,<3.0.0" [[package]] name = "boto3" -version = "1.35.73" +version = "1.35.77" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.73-py3-none-any.whl", hash = "sha256:473438feafe77d29fbea532a91a65de0d8751a4fa5822127218710a205e28e7a"}, - {file = "boto3-1.35.73.tar.gz", hash = "sha256:ccb1a365d3084de53b58f8dfc056462f49b16931c139f4c8ac5f0bca8cb8fe81"}, + {file = "boto3-1.35.77-py3-none-any.whl", hash = "sha256:a09871805f8e462349a1c33c23eb413668df0bf68424e61d53518e1a7d883b2f"}, + {file = "boto3-1.35.77.tar.gz", hash = "sha256:cc819cdbccbc2d0dc185f1dcfe74cf3809489c4cae63c2e5d6a557aa0c5ab928"}, ] [package.dependencies] -botocore = ">=1.35.73,<1.36.0" +botocore = ">=1.35.77,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -477,13 +477,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.73" +version = "1.35.77" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.73-py3-none-any.whl", hash = "sha256:8a6a0f5ad119e38d850571df8c625dbad66aec1b20c15f84cdcb95258f9f1edb"}, - {file = "botocore-1.35.73.tar.gz", hash = "sha256:b2e3ecdd1769f011f72c4c0d0094570ba125f4ca327f24269e4d68eb5d9878b9"}, + {file = "botocore-1.35.77-py3-none-any.whl", hash = "sha256:3faa27d65841499762228902d7e215fa99a4c2fdc76c9113e1c3f339bdf685b8"}, + {file = "botocore-1.35.77.tar.gz", hash = "sha256:17b778016644e9342ca3ff2f430c1d1db0c6126e9b41a57cff52ac58e7a455e0"}, ] [package.dependencies] From 7e72d7c1a64456098ef26f98f6c1df4021cafe0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:18:18 -0700 Subject: [PATCH 10/25] build(deps): bump coverage from 7.6.4 to 7.6.9 (#19021) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.4 to 7.6.9. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.6.4...7.6.9) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 126 ++++++++++++++++++++++++++-------------------------- 1 file changed, 63 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9afc3fab94a1..85b8c692eb2d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1027,73 +1027,73 @@ portalocker = ">=1.6.0" [[package]] name = "coverage" -version = "7.6.4" +version = "7.6.9" description = "Code coverage measurement for Python" optional = true python-versions = ">=3.9" files = [ - {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, - {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, - {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, - {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, - {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, - {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, - {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, - {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, - {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, - {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, - {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, - {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, - {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, - {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, - {file = 
"coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, - {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, - {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, - {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, - {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, + {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, + {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, + {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, + {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, + {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, + {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, + {file = 
"coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, + {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, + {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, + {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, + {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, + {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, + {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, + {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, + {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, ] [package.dependencies] From 3ab83f0eeb90c2895ee6632261865ca90f2a26bb Mon Sep 17 00:00:00 2001 From: Almog De Paz Date: Wed, 11 Dec 2024 19:05:27 +0200 Subject: [PATCH 11/25] use height to hash in short sync (#19007) * use height to hash in short sync * remove added debug code * refactor condition --- chia/full_node/full_node.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 623abfa6f367..b8b71dd91a61 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -597,7 +597,9 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t self.sync_store.batch_syncing.remove(peer.peer_node_id) self.log.error(f"Error short batch syncing, could not fetch block at height {start_height}") return False - if not self.blockchain.contains_block(first.block.prev_header_hash): + hash = self.blockchain.height_to_hash(first.block.height - 1) + assert hash is not None + if hash != first.block.prev_header_hash: self.log.info("Batch syncing stopped, this is a deep chain") self.sync_store.batch_syncing.remove(peer.peer_node_id) # First sb not connected to our blockchain, do a long sync instead @@ -700,7 +702,11 @@ async def short_sync_backtrack( f"Failed to fetch block {curr_height} from {peer.get_peer_logging()}, wrong type {type(curr)}" ) blocks.append(curr.block) - if self.blockchain.contains_block(curr.block.prev_header_hash) or curr_height == 0: + if curr_height == 0: + found_fork_point = True + break + hash_at_height = self.blockchain.height_to_hash(curr.block.height - 1) + if hash_at_height is not None and hash_at_height == curr.block.prev_header_hash: found_fork_point = True break curr_height -= 1 From 40ca4f94fc44fafc422b5cf20bfbd2db2b883f06 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Wed, 11 Dec 2024 14:05:30 -0800 Subject: [PATCH 12/25] Update changelog for 2.5.0 (#19030) --- CHANGELOG.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e58a50408abe..9bd09a80ae50 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) for setuptools_scm/PEP 440 reasons. 
+## 2.5.0 Chia blockchain 2024-12-12 + +## What's Changed + +### Added + +- Implemented CHIP-36: Introduced new soft-fork with CLVM `keccak256` operator + +### Changed + +- Bump `chia_rs` to `0.16.0` + +### Removed + +- Python 3.8 support has been removed + ## 2.4.4 Chia blockchain 2024-10-15 ## What's Changed From be0a77db7d115cf3734b98f0bf3e0d14410a08b1 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Thu, 12 Dec 2024 08:01:29 -0800 Subject: [PATCH 13/25] [CHIA-1316] Port VC Endpoints to `@marshal` decorator (#18970) * Port `vc_mint` * Port `vc_spend` * Port `vc_revoke` * Port `vc_get` * Port `vc_get_list` * Port `vc_add_proofs` * Port `vc_get_proofs_for_root` * address comments by @altendky --- chia/_tests/cmds/wallet/test_vcs.py | 129 ++++++++----- chia/_tests/wallet/cat_wallet/test_trades.py | 56 ++++-- .../_tests/wallet/vc_wallet/test_vc_wallet.py | 68 ++++--- chia/cmds/wallet_funcs.py | 59 ++++-- chia/rpc/wallet_request_types.py | 177 ++++++++++++++++-- chia/rpc/wallet_rpc_api.py | 158 ++++++---------- chia/rpc/wallet_rpc_client.py | 100 ++++------ 7 files changed, 455 insertions(+), 292 deletions(-) diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index 4358295687b4..8c4d7f948518 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -1,13 +1,29 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Optional, cast from chia_rs import Coin from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert from chia._tests.cmds.wallet.test_consts import FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 -from chia.rpc.wallet_request_types import VCMintResponse, VCRevokeResponse, VCSpendResponse +from chia.rpc.wallet_request_types import ( + VCAddProofs, + VCGet, + VCGetList, + VCGetListResponse, + VCGetProofsForRoot, + VCGetProofsForRootResponse, + VCGetResponse, + VCMint, + VCMintResponse, + VCProofsRPC, + VCProofWithHash, + VCRecordWithCoinID, + VCRevoke, + VCRevokeResponse, + VCSpend, + VCSpendResponse, +) from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import encode_puzzle_hash from chia.util.ints import uint32, uint64 @@ -29,14 +45,13 @@ def test_vcs_mint(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Pa class VcsMintRpcClient(TestWalletRpcClient): async def vc_mint( self, - did_id: bytes32, + request: VCMint, tx_config: TXConfig, - target_address: Optional[bytes32] = None, - fee: uint64 = uint64(0), - push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> VCMintResponse: - self.add_to_log("vc_mint", (did_id, tx_config, target_address, fee, push, timelock_info)) + self.add_to_log( + "vc_mint", (request.did_id, tx_config, request.target_address, request.fee, request.push, timelock_info) + ) return VCMintResponse( [STD_UTX], @@ -81,7 +96,7 @@ async def vc_mint( ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { - "vc_mint": [(did_bytes, DEFAULT_TX_CONFIG, target_bytes, 500000000000, True, test_condition_valid_times)] + "vc_mint": [(did_id, DEFAULT_TX_CONFIG, target_addr, 500000000000, True, test_condition_valid_times)] } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -91,22 +106,25 @@ def test_vcs_get(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Pat # set RPC Client class VcsGetRpcClient(TestWalletRpcClient): - async def vc_get_list(self, start: int = 0, count: int = 50) 
-> tuple[list[VCRecord], dict[str, Any]]: - class FakeVC: - def __init__(self) -> None: - self.launcher_id = get_bytes32(3) - self.coin = Coin(get_bytes32(1), get_bytes32(2), uint64(12345678)) - self.inner_puzzle_hash = get_bytes32(3) - self.proof_hash = get_bytes32(4) - - def __getattr__(self, item: str) -> Any: - if item == "vc": - return self - - self.add_to_log("vc_get_list", (start, count)) - proofs = {get_bytes32(1).hex(): ["proof here"]} - records = [cast(VCRecord, FakeVC())] - return records, proofs + async def vc_get_list(self, request: VCGetList) -> VCGetListResponse: + self.add_to_log("vc_get_list", (request.start, request.end)) + proofs = [VCProofWithHash(get_bytes32(1), VCProofsRPC([("proof here", "")]))] + records = [ + VCRecordWithCoinID( + VerifiedCredential( + STD_TX.removals[0], + LineageProof(None, None, None), + VCLineageProof(None, None, None, None), + bytes32([3] * 32), + bytes32.zeros, + bytes32([1] * 32), + None, + ), + uint32(0), + bytes32.zeros, + ) + ] + return VCGetListResponse(records, proofs) inst_rpc_client = VcsGetRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -115,7 +133,7 @@ def __getattr__(self, item: str) -> Any: assert_list = [ f"Proofs:\n- {get_bytes32(1).hex()}\n - proof here", f"Launcher ID: {get_bytes32(3).hex()}", - f"Inner Address: {encode_puzzle_hash(get_bytes32(3), 'xch')}", + f"Inner Address: {encode_puzzle_hash(bytes32.zeros, 'xch')}", ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = {"vc_get_list": [(10, 10)]} @@ -129,18 +147,22 @@ def test_vcs_update_proofs(capsys: object, get_test_cli_clients: tuple[TestRpcCl class VcsUpdateProofsRpcClient(TestWalletRpcClient): async def vc_spend( self, - vc_id: bytes32, + request: VCSpend, tx_config: TXConfig, - new_puzhash: Optional[bytes32] = None, - new_proof_hash: Optional[bytes32] = None, - provider_inner_puzhash: Optional[bytes32] = None, - fee: uint64 = uint64(0), - push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> VCSpendResponse: self.add_to_log( "vc_spend", - (vc_id, tx_config, new_puzhash, new_proof_hash, provider_inner_puzhash, fee, push, timelock_info), + ( + request.vc_id, + tx_config, + request.new_puzhash, + request.new_proof_hash, + request.provider_inner_puzhash, + request.fee, + request.push, + timelock_info, + ), ) return VCSpendResponse([STD_UTX], [STD_TX]) @@ -192,8 +214,8 @@ def test_vcs_add_proof_reveal(capsys: object, get_test_cli_clients: tuple[TestRp # set RPC Client class VcsAddProofRevealRpcClient(TestWalletRpcClient): - async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: - self.add_to_log("vc_add_proofs", (proofs,)) + async def vc_add_proofs(self, request: VCAddProofs) -> None: + self.add_to_log("vc_add_proofs", (request.to_json_dict()["proofs"],)) inst_rpc_client = VcsAddProofRevealRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -215,9 +237,9 @@ def test_vcs_get_proofs_for_root(capsys: object, get_test_cli_clients: tuple[Tes # set RPC Client class VcsGetProofsForRootRpcClient(TestWalletRpcClient): - async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: - self.add_to_log("vc_get_proofs_for_root", (root,)) - return {"test_proof": "1", "test_proof2": "1"} + async def vc_get_proofs_for_root(self, request: VCGetProofsForRoot) -> VCGetProofsForRootResponse: + self.add_to_log("vc_get_proofs_for_root", (request.root,)) + return VCGetProofsForRootResponse([("test_proof", "1"), ("test_proof2", "1")]) inst_rpc_client = 
VcsGetProofsForRootRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -236,28 +258,31 @@ def test_vcs_revoke(capsys: object, get_test_cli_clients: tuple[TestRpcClients, # set RPC Client class VcsRevokeRpcClient(TestWalletRpcClient): - async def vc_get(self, vc_id: bytes32) -> Optional[VCRecord]: - self.add_to_log("vc_get", (vc_id,)) - - class FakeVC: - def __init__(self) -> None: - self.coin = Coin(get_bytes32(1), get_bytes32(2), uint64(12345678)) + async def vc_get(self, request: VCGet) -> VCGetResponse: + self.add_to_log("vc_get", (request.vc_id,)) - def __getattr__(self, item: str) -> Any: - if item == "vc": - return self - - return cast(VCRecord, FakeVC()) + return VCGetResponse( + VCRecord( + VerifiedCredential( + Coin(get_bytes32(1), get_bytes32(2), uint64(12345678)), + LineageProof(), + VCLineageProof(), + bytes32.zeros, + bytes32.zeros, + bytes32.zeros, + None, + ), + uint32(0), + ) + ) async def vc_revoke( self, - vc_parent_id: bytes32, + request: VCRevoke, tx_config: TXConfig, - fee: uint64 = uint64(0), - push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> VCRevokeResponse: - self.add_to_log("vc_revoke", (vc_parent_id, tx_config, fee, push, timelock_info)) + self.add_to_log("vc_revoke", (request.vc_parent_id, tx_config, request.fee, request.push, timelock_info)) return VCRevokeResponse([STD_UTX], [STD_TX]) inst_rpc_client = VcsRevokeRpcClient() diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index da1f6786e281..9c55d80bb164 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -14,9 +14,11 @@ from chia.consensus.cost_calculator import NPCResult from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.full_node.bundle_tools import simple_solution_generator +from chia.rpc.wallet_request_types import VCAddProofs, VCGetList, VCGetProofsForRoot, VCMint, VCSpend from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.spend_bundle import SpendBundle +from chia.util.bech32m import encode_puzzle_hash from chia.util.hash import std_hash from chia.util.ints import uint32, uint64 from chia.wallet.cat_wallet.cat_wallet import CATWallet @@ -237,12 +239,22 @@ async def test_cat_trades( # Mint some VCs that can spend the CR-CATs vc_record_maker = ( await client_maker.vc_mint( - did_id_maker, wallet_environments.tx_config, target_address=await wallet_maker.get_new_puzzlehash() + VCMint( + did_id=encode_puzzle_hash(did_id_maker, "did"), + target_address=encode_puzzle_hash(await wallet_maker.get_new_puzzlehash(), "txch"), + push=True, + ), + wallet_environments.tx_config, ) ).vc_record vc_record_taker = ( await client_taker.vc_mint( - did_id_taker, wallet_environments.tx_config, target_address=await wallet_taker.get_new_puzzlehash() + VCMint( + did_id=encode_puzzle_hash(did_id_taker, "did"), + target_address=encode_puzzle_hash(await wallet_taker.get_new_puzzlehash(), "txch"), + push=True, + ), + wallet_environments.tx_config, ) ).vc_record await wallet_environments.process_pending_states( @@ -274,17 +286,23 @@ async def test_cat_trades( proofs_maker = VCProofs({"foo": "1", "bar": "1", "zap": "1"}) proof_root_maker: bytes32 = proofs_maker.root() await client_maker.vc_spend( - vc_record_maker.vc.launcher_id, + VCSpend( + vc_id=vc_record_maker.vc.launcher_id, + new_proof_hash=proof_root_maker, + push=True, + ), 
wallet_environments.tx_config, - new_proof_hash=proof_root_maker, ) proofs_taker = VCProofs({"foo": "1", "bar": "1", "zap": "1"}) proof_root_taker: bytes32 = proofs_taker.root() await client_taker.vc_spend( - vc_record_taker.vc.launcher_id, + VCSpend( + vc_id=vc_record_taker.vc.launcher_id, + new_proof_hash=proof_root_taker, + push=True, + ), wallet_environments.tx_config, - new_proof_hash=proof_root_taker, ) await wallet_environments.process_pending_states( [ @@ -374,17 +392,21 @@ async def test_cat_trades( ) if credential_restricted: - await client_maker.vc_add_proofs(proofs_maker.key_value_pairs) - assert await client_maker.vc_get_proofs_for_root(proof_root_maker) == proofs_maker.key_value_pairs - vc_records, fetched_proofs = await client_maker.vc_get_list() - assert len(vc_records) == 1 - assert fetched_proofs[proof_root_maker.hex()] == proofs_maker.key_value_pairs - - await client_taker.vc_add_proofs(proofs_taker.key_value_pairs) - assert await client_taker.vc_get_proofs_for_root(proof_root_taker) == proofs_taker.key_value_pairs - vc_records, fetched_proofs = await client_taker.vc_get_list() - assert len(vc_records) == 1 - assert fetched_proofs[proof_root_taker.hex()] == proofs_taker.key_value_pairs + await client_maker.vc_add_proofs(VCAddProofs.from_vc_proofs(proofs_maker)) + assert ( + await client_maker.vc_get_proofs_for_root(VCGetProofsForRoot(proof_root_maker)) + ).to_vc_proofs().key_value_pairs == proofs_maker.key_value_pairs + get_list_reponse = await client_maker.vc_get_list(VCGetList()) + assert len(get_list_reponse.vc_records) == 1 + assert get_list_reponse.proof_dict[proof_root_maker] == proofs_maker.key_value_pairs + + await client_taker.vc_add_proofs(VCAddProofs.from_vc_proofs(proofs_taker)) + assert ( + await client_taker.vc_get_proofs_for_root(VCGetProofsForRoot(proof_root_taker)) + ).to_vc_proofs().key_value_pairs == proofs_taker.key_value_pairs + get_list_reponse = await client_taker.vc_get_list(VCGetList()) + assert len(get_list_reponse.vc_records) == 1 + assert get_list_reponse.proof_dict[proof_root_taker] == proofs_taker.key_value_pairs # Add the taker's CAT to the maker's wallet if credential_restricted: diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 135576ed605f..a27ddab2413e 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -10,6 +10,7 @@ from chia._tests.environments.wallet import WalletEnvironment, WalletStateTransition, WalletTestFramework from chia._tests.util.time_out_assert import time_out_assert_not_none +from chia.rpc.wallet_request_types import VCAddProofs, VCGet, VCGetList, VCGetProofsForRoot, VCMint, VCRevoke, VCSpend from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.simulator.full_node_simulator import FullNodeSimulator from chia.types.blockchain_format.coin import coin_as_list @@ -162,10 +163,13 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: # Mint a VC vc_record = ( await client_0.vc_mint( - did_id, + VCMint( + did_id=encode_puzzle_hash(did_id, "did"), + target_address=encode_puzzle_hash(await wallet_0.get_new_puzzlehash(), "txch"), + fee=uint64(1_750_000_000_000), + push=True, + ), wallet_environments.tx_config, - target_address=await wallet_0.get_new_puzzlehash(), - fee=uint64(1_750_000_000_000), ) ).vc_record @@ -215,17 +219,20 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: WalletStateTransition(), ] ) - new_vc_record: 
Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id) + new_vc_record: Optional[VCRecord] = (await client_0.vc_get(VCGet(vc_record.vc.launcher_id))).vc_record assert new_vc_record is not None # Spend VC proofs: VCProofs = VCProofs({"foo": "1", "bar": "1", "baz": "1", "qux": "1", "grault": "1"}) proof_root: bytes32 = proofs.root() await client_0.vc_spend( - vc_record.vc.launcher_id, + VCSpend( + vc_id=vc_record.vc.launcher_id, + new_proof_hash=proof_root, + fee=uint64(100), + push=True, + ), wallet_environments.tx_config, - new_proof_hash=proof_root, - fee=uint64(100), ) await wallet_environments.process_pending_states( [ @@ -268,12 +275,12 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: WalletStateTransition(), ] ) - vc_record_updated: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id) + vc_record_updated: Optional[VCRecord] = (await client_0.vc_get(VCGet(vc_record.vc.launcher_id))).vc_record assert vc_record_updated is not None assert vc_record_updated.vc.proof_hash == proof_root # Do a mundane spend - await client_0.vc_spend(vc_record.vc.launcher_id, wallet_environments.tx_config) + await client_0.vc_spend(VCSpend(vc_id=vc_record.vc.launcher_id, push=True), wallet_environments.tx_config) await wallet_environments.process_pending_states( [ WalletStateTransition( @@ -293,12 +300,15 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: ) # Add proofs to DB - await client_0.vc_add_proofs(proofs.key_value_pairs) - await client_0.vc_add_proofs(proofs.key_value_pairs) # Doing it again just to make sure it doesn't care - assert await client_0.vc_get_proofs_for_root(proof_root) == proofs.key_value_pairs - vc_records, fetched_proofs = await client_0.vc_get_list() - assert len(vc_records) == 1 - assert fetched_proofs[proof_root.hex()] == proofs.key_value_pairs + await client_0.vc_add_proofs(VCAddProofs.from_vc_proofs(proofs)) + # Doing it again just to make sure it doesn't care + await client_0.vc_add_proofs(VCAddProofs.from_vc_proofs(proofs)) + assert ( + await client_0.vc_get_proofs_for_root(VCGetProofsForRoot(proof_root)) + ).to_vc_proofs().key_value_pairs == proofs.key_value_pairs + get_list_reponse = await client_0.vc_get_list(VCGetList()) + assert len(get_list_reponse.vc_records) == 1 + assert get_list_reponse.proof_dict[proof_root] == proofs.key_value_pairs # Mint CR-CAT await mint_cr_cat(1, wallet_0, wallet_node_0, client_0, full_node_api, [did_id]) @@ -437,7 +447,8 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: # Send the VC to wallet_1 to use for the CR-CATs await client_0.vc_spend( - vc_record.vc.launcher_id, wallet_environments.tx_config, new_puzhash=await wallet_1.get_new_puzzlehash() + VCSpend(vc_id=vc_record.vc.launcher_id, new_puzhash=await wallet_1.get_new_puzzlehash(), push=True), + wallet_environments.tx_config, ) await wallet_environments.process_pending_states( [ @@ -461,7 +472,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: ), ] ) - await client_1.vc_add_proofs(proofs.key_value_pairs) + await client_1.vc_add_proofs(VCAddProofs.from_vc_proofs(proofs)) # Claim the pending approval to our wallet await client_1.crcat_approve_pending( @@ -585,11 +596,14 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: ), ] ) - vc_record_updated = await client_1.vc_get(vc_record_updated.vc.launcher_id) + vc_record_updated = (await client_1.vc_get(VCGet(vc_record_updated.vc.launcher_id))).vc_record assert 
vc_record_updated is not None # Revoke VC - await client_0.vc_revoke(vc_record_updated.vc.coin.parent_coin_info, wallet_environments.tx_config, uint64(1)) + await client_0.vc_revoke( + VCRevoke(vc_parent_id=vc_record_updated.vc.coin.parent_coin_info, fee=uint64(1), push=True), + wallet_environments.tx_config, + ) await wallet_environments.process_pending_states( [ WalletStateTransition( @@ -670,7 +684,13 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: vc_record = ( await client_0.vc_mint( - did_id, wallet_environments.tx_config, target_address=await wallet_0.get_new_puzzlehash(), fee=uint64(200) + VCMint( + did_id=encode_puzzle_hash(did_id, "did"), + target_address=encode_puzzle_hash(await wallet_0.get_new_puzzlehash(), "txch"), + fee=uint64(200), + push=True, + ), + wallet_environments.tx_config, ) ).vc_record await wallet_environments.process_pending_states( @@ -690,7 +710,7 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: ) ] ) - new_vc_record: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id) + new_vc_record: Optional[VCRecord] = (await client_0.vc_get(VCGet(vc_record.vc.launcher_id))).vc_record assert new_vc_record is not None # Test a negative case real quick (mostly unrelated) @@ -723,7 +743,9 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: ) # Make sure revoking still works - await client_0.vc_revoke(new_vc_record.vc.coin.parent_coin_info, wallet_environments.tx_config, uint64(0)) + await client_0.vc_revoke( + VCRevoke(vc_parent_id=new_vc_record.vc.coin.parent_coin_info, push=True), wallet_environments.tx_config + ) await wallet_environments.process_pending_states( [ WalletStateTransition( @@ -742,7 +764,7 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: ) ] ) - vc_record_revoked: Optional[VCRecord] = await client_0.vc_get(vc_record.vc.launcher_id) + vc_record_revoked: Optional[VCRecord] = (await client_0.vc_get(VCGet(vc_record.vc.launcher_id))).vc_record assert vc_record_revoked is None assert ( len(await (await wallet_node_0.wallet_state_manager.get_or_create_vc_wallet()).store.get_unconfirmed_vcs()) == 0 diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 31167b255959..f676157ffedb 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -21,7 +21,18 @@ from chia.cmds.param_types import CliAddress, CliAmount from chia.cmds.peer_funcs import print_connections from chia.cmds.units import units -from chia.rpc.wallet_request_types import CATSpendResponse, GetNotifications, SendTransactionResponse +from chia.rpc.wallet_request_types import ( + CATSpendResponse, + GetNotifications, + SendTransactionResponse, + VCAddProofs, + VCGet, + VCGetList, + VCGetProofsForRoot, + VCMint, + VCRevoke, + VCSpend, +) from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import bech32_decode, decode_puzzle_hash, encode_puzzle_hash @@ -1582,11 +1593,13 @@ async def mint_vc( ) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.vc_mint( - did.validate_address_type_get_ph(AddressType.DID), + VCMint( + did_id=did.validate_address_type(AddressType.DID), + target_address=target_address.validate_address_type(AddressType.XCH) if target_address else None, + fee=fee, + push=push, + ), CMDTXConfigLoader().to_tx_config(units["chia"], config, fingerprint), 
- target_address.validate_address_type_get_ph(AddressType.XCH) if target_address else None, - fee, - push=push, timelock_info=condition_valid_times, ) @@ -1607,14 +1620,14 @@ async def mint_vc( async def get_vcs(wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): - vc_records, proofs = await wallet_client.vc_get_list(start, count) + get_list_response = await wallet_client.vc_get_list(VCGetList(uint32(start), uint32(count))) print("Proofs:") - for hash, proof_dict in proofs.items(): + for hash, proof_dict in get_list_response.proof_dict.items(): if proof_dict is not None: print(f"- {hash}") for proof in proof_dict: print(f" - {proof}") - for record in vc_records: + for record in get_list_response.vc_records: print("") print(f"Launcher ID: {record.vc.launcher_id.hex()}") print(f"Coin ID: {record.vc.coin.name().hex()}") @@ -1643,14 +1656,16 @@ async def spend_vc( async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): txs = ( await wallet_client.vc_spend( - vc_id, - new_puzhash=new_puzhash, - new_proof_hash=bytes32.from_hexstr(new_proof_hash), - fee=fee, + VCSpend( + vc_id=vc_id, + new_puzhash=new_puzhash, + new_proof_hash=bytes32.from_hexstr(new_proof_hash), + fee=fee, + push=push, + ), tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), - push=push, timelock_info=condition_valid_times, ) ).transactions @@ -1683,14 +1698,18 @@ async def add_proof_reveal( print(f"Proof Hash: {VCProofs(proof_dict).root()}") return else: - await wallet_client.vc_add_proofs(proof_dict) + await wallet_client.vc_add_proofs(VCAddProofs.from_json_dict({"proofs": proof_dict})) print("Proofs added to DB successfully!") return async def get_proofs_for_root(wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): - proof_dict: dict[str, str] = await wallet_client.vc_get_proofs_for_root(bytes32.from_hexstr(proof_hash)) + proof_dict: dict[str, str] = ( + (await wallet_client.vc_get_proofs_for_root(VCGetProofsForRoot(bytes32.from_hexstr(proof_hash)))) + .to_vc_proofs() + .key_value_pairs + ) print("Proofs:") for proof in proof_dict: print(f" - {proof}") @@ -1711,7 +1730,7 @@ async def revoke_vc( if vc_id is None: print("Must specify either --parent-coin-id or --vc-id") return [] - record = await wallet_client.vc_get(vc_id) + record = (await wallet_client.vc_get(VCGet(vc_id))).vc_record if record is None: print(f"Cannot find a VC with ID {vc_id.hex()}") return [] @@ -1720,12 +1739,14 @@ async def revoke_vc( parent_id = parent_coin_id txs = ( await wallet_client.vc_revoke( - parent_id, - fee=fee, + VCRevoke( + vc_parent_id=parent_id, + fee=fee, + push=push, + ), tx_config=CMDTXConfigLoader( reuse_puzhash=reuse_puzhash, ).to_tx_config(units["chia"], config, fingerprint), - push=push, timelock_info=condition_valid_times, ) ).transactions diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index c3fca4132813..9cd5e07409f9 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -2,7 +2,7 @@ import sys from dataclasses import dataclass, field -from typing import Any, Optional, TypeVar +from typing import Any, Optional, TypeVar, final from chia_rs import G1Element, G2Element, PrivateKey from typing_extensions import dataclass_transform @@ -25,7 +25,7 @@ from 
chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable from chia.wallet.util.tx_config import TXConfig -from chia.wallet.vc_wallet.vc_store import VCRecord +from chia.wallet.vc_wallet.vc_store import VCProofs, VCRecord from chia.wallet.wallet_spend_bundle import WalletSpendBundle _T_OfferEndpointResponse = TypeVar("_T_OfferEndpointResponse", bound="_OfferEndpointResponse") @@ -375,6 +375,121 @@ class NFTCoin(Streamable): wallet_id: uint32 +@streamable +@dataclass(frozen=True) +class VCGet(Streamable): + vc_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class VCGetResponse(Streamable): + vc_record: Optional[VCRecord] + + +@streamable +@dataclass(frozen=True) +class VCGetList(Streamable): + start: uint32 = uint32(0) + end: uint32 = uint32(50) + + +# utility for VC endpoints +@streamable +@dataclass(frozen=True) +class VCProofsRPC(Streamable): + key_value_pairs: list[tuple[str, str]] + + def to_vc_proofs(self) -> VCProofs: + return VCProofs({key: value for key, value in self.key_value_pairs}) + + @classmethod + def from_vc_proofs(cls: type[_T_VCProofsRPC], vc_proofs: VCProofs) -> _T_VCProofsRPC: + return cls([(key, value) for key, value in vc_proofs.key_value_pairs.items()]) + + +_T_VCProofsRPC = TypeVar("_T_VCProofsRPC", bound=VCProofsRPC) + + +# utility for VCGetListResponse +@streamable +@dataclass(frozen=True) +class VCProofWithHash(Streamable): + hash: bytes32 + proof: Optional[VCProofsRPC] + + +# utility for VCGetListResponse +@final +@streamable +@dataclass(frozen=True) +class VCRecordWithCoinID(VCRecord): + coin_id: bytes32 + + @classmethod + def from_vc_record(cls, vc_record: VCRecord) -> VCRecordWithCoinID: + return cls(coin_id=vc_record.vc.coin.name(), **vc_record.__dict__) + + +@streamable +@dataclass(frozen=True) +class VCGetListResponse(Streamable): + vc_records: list[VCRecordWithCoinID] + proofs: list[VCProofWithHash] + + @property + def proof_dict(self) -> dict[bytes32, Optional[dict[str, str]]]: + return { + pwh.hash: None if pwh.proof is None else {key: value for key, value in pwh.proof.key_value_pairs} + for pwh in self.proofs + } + + def to_json_dict(self) -> dict[str, Any]: + return { + "vc_records": [vc_record.to_json_dict() for vc_record in self.vc_records], + "proofs": {proof_hash.hex(): proof_data for proof_hash, proof_data in self.proof_dict.items()}, + } + + @classmethod + def from_json_dict(cls, json_dict: dict[str, Any]) -> VCGetListResponse: + return cls( + [VCRecordWithCoinID.from_json_dict(vc_record) for vc_record in json_dict["vc_records"]], + [ + VCProofWithHash( + bytes32.from_hexstr(proof_hash), + None if potential_proofs is None else VCProofsRPC.from_vc_proofs(VCProofs(potential_proofs)), + ) + for proof_hash, potential_proofs in json_dict["proofs"].items() + ], + ) + + +@streamable +@dataclass(frozen=True) +class VCAddProofs(VCProofsRPC): + def to_json_dict(self) -> dict[str, Any]: + return {"proofs": self.to_vc_proofs().key_value_pairs} + + @classmethod + def from_json_dict(cls: type[_T_VCAddProofs], json_dict: dict[str, Any]) -> _T_VCAddProofs: + return cls([(key, value) for key, value in json_dict["proofs"].items()]) + + +_T_VCAddProofs = TypeVar("_T_VCAddProofs", bound=VCAddProofs) + + +@streamable +@dataclass(frozen=True) +class VCGetProofsForRoot(Streamable): + root: bytes32 + + +@streamable +@dataclass(frozen=True) +class VCGetProofsForRootResponse(VCAddProofs): + pass + + @streamable @dataclass(frozen=True) class GatherSigningInfo(Streamable): @@ -553,6 
+668,46 @@ class NFTTransferBulkResponse(TransactionEndpointResponse): spend_bundle: WalletSpendBundle +@streamable +@dataclass(frozen=True) +class VCMint(TransactionEndpointRequest): + did_id: str = field(default_factory=default_raise) + target_address: Optional[str] = None + + +@streamable +@dataclass(frozen=True) +class VCMintResponse(TransactionEndpointResponse): + vc_record: VCRecord + + +@streamable +@dataclass(frozen=True) +class VCSpend(TransactionEndpointRequest): + vc_id: bytes32 = field(default_factory=default_raise) + new_puzhash: Optional[bytes32] = None + new_proof_hash: Optional[bytes32] = None + provider_inner_puzhash: Optional[bytes32] = None + + +@streamable +@dataclass(frozen=True) +class VCSpendResponse(TransactionEndpointResponse): + pass + + +@streamable +@dataclass(frozen=True) +class VCRevoke(TransactionEndpointRequest): + vc_parent_id: bytes32 = field(default_factory=default_raise) + + +@streamable +@dataclass(frozen=True) +class VCRevokeResponse(TransactionEndpointResponse): + pass + + # TODO: The section below needs corresponding request types # TODO: The section below should be added to the API (currently only for client) @streamable @@ -747,21 +902,3 @@ class DAOSendToLockupResponse(TransactionEndpointResponse): class DAOExitLockupResponse(TransactionEndpointResponse): tx_id: bytes32 tx: TransactionRecord - - -@streamable -@dataclass(frozen=True) -class VCMintResponse(TransactionEndpointResponse): - vc_record: VCRecord - - -@streamable -@dataclass(frozen=True) -class VCSpendResponse(TransactionEndpointResponse): - pass - - -@streamable -@dataclass(frozen=True) -class VCRevokeResponse(TransactionEndpointResponse): - pass diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index e1538555a859..9e3052164b07 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -56,6 +56,22 @@ SplitCoinsResponse, SubmitTransactions, SubmitTransactionsResponse, + VCAddProofs, + VCGet, + VCGetList, + VCGetListResponse, + VCGetProofsForRoot, + VCGetProofsForRootResponse, + VCGetResponse, + VCMint, + VCMintResponse, + VCProofsRPC, + VCProofWithHash, + VCRecordWithCoinID, + VCRevoke, + VCRevokeResponse, + VCSpend, + VCSpendResponse, ) from chia.server.outbound_message import NodeType from chia.server.ws_connection import WSChiaConnection @@ -4530,12 +4546,13 @@ async def dl_verify_proof( # Verified Credential ########################################################################################## @tx_endpoint(push=True) + @marshal async def vc_mint( self, - request: dict[str, Any], + request: VCMint, action_scope: WalletActionScope, extra_conditions: tuple[Condition, ...] 
= tuple(), - ) -> EndpointResult: + ) -> VCMintResponse: """ Mint a verified credential using the assigned DID :param request: We require 'did_id' that will be minting the VC and options for a new 'target_address' as well @@ -4543,84 +4560,58 @@ async def vc_mint( :return: a 'vc_record' containing all the information of the soon-to-be-confirmed vc as well as any relevant 'transactions' """ - - @streamable - @dataclasses.dataclass(frozen=True) - class VCMint(Streamable): - did_id: str - target_address: Optional[str] = None - fee: uint64 = uint64(0) - - parsed_request = VCMint.from_json_dict(request) - - did_id = decode_puzzle_hash(parsed_request.did_id) + did_id = decode_puzzle_hash(request.did_id) puzhash: Optional[bytes32] = None - if parsed_request.target_address is not None: - puzhash = decode_puzzle_hash(parsed_request.target_address) + if request.target_address is not None: + puzhash = decode_puzzle_hash(request.target_address) vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet() vc_record = await vc_wallet.launch_new_vc( - did_id, action_scope, puzhash, parsed_request.fee, extra_conditions=extra_conditions + did_id, action_scope, puzhash, request.fee, extra_conditions=extra_conditions ) - return { - "vc_record": vc_record.to_json_dict(), - "transactions": None, # tx_endpoint wrapper will take care of this - } + return VCMintResponse([], [], vc_record) - async def vc_get(self, request: dict[str, Any]) -> EndpointResult: + @marshal + async def vc_get(self, request: VCGet) -> VCGetResponse: """ Given a launcher ID get the verified credential :param request: the 'vc_id' launcher id of a verifiable credential :return: the 'vc_record' representing the specified verifiable credential """ + vc_record = await self.service.wallet_state_manager.vc_store.get_vc_record(request.vc_id) + return VCGetResponse(vc_record) - @streamable - @dataclasses.dataclass(frozen=True) - class VCGet(Streamable): - vc_id: bytes32 - - parsed_request = VCGet.from_json_dict(request) - - vc_record = await self.service.wallet_state_manager.vc_store.get_vc_record(parsed_request.vc_id) - return {"vc_record": vc_record} - - async def vc_get_list(self, request: dict[str, Any]) -> EndpointResult: + @marshal + async def vc_get_list(self, request: VCGetList) -> VCGetListResponse: """ Get a list of verified credentials :param request: optional parameters for pagination 'start' and 'count' :return: all 'vc_records' in the specified range and any 'proofs' associated with the roots contained within """ - @streamable - @dataclasses.dataclass(frozen=True) - class VCGetList(Streamable): - start: uint32 = uint32(0) - end: uint32 = uint32(50) - - parsed_request = VCGetList.from_json_dict(request) - - vc_list = await self.service.wallet_state_manager.vc_store.get_vc_record_list( - parsed_request.start, parsed_request.end - ) - return { - "vc_records": [{"coin_id": "0x" + vc.vc.coin.name().hex(), **vc.to_json_dict()} for vc in vc_list], - "proofs": { - rec.vc.proof_hash.hex(): None if fetched_proof is None else fetched_proof.key_value_pairs + vc_list = await self.service.wallet_state_manager.vc_store.get_vc_record_list(request.start, request.end) + return VCGetListResponse( + [VCRecordWithCoinID.from_vc_record(vc) for vc in vc_list], + [ + VCProofWithHash( + rec.vc.proof_hash, None if fetched_proof is None else VCProofsRPC.from_vc_proofs(fetched_proof) + ) for rec in vc_list if rec.vc.proof_hash is not None for fetched_proof in ( await 
self.service.wallet_state_manager.vc_store.get_proofs_for_root(rec.vc.proof_hash), ) - }, - } + ], + ) @tx_endpoint(push=True) + @marshal async def vc_spend( self, - request: dict[str, Any], + request: VCSpend, action_scope: WalletActionScope, extra_conditions: tuple[Condition, ...] = tuple(), - ) -> EndpointResult: + ) -> VCSpendResponse: """ Spend a verified credential :param request: Required 'vc_id' launcher id of the vc we wish to spend. Optional parameters for a 'new_puzhash' @@ -4629,34 +4620,22 @@ async def vc_spend( :return: a list of all relevant 'transactions' (TransactionRecord) that this spend generates (VC TX + fee TX) """ - @streamable - @dataclasses.dataclass(frozen=True) - class VCSpend(Streamable): - vc_id: bytes32 - new_puzhash: Optional[bytes32] = None - new_proof_hash: Optional[bytes32] = None - provider_inner_puzhash: Optional[bytes32] = None - fee: uint64 = uint64(0) - - parsed_request = VCSpend.from_json_dict(request) - vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet() await vc_wallet.generate_signed_transaction( - parsed_request.vc_id, + request.vc_id, action_scope, - parsed_request.fee, - parsed_request.new_puzhash, - new_proof_hash=parsed_request.new_proof_hash, - provider_inner_puzhash=parsed_request.provider_inner_puzhash, + request.fee, + request.new_puzhash, + new_proof_hash=request.new_proof_hash, + provider_inner_puzhash=request.provider_inner_puzhash, extra_conditions=extra_conditions, ) - return { - "transactions": None, # tx_endpoint wrapper will take care of this - } + return VCSpendResponse([], []) # tx_endpoint takes care of filling this out - async def vc_add_proofs(self, request: dict[str, Any]) -> EndpointResult: + @marshal + async def vc_add_proofs(self, request: VCAddProofs) -> Empty: """ Add a set of proofs to the DB that can be used when spending a VC. VCs are near useless until their proofs have been added. @@ -4665,63 +4644,50 @@ async def vc_add_proofs(self, request: dict[str, Any]) -> EndpointResult: """ vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet() - await vc_wallet.store.add_vc_proofs(VCProofs(request["proofs"])) + await vc_wallet.store.add_vc_proofs(request.to_vc_proofs()) - return {} + return Empty() - async def vc_get_proofs_for_root(self, request: dict[str, Any]) -> EndpointResult: + @marshal + async def vc_get_proofs_for_root(self, request: VCGetProofsForRoot) -> VCGetProofsForRootResponse: """ Given a specified vc root, get any proofs associated with that root. 
:param request: must specify 'root' representing the tree hash of some set of proofs :return: a dictionary of root hashes mapped to dictionaries of key value pairs of 'proofs' """ - @streamable - @dataclasses.dataclass(frozen=True) - class VCGetProofsForRoot(Streamable): - root: bytes32 - - parsed_request = VCGetProofsForRoot.from_json_dict(request) vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet() - vc_proofs: Optional[VCProofs] = await vc_wallet.store.get_proofs_for_root(parsed_request.root) + vc_proofs: Optional[VCProofs] = await vc_wallet.store.get_proofs_for_root(request.root) if vc_proofs is None: raise ValueError("no proofs found for specified root") # pragma: no cover - return {"proofs": vc_proofs.key_value_pairs} + return VCGetProofsForRootResponse.from_vc_proofs(vc_proofs) @tx_endpoint(push=True) + @marshal async def vc_revoke( self, - request: dict[str, Any], + request: VCRevoke, action_scope: WalletActionScope, extra_conditions: tuple[Condition, ...] = tuple(), - ) -> EndpointResult: + ) -> VCRevokeResponse: """ Revoke an on chain VC provided the correct DID is available :param request: required 'vc_parent_id' for the VC coin. Standard transaction params 'fee' & 'reuse_puzhash'. :return: a list of all relevant 'transactions' (TransactionRecord) that this spend generates (VC TX + fee TX) """ - @streamable - @dataclasses.dataclass(frozen=True) - class VCRevoke(Streamable): - vc_parent_id: bytes32 - fee: uint64 = uint64(0) - - parsed_request = VCRevoke.from_json_dict(request) vc_wallet: VCWallet = await self.service.wallet_state_manager.get_or_create_vc_wallet() await vc_wallet.revoke_vc( - parsed_request.vc_parent_id, + request.vc_parent_id, self.service.get_full_node_peer(), action_scope, - parsed_request.fee, + request.fee, extra_conditions=extra_conditions, ) - return { - "transactions": None, # tx_endpoint wrapper will take care of this - } + return VCRevokeResponse([], []) # tx_endpoint takes care of filling this out @tx_endpoint(push=True) async def crcat_approve_pending( diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 02fc08284162..186f25da55ec 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -85,8 +85,18 @@ SubmitTransactions, SubmitTransactionsResponse, TakeOfferResponse, + VCAddProofs, + VCGet, + VCGetList, + VCGetListResponse, + VCGetProofsForRoot, + VCGetProofsForRootResponse, + VCGetResponse, + VCMint, VCMintResponse, + VCRevoke, VCRevokeResponse, + VCSpend, VCSpendResponse, VerifySignature, VerifySignatureResponse, @@ -95,7 +105,6 @@ from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_record import CoinRecord -from chia.util.bech32m import encode_puzzle_hash from chia.util.ints import uint16, uint32, uint64 from chia.wallet.conditions import Condition, ConditionValidTimes, conditions_to_json_dicts from chia.wallet.puzzles.clawback.metadata import AutoClaimSettings @@ -107,7 +116,6 @@ from chia.wallet.util.query_filter import TransactionTypeFilter from chia.wallet.util.tx_config import CoinSelectionConfig, TXConfig from chia.wallet.util.wallet_types import WalletType -from chia.wallet.vc_wallet.vc_store import VCRecord from chia.wallet.wallet_coin_store import GetCoinRecords @@ -1662,94 +1670,56 @@ async def dao_adjust_filter_level(self, wallet_id: int, filter_level: int) -> di async def vc_mint( self, - did_id: bytes32, + request: VCMint, tx_config: TXConfig, - target_address: 
Optional[bytes32] = None, - fee: uint64 = uint64(0), extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - push: bool = True, ) -> VCMintResponse: - response = await self.fetch( - "vc_mint", - { - "did_id": encode_puzzle_hash(did_id, "rpc"), - "target_address": encode_puzzle_hash(target_address, "rpc") if target_address is not None else None, - "fee": fee, - "extra_conditions": conditions_to_json_dicts(extra_conditions), - "push": push, - **tx_config.to_json_dict(), - **timelock_info.to_json_dict(), - }, + return VCMintResponse.from_json_dict( + await self.fetch( + "vc_mint", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) ) - return json_deserialize_with_clvm_streamable(response, VCMintResponse) - async def vc_get(self, vc_id: bytes32) -> Optional[VCRecord]: - response = await self.fetch("vc_get", {"vc_id": vc_id.hex()}) - return None if response["vc_record"] is None else VCRecord.from_json_dict(response["vc_record"]) + async def vc_get(self, request: VCGet) -> VCGetResponse: + return VCGetResponse.from_json_dict(await self.fetch("vc_get", request.to_json_dict())) - async def vc_get_list(self, start: int = 0, count: int = 50) -> tuple[list[VCRecord], dict[str, Any]]: - response = await self.fetch("vc_get_list", {"start": start, "count": count}) - return [VCRecord.from_json_dict(rec) for rec in response["vc_records"]], response["proofs"] + async def vc_get_list(self, request: VCGetList) -> VCGetListResponse: + return VCGetListResponse.from_json_dict(await self.fetch("vc_get_list", request.to_json_dict())) async def vc_spend( self, - vc_id: bytes32, + request: VCSpend, tx_config: TXConfig, - new_puzhash: Optional[bytes32] = None, - new_proof_hash: Optional[bytes32] = None, - provider_inner_puzhash: Optional[bytes32] = None, - fee: uint64 = uint64(0), extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - push: bool = True, ) -> VCSpendResponse: - response = await self.fetch( - "vc_spend", - { - "vc_id": vc_id.hex(), - "new_puzhash": new_puzhash.hex() if new_puzhash is not None else new_puzhash, - "new_proof_hash": new_proof_hash.hex() if new_proof_hash is not None else new_proof_hash, - "provider_inner_puzhash": ( - provider_inner_puzhash.hex() if provider_inner_puzhash is not None else provider_inner_puzhash - ), - "fee": fee, - "extra_conditions": conditions_to_json_dicts(extra_conditions), - "push": push, - **tx_config.to_json_dict(), - **timelock_info.to_json_dict(), - }, + return VCSpendResponse.from_json_dict( + await self.fetch( + "vc_spend", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) ) - return json_deserialize_with_clvm_streamable(response, VCSpendResponse) - async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: - await self.fetch("vc_add_proofs", {"proofs": proofs}) + async def vc_add_proofs(self, request: VCAddProofs) -> None: + await self.fetch("vc_add_proofs", request.to_json_dict()) - async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: - response = await self.fetch("vc_get_proofs_for_root", {"root": root.hex()}) - return cast(dict[str, Any], response["proofs"]) + async def vc_get_proofs_for_root(self, request: VCGetProofsForRoot) -> VCGetProofsForRootResponse: + return VCGetProofsForRootResponse.from_json_dict( + await self.fetch("vc_get_proofs_for_root", request.to_json_dict()) + ) async def vc_revoke( self, - vc_parent_id: bytes32, + request: VCRevoke, tx_config: TXConfig, - fee: uint64 = uint64(0), extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - push: bool = True, ) -> VCRevokeResponse: - response = await self.fetch( - "vc_revoke", - { - "vc_parent_id": vc_parent_id.hex(), - "fee": fee, - "extra_conditions": conditions_to_json_dicts(extra_conditions), - "push": push, - **tx_config.to_json_dict(), - **timelock_info.to_json_dict(), - }, + return VCRevokeResponse.from_json_dict( + await self.fetch( + "vc_revoke", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) ) - return json_deserialize_with_clvm_streamable(response, VCRevokeResponse) async def crcat_approve_pending( self, From dd7dc85f5e79bf1d6e321504f1da998af1643662 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Thu, 12 Dec 2024 17:02:23 +0100 Subject: [PATCH 14/25] revert super set rule (#19011) revert https://github.com/Chia-Network/chia-blockchain/pull/18834 --- .../core/mempool/test_mempool_manager.py | 29 ------------------- chia/full_node/mempool_manager.py | 11 ++----- 2 files changed, 3 insertions(+), 37 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index 4164d635dbc9..687dc1d13469 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -770,16 +770,7 @@ def make_test_coins() -> list[Coin]: return ret -def make_ephemeral(coins: list[Coin]) -> list[Coin]: - ret: list[Coin] = [] - for i, parent in enumerate(coins): - ret.append(Coin(parent.name(), height_hash(i + 150), uint64(i * 100))) - return ret - - coins = make_test_coins() -eph = make_ephemeral(coins) -eph2 = make_ephemeral(eph) @pytest.mark.parametrize( @@ -809,26 +800,6 @@ def make_ephemeral(coins: list[Coin]) -> list[Coin]: ([mk_item(coins[0:2])], mk_item(coins[0:2], 
fee=10000000), True), # or if we spend the same coins with additional coins ([mk_item(coins[0:2])], mk_item(coins[0:3], fee=10000000), True), - # SUPERSET RULE WITH EPHEMERAL COINS - # the super set rule only takes non-ephemeral coins into account. The - # ephmeral coins depend on how we spend, and might prevent legitimate - # replace-by-fee attempts. - # replace a spend that includes an ephemeral coin with one that doesn't - ([mk_item(coins[0:2] + eph[0:1])], mk_item(coins[0:2], fee=10000000), True), - # replace a spend with two-levels of ephemeral coins, with one that - # only has 1-level - ([mk_item(coins[0:2] + eph[0:1] + eph2[0:1])], mk_item(coins[0:2] + eph[0:1], fee=10000000), True), - # replace a spend with two-levels of ephemeral coins, with one that - # doesn't - ([mk_item(coins[0:2] + eph[0:1] + eph2[0:1])], mk_item(coins[0:2], fee=10000000), True), - # replace a spend with two-levels of ephemeral coins, with one that - # has *different* ephemeral coins - ([mk_item(coins[0:2] + eph[0:1] + eph2[0:1])], mk_item(coins[0:2] + eph[1:2] + eph2[1:2], fee=10000000), True), - # it's OK to add new ephemeral spends - ([mk_item(coins[0:2])], mk_item(coins[0:2] + eph[1:2] + eph2[1:2], fee=10000000), True), - # eph2[0:1] is not an ephemeral coin here, this violates the superset - # rule. eph[0:1] is missing for that - ([mk_item(coins[0:2] + eph2[0:1])], mk_item(coins[0:2] + eph[1:2] + eph2[1:2], fee=10000000), False), # FEE- AND FEE RATE RULES # if we're replacing two items, each paying a fee of 100, we need to # spend (at least) the same coins and pay at least 10000000 higher fee diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index d8f5cf19cab3..246dc2e37081 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -450,7 +450,7 @@ async def validate_spend_bundle( ) if removal_names != removal_names_from_coin_spends: - # If you reach here it's probably because your puzzle reveal doesn't match the coin's puzzle hash + # If you reach here it's probably because your program reveal doesn't match the coin's puzzle hash return Err.INVALID_SPEND_BUNDLE, None, [] removal_record_dict: dict[bytes32, CoinRecord] = {} @@ -801,13 +801,8 @@ def can_replace( # bundle with AB with a higher fee. An attacker then replaces the bundle with just B with a higher # fee than AB therefore kicking out A altogether. The better way to solve this would be to keep a cache # of booted transactions like A, and retry them after they get removed from mempool due to a conflict. - conflicting_removals = {c.name(): c for c in item.removals} - for coin in conflicting_removals.values(): - coin_name = coin.name() - # if the parent of this coin is one of the spends in this - # transaction, it means it's an ephemeral coin spend. Such spends - # are not considered by the superset rule - if coin_name not in removal_names and coin.parent_coin_info not in conflicting_removals: + for coin in item.removals: + if coin.name() not in removal_names: log.debug(f"Rejecting conflicting tx as it does not spend conflicting coin {coin.name()}") return False From 08478fdf2f750b2dfb9b975b3df42f4edd513e4d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:04:19 -0700 Subject: [PATCH 15/25] build(deps): bump setproctitle from 1.3.3 to 1.3.4 (#19023) Bumps [setproctitle](https://github.com/dvarrazzo/py-setproctitle) from 1.3.3 to 1.3.4. 
- [Changelog](https://github.com/dvarrazzo/py-setproctitle/blob/master/HISTORY.rst) - [Commits](https://github.com/dvarrazzo/py-setproctitle/compare/version-1.3.3...version-1.3.4) --- updated-dependencies: - dependency-name: setproctitle dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 177 ++++++++++++++++++++++++++-------------------------- 1 file changed, 87 insertions(+), 90 deletions(-) diff --git a/poetry.lock b/poetry.lock index 85b8c692eb2d..e4a5f6d249b6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2738,99 +2738,96 @@ files = [ [[package]] name = "setproctitle" -version = "1.3.3" +version = "1.3.4" description = "A Python module to customize the process title" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, - {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, - {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"}, - {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"}, - {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"}, - {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"}, - {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"}, - {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"}, - {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"}, - {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"}, - {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"}, - {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"}, - {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"}, - {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"}, - {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"}, - {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"}, - {file = 
"setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"}, - {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"}, - {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"}, - {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"}, - {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"}, - {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"}, - {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"}, - {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"}, - {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"}, - {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"}, - {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"}, - {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"}, - {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"}, - {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"}, - {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"}, - {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"}, - {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"}, - {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"}, - {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"}, - {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"}, - {file = "setproctitle-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674"}, - {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0"}, - {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f"}, - {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c"}, - {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657"}, - {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e"}, - {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170"}, - {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9"}, - {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8"}, - {file = "setproctitle-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44"}, - {file = "setproctitle-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df"}, - {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"}, - {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89"}, - {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1"}, - {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12"}, - {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8"}, - {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3"}, - {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca"}, - {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092"}, - {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19"}, - {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450"}, - {file = "setproctitle-1.3.3-cp38-cp38-win32.whl", hash = "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2"}, - {file = "setproctitle-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287"}, - {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", 
hash = "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45"}, - {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11"}, - {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42"}, - {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503"}, - {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c"}, - {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e"}, - {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775"}, - {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd"}, - {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3"}, - {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081"}, - {file = "setproctitle-1.3.3-cp39-cp39-win32.whl", hash = "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3"}, - {file = "setproctitle-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002"}, - {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"}, - {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"}, - {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"}, - {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"}, - {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"}, - {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"}, - {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"}, - {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"}, - {file = "setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"}, - {file = 
"setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"}, - {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"}, - {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"}, - {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"}, - {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"}, - {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"}, - {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"}, - {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"}, + {file = "setproctitle-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0f6661a69c68349172ba7b4d5dd65fec2b0917abc99002425ad78c3e58cf7595"}, + {file = "setproctitle-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:754bac5e470adac7f7ec2239c485cd0b75f8197ca8a5b86ffb20eb3a3676cc42"}, + {file = "setproctitle-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7bc7088c15150745baf66db62a4ced4507d44419eb66207b609f91b64a682af"}, + {file = "setproctitle-1.3.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a46ef3ecf61e4840fbc1145fdd38acf158d0da7543eda7b773ed2b30f75c2830"}, + {file = "setproctitle-1.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcb09d5c0ffa043254ec9a734a73f3791fec8bf6333592f906bb2e91ed2af1a"}, + {file = "setproctitle-1.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06c16b7a91cdc5d700271899e4383384a61aae83a3d53d0e2e5a266376083342"}, + {file = "setproctitle-1.3.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9f9732e59863eaeedd3feef94b2b216cb86d40dda4fad2d0f0aaec3b31592716"}, + {file = "setproctitle-1.3.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e152f4ab9ea1632b5fecdd87cee354f2b2eb6e2dfc3aceb0eb36a01c1e12f94c"}, + {file = "setproctitle-1.3.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:020ea47a79b2bbd7bd7b94b85ca956ba7cb026e82f41b20d2e1dac4008cead25"}, + {file = "setproctitle-1.3.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c52b12b10e4057fc302bd09cb3e3f28bb382c30c044eb3396e805179a8260e4"}, + {file = "setproctitle-1.3.4-cp310-cp310-win32.whl", hash = "sha256:a65a147f545f3fac86f11acb2d0b316d3e78139a9372317b7eb50561b2817ba0"}, + {file = "setproctitle-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:66821fada6426998762a3650a37fba77e814a249a95b1183011070744aff47f6"}, + {file = "setproctitle-1.3.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0f749f07002c2d6fecf37cedc43207a88e6c651926a470a5f229070cf791879"}, + {file = 
"setproctitle-1.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:90ea8d302a5d30b948451d146e94674a3c5b020cc0ced9a1c28f8ddb0f203a5d"}, + {file = "setproctitle-1.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f859c88193ed466bee4eb9d45fbc29d2253e6aa3ccd9119c9a1d8d95f409a60d"}, + {file = "setproctitle-1.3.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3afa5a0ed08a477ded239c05db14c19af585975194a00adf594d48533b23701"}, + {file = "setproctitle-1.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a78fce9018cc3e9a772b6537bbe3fe92380acf656c9f86db2f45e685af376e"}, + {file = "setproctitle-1.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d758e2eed2643afac5f2881542fbb5aa97640b54be20d0a5ed0691d02f0867d"}, + {file = "setproctitle-1.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ef133a1a2ee378d549048a12d56f4ef0e2b9113b0b25b6b77821e9af94d50634"}, + {file = "setproctitle-1.3.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1d2a154b79d5fb42d1eff06e05e22f0e8091261d877dd47b37d31352b74ecc37"}, + {file = "setproctitle-1.3.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:202eae632815571297833876a0f407d0d9c7ad9d843b38adbe687fe68c5192ee"}, + {file = "setproctitle-1.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2b0080819859e80a7776ac47cf6accb4b7ad313baf55fabac89c000480dcd103"}, + {file = "setproctitle-1.3.4-cp311-cp311-win32.whl", hash = "sha256:9c9d7d1267dee8c6627963d9376efa068858cfc8f573c083b1b6a2d297a8710f"}, + {file = "setproctitle-1.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:475986ddf6df65d619acd52188336a20f616589403f5a5ceb3fc70cdc137037a"}, + {file = "setproctitle-1.3.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d06990dcfcd41bb3543c18dd25c8476fbfe1f236757f42fef560f6aa03ac8dfc"}, + {file = "setproctitle-1.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317218c9d8b17a010ab2d2f0851e8ef584077a38b1ba2b7c55c9e44e79a61e73"}, + {file = "setproctitle-1.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb5fefb53b9d9f334a5d9ec518a36b92a10b936011ac8a6b6dffd60135f16459"}, + {file = "setproctitle-1.3.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0855006261635e8669646c7c304b494b6df0a194d2626683520103153ad63cc9"}, + {file = "setproctitle-1.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a88e466fcaee659679c1d64dcb2eddbcb4bfadffeb68ba834d9c173a25b6184"}, + {file = "setproctitle-1.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f963b6ed8ba33eda374a98d979e8a0eaf21f891b6e334701693a2c9510613c4c"}, + {file = "setproctitle-1.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:122c2e05697fa91f5d23f00bbe98a9da1bd457b32529192e934095fadb0853f1"}, + {file = "setproctitle-1.3.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1bba0a866f5895d5b769d8c36b161271c7fd407e5065862ab80ff91c29fbe554"}, + {file = "setproctitle-1.3.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:97f1f861998e326e640708488c442519ad69046374b2c3fe9bcc9869b387f23c"}, + {file = "setproctitle-1.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:726aee40357d4bdb70115442cb85ccc8e8bc554fc0bbbaa3a57cbe81df42287d"}, + {file = "setproctitle-1.3.4-cp312-cp312-win32.whl", hash = 
"sha256:04d6ba8b816dbb0bfd62000b0c3e583160893e6e8c4233e1dca1a9ae4d95d924"}, + {file = "setproctitle-1.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:9c76e43cb351ba8887371240b599925cdf3ecececc5dfb7125c71678e7722c55"}, + {file = "setproctitle-1.3.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6e3b177e634aa6bbbfbf66d097b6d1cdb80fc60e912c7d8bace2e45699c07dd"}, + {file = "setproctitle-1.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b17655a5f245b416e127e02087ea6347a48821cc4626bc0fd57101bfcd88afc"}, + {file = "setproctitle-1.3.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa5057a86df920faab8ee83960b724bace01a3231eb8e3f2c93d78283504d598"}, + {file = "setproctitle-1.3.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149fdfb8a26a555780c4ce53c92e6d3c990ef7b30f90a675eca02e83c6d5f76d"}, + {file = "setproctitle-1.3.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ded03546938a987f463c68ab98d683af87a83db7ac8093bbc179e77680be5ba2"}, + {file = "setproctitle-1.3.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9f5b7f2bbc1754bc6292d9a7312071058e5a891b0391e6d13b226133f36aa"}, + {file = "setproctitle-1.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0b19813c852566fa031902124336fa1f080c51e262fc90266a8c3d65ca47b74c"}, + {file = "setproctitle-1.3.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:db78b645dc63c0ccffca367a498f3b13492fb106a2243a1e998303ba79c996e2"}, + {file = "setproctitle-1.3.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b669aaac70bd9f03c070270b953f78d9ee56c4af6f0ff9f9cd3e6d1878c10b40"}, + {file = "setproctitle-1.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6dc3d656702791565994e64035a208be56b065675a5bc87b644c657d6d9e2232"}, + {file = "setproctitle-1.3.4-cp313-cp313-win32.whl", hash = "sha256:091f682809a4d12291cf0205517619d2e7014986b7b00ebecfde3d76f8ae5a8f"}, + {file = "setproctitle-1.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:adcd6ba863a315702184d92d3d3bbff290514f24a14695d310f02ae5e28bd1f7"}, + {file = "setproctitle-1.3.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:acf41cf91bbc5a36d1fa4455a818bb02bf2a4ccfed2f892ba166ba2fcbb0ec8a"}, + {file = "setproctitle-1.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ceb3ce3262b0e8e088e4117175591b7a82b3bdc5e52e33b1e74778b5fb53fd38"}, + {file = "setproctitle-1.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2ef636a6a25fe7f3d5a064bea0116b74a4c8c7df9646b17dc7386c439a26cf"}, + {file = "setproctitle-1.3.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28b8614de08679ae95bc4e8d6daaef6b61afdf027fa0d23bf13d619000286b3c"}, + {file = "setproctitle-1.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24f3c8be826a7d44181eac2269b15b748b76d98cd9a539d4c69f09321dcb5c12"}, + {file = "setproctitle-1.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9d79b1bf833af63b7c720a6604eb16453ac1ad4e718eb8b59d1f97d986b98c"}, + {file = "setproctitle-1.3.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fb693000b65842c85356b667d057ae0d0bac6519feca7e1c437cc2cfeb0afc59"}, + {file = "setproctitle-1.3.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a166251b8fbc6f2755e2ce9d3c11e9edb0c0c7d2ed723658ff0161fbce26ac1c"}, + {file = 
"setproctitle-1.3.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:0361428e6378911a378841509c56ba472d991cbed1a7e3078ec0cacc103da44a"}, + {file = "setproctitle-1.3.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:62d66e0423e3bd520b4c897063506b309843a8d07343fbfad04197e91a4edd28"}, + {file = "setproctitle-1.3.4-cp38-cp38-win32.whl", hash = "sha256:5edd01909348f3b0b2da329836d6b5419cd4869fec2e118e8ff3275b38af6267"}, + {file = "setproctitle-1.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:59e0dda9ad245921af0328035a961767026e1fa94bb65957ab0db0a0491325d6"}, + {file = "setproctitle-1.3.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bdaaa81a6e95a0a19fba0285f10577377f3503ae4e9988b403feba79da3e2f80"}, + {file = "setproctitle-1.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ee5b19a2d794463bcc19153dfceede7beec784b4cf7967dec0bc0fc212ab3a3"}, + {file = "setproctitle-1.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3058a1bb0c767b3a6ccbb38b27ef870af819923eb732e21e44a3f300370fe159"}, + {file = "setproctitle-1.3.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a97d37ee4fe0d1c6e87d2a97229c27a88787a8f4ebfbdeee95f91b818e52efe"}, + {file = "setproctitle-1.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e61dd7d05da11fc69bb86d51f1e0ee08f74dccf3ecf884c94de41135ffdc75d"}, + {file = "setproctitle-1.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb115d53dc2a1299ae72f1119c96a556db36073bacb6da40c47ece5db0d9587"}, + {file = "setproctitle-1.3.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:342570716e2647a51ea859b8a9126da9dc1a96a0153c9c0a3514effd60ab57ad"}, + {file = "setproctitle-1.3.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ad212ae2b03951367a69584af034579b34e1e4199a75d377ef9f8e08ee299b1"}, + {file = "setproctitle-1.3.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4afcb38e22122465013f4621b7e9ff8d42a7a48ae0ffeb94133a806cb91b4aad"}, + {file = "setproctitle-1.3.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:30bb223e6c3f95ad9e9bb2a113292759e947d1cfd60dbd4adb55851c370006b2"}, + {file = "setproctitle-1.3.4-cp39-cp39-win32.whl", hash = "sha256:5f0521ed3bb9f02e9486573ea95e2062cd6bf036fa44e640bd54a06f22d85f35"}, + {file = "setproctitle-1.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:0baadeb27f9e97e65922b4151f818b19c311d30b9efdb62af0e53b3db4006ce2"}, + {file = "setproctitle-1.3.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:939d364a187b2adfbf6ae488664277e717d56c7951a4ddeb4f23b281bc50bfe5"}, + {file = "setproctitle-1.3.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb8a6a19be0cbf6da6fcbf3698b76c8af03fe83e4bd77c96c3922be3b88bf7da"}, + {file = "setproctitle-1.3.4-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:779006f9e1aade9522a40e8d9635115ab15dd82b7af8e655967162e9c01e2573"}, + {file = "setproctitle-1.3.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5519f2a7b8c535b0f1f77b30441476571373add72008230c81211ee17b423b57"}, + {file = "setproctitle-1.3.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:743836d484151334ebba1490d6907ca9e718fe815dcd5756f2a01bc3067d099c"}, + {file = "setproctitle-1.3.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:abda20aff8d1751e48d7967fa8945fef38536b82366c49be39b83678d4be3893"}, + {file = "setproctitle-1.3.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a2041b5788ce52f218b5be94af458e04470f997ab46fdebd57cf0b8374cc20e"}, + {file = "setproctitle-1.3.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2c3b1ce68746557aa6e6f4547e76883925cdc7f8d7c7a9f518acd203f1265ca5"}, + {file = "setproctitle-1.3.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0b6a4cbabf024cb263a45bdef425760f14470247ff223f0ec51699ca9046c0fe"}, + {file = "setproctitle-1.3.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e55d7ecc68bdc80de5a553691a3ed260395d5362c19a266cf83cbb4e046551f"}, + {file = "setproctitle-1.3.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02ca3802902d91a89957f79da3ec44b25b5804c88026362cb85eea7c1fbdefd1"}, + {file = "setproctitle-1.3.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:47669fc8ed8b27baa2d698104732234b5389f6a59c37c046f6bcbf9150f7a94e"}, + {file = "setproctitle-1.3.4.tar.gz", hash = "sha256:3b40d32a3e1f04e94231ed6dfee0da9e43b4f9c6b5450d53e6dd7754c34e0c50"}, ] [package.extras] From f51a8ceb328b6e9e56fb802b5138d68725544035 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:04:29 -0700 Subject: [PATCH 16/25] build(deps): bump build from 1.2.1 to 1.2.2.post1 (#19024) Bumps [build](https://github.com/pypa/build) from 1.2.1 to 1.2.2.post1. - [Release notes](https://github.com/pypa/build/releases) - [Changelog](https://github.com/pypa/build/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/build/compare/1.2.1...1.2.2.post1) --- updated-dependencies: - dependency-name: build dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e4a5f6d249b6..f121ffdc9730 100644 --- a/poetry.lock +++ b/poetry.lock @@ -499,13 +499,13 @@ crt = ["awscrt (==0.22.0)"] [[package]] name = "build" -version = "1.2.1" +version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = true python-versions = ">=3.8" files = [ - {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, - {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, ] [package.dependencies] From f77bc52f337aa74959ebf2562c08798451ed565d Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 12 Dec 2024 11:04:58 -0500 Subject: [PATCH 17/25] correct short license text to SPDX standard `Apache-2.0` (#19027) https://spdx.org/licenses/ --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c419bc11266e..92c11fe0f250 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ name = "chia-blockchain" version = "0.0.0" description = "Chia blockchain full node, farmer, timelord, and wallet." authors = ["Mariano Sorgente "] -license = "Apache License" +license = "Apache-2.0" readme = "README.md" keywords= ["chia", "blockchain", "node"] homepage = "https://chia.net/" From 4db4cd9cc1a56ac1872ecdc03d9c8080419463b3 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Thu, 12 Dec 2024 08:07:12 -0800 Subject: [PATCH 18/25] convert test_change_pools to WalletTestFramework (#18996) * convert test_change_pools to WalletTestFramework * add some checks that were in previous test version * wait for wallet sync * farm more blocks? 
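For context on the conversion below: instead of a single `time_out_assert` after farming one block, the reworked test farms blocks in chunks of `LOCK_HEIGHT + 2` and re-checks the pool wallet state between chunks until the target state is reached. A minimal sketch of that polling pattern, using only calls that appear in the diff (the helper name and import path here are illustrative; the real helper added below is `farm_blocks_until_state`):

```python
from chia.pools.pool_wallet_info import PoolSingletonState, PoolWalletInfo

LOCK_HEIGHT = 5  # relative lock height used by the test


async def wait_for_pool_state(full_node, wallet_node, wallet_rpc, wallet_id, target: PoolSingletonState) -> bool:
    # Farm in chunks large enough to pass the relative lock height, re-sync the
    # wallet, then re-read pw_status; give up after a bounded number of blocks.
    farmed = 0
    while farmed < 10 * (LOCK_HEIGHT + 2):
        await full_node.farm_blocks_to_puzzlehash(count=LOCK_HEIGHT + 2, guarantee_transaction_blocks=True)
        farmed += LOCK_HEIGHT + 2
        await full_node.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20)
        pw_status: PoolWalletInfo = (await wallet_rpc.pw_status(wallet_id))[0]
        if pw_status.current.state == target.value:
            return True
    return False
```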
--- chia/_tests/pools/test_pool_cmdline.py | 97 +----------- chia/_tests/pools/test_pool_rpc.py | 209 +++++++++++++++++++------ 2 files changed, 165 insertions(+), 141 deletions(-) diff --git a/chia/_tests/pools/test_pool_cmdline.py b/chia/_tests/pools/test_pool_cmdline.py index 77a0b9f80f96..25de78825818 100644 --- a/chia/_tests/pools/test_pool_cmdline.py +++ b/chia/_tests/pools/test_pool_cmdline.py @@ -3,7 +3,7 @@ import json from dataclasses import dataclass from io import StringIO -from typing import Optional, Union, cast +from typing import Optional, cast import pytest from chia_rs import G1Element @@ -14,7 +14,12 @@ from chia._tests.cmds.cmd_test_utils import TestWalletRpcClient from chia._tests.conftest import ConsensusMode from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework -from chia._tests.pools.test_pool_rpc import manage_temporary_pool_plot +from chia._tests.pools.test_pool_rpc import ( + LOCK_HEIGHT, + create_new_plotnft, + manage_temporary_pool_plot, + verify_pool_state, +) from chia._tests.util.misc import Marks, boolean_datacases, datacases from chia.cmds.cmd_classes import NeedsWalletRPC, WalletClientInfo from chia.cmds.param_types import CliAddress @@ -36,7 +41,7 @@ from chia.util.bech32m import encode_puzzle_hash from chia.util.config import lock_and_load_config, save_config from chia.util.errors import CliRpcConnectionError -from chia.util.ints import uint32, uint64 +from chia.util.ints import uint64 from chia.wallet.util.address_type import AddressType from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_state_manager import WalletStateManager @@ -44,8 +49,6 @@ # limit to plain consensus mode for all tests pytestmark = [pytest.mark.limit_consensus_modes(reason="irrelevant")] -LOCK_HEIGHT = uint32(5) - @dataclass class StateUrlCase: @@ -56,90 +59,6 @@ class StateUrlCase: marks: Marks = () -async def verify_pool_state(wallet_rpc: WalletRpcClient, w_id: int, expected_state: PoolSingletonState) -> bool: - pw_status: PoolWalletInfo = (await wallet_rpc.pw_status(w_id))[0] - return pw_status.current.state == expected_state.value - - -async def process_plotnft_create( - wallet_test_framework: WalletTestFramework, expected_state: PoolSingletonState, second_nft: bool = False -) -> int: - wallet_rpc: WalletRpcClient = wallet_test_framework.environments[0].rpc_client - - pre_block_balance_updates: dict[Union[int, str], dict[str, int]] = { - 1: { - "confirmed_wallet_balance": 0, - "unconfirmed_wallet_balance": -1, - "<=#spendable_balance": 1, - "<=#max_send_amount": 1, - ">=#pending_change": 1, # any amount increase - "pending_coin_removal_count": 1, - } - } - - post_block_balance_updates: dict[Union[int, str], dict[str, int]] = { - 1: { - "confirmed_wallet_balance": -1, - "unconfirmed_wallet_balance": 0, - ">=#spendable_balance": 1, - ">=#max_send_amount": 1, - "<=#pending_change": 1, # any amount decrease - "<=#pending_coin_removal_count": 1, - }, - } - - if second_nft: - post_block = post_block_balance_updates | { - 2: { - "set_remainder": True, # TODO: sometimes this fails with pending_coin_removal_count - }, - 3: {"init": True, "unspent_coin_count": 1}, - } - else: - post_block = post_block_balance_updates | {2: {"init": True, "unspent_coin_count": 1}} - - await wallet_test_framework.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates=pre_block_balance_updates, - post_block_balance_updates=post_block, - ) - ] - ) - - summaries_response = await 
wallet_rpc.get_wallets(WalletType.POOLING_WALLET) - assert len(summaries_response) == 2 if second_nft else 1 - wallet_id: int = summaries_response[-1]["id"] - - await verify_pool_state(wallet_rpc, wallet_id, expected_state=expected_state) - return wallet_id - - -async def create_new_plotnft( - wallet_test_framework: WalletTestFramework, self_pool: bool = False, second_nft: bool = False -) -> int: - wallet_state_manager: WalletStateManager = wallet_test_framework.environments[0].wallet_state_manager - wallet_rpc: WalletRpcClient = wallet_test_framework.environments[0].rpc_client - - our_ph = await wallet_state_manager.main_wallet.get_new_puzzlehash() - - await wallet_rpc.create_new_pool_wallet( - target_puzzlehash=our_ph, - backup_host="", - mode="new", - relative_lock_height=uint32(0) if self_pool else LOCK_HEIGHT, - state="SELF_POOLING" if self_pool else "FARMING_TO_POOL", - pool_url="" if self_pool else "http://pool.example.com", - fee=uint64(0), - ) - - return await process_plotnft_create( - wallet_test_framework=wallet_test_framework, - expected_state=PoolSingletonState.SELF_POOLING if self_pool else PoolSingletonState.FARMING_TO_POOL, - second_nft=second_nft, - ) - - @pytest.mark.parametrize( "wallet_environments", [ diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index dd30f7d3a0cf..b5359dee101a 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -8,7 +8,7 @@ from dataclasses import dataclass from pathlib import Path from shutil import rmtree -from typing import Any +from typing import Any, Union import pytest @@ -16,6 +16,7 @@ from _pytest.fixtures import SubRequest from chia_rs import G1Element +from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework from chia._tests.util.setup_nodes import setup_simulators_and_wallets_service from chia._tests.util.time_out_assert import time_out_assert from chia.consensus.constants import ConsensusConstants @@ -40,12 +41,17 @@ from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_node import WalletNode +from chia.wallet.wallet_state_manager import WalletStateManager # TODO: Compare deducted fees in all tests against reported total_fee +# limit to plain consensus mode for all tests +pytestmark = [pytest.mark.limit_consensus_modes(reason="irrelevant")] + log = logging.getLogger(__name__) FEE_AMOUNT = uint64(29_000) MAX_WAIT_SECS = 30 # A high value for WAIT_SECS is useful when paused in the debugger +LOCK_HEIGHT = uint32(5) def get_pool_plot_dir() -> Path: @@ -183,6 +189,90 @@ async def setup( await client.await_closed() +async def verify_pool_state(wallet_rpc: WalletRpcClient, w_id: int, expected_state: PoolSingletonState) -> bool: + pw_status: PoolWalletInfo = (await wallet_rpc.pw_status(w_id))[0] + return pw_status.current.state == expected_state.value + + +async def process_plotnft_create( + wallet_test_framework: WalletTestFramework, expected_state: PoolSingletonState, second_nft: bool = False +) -> int: + wallet_rpc: WalletRpcClient = wallet_test_framework.environments[0].rpc_client + + pre_block_balance_updates: dict[Union[int, str], dict[str, int]] = { + 1: { + "confirmed_wallet_balance": 0, + "unconfirmed_wallet_balance": -1, + "<=#spendable_balance": 1, + "<=#max_send_amount": 1, + ">=#pending_change": 1, # any amount increase + "pending_coin_removal_count": 1, + } + } + + post_block_balance_updates: dict[Union[int, str], dict[str, int]] = { + 1: { + 
"confirmed_wallet_balance": -1, + "unconfirmed_wallet_balance": 0, + ">=#spendable_balance": 1, + ">=#max_send_amount": 1, + "<=#pending_change": 1, # any amount decrease + "<=#pending_coin_removal_count": 1, + }, + } + + if second_nft: + post_block = post_block_balance_updates | { + 2: { + "set_remainder": True, # TODO: sometimes this fails with pending_coin_removal_count + }, + 3: {"init": True, "unspent_coin_count": 1}, + } + else: + post_block = post_block_balance_updates | {2: {"init": True, "unspent_coin_count": 1}} + + await wallet_test_framework.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates=pre_block_balance_updates, + post_block_balance_updates=post_block, + ) + ] + ) + + summaries_response = await wallet_rpc.get_wallets(WalletType.POOLING_WALLET) + assert len(summaries_response) == 2 if second_nft else 1 + wallet_id: int = summaries_response[-1]["id"] + + await verify_pool_state(wallet_rpc, wallet_id, expected_state=expected_state) + return wallet_id + + +async def create_new_plotnft( + wallet_test_framework: WalletTestFramework, self_pool: bool = False, second_nft: bool = False +) -> int: + wallet_state_manager: WalletStateManager = wallet_test_framework.environments[0].wallet_state_manager + wallet_rpc: WalletRpcClient = wallet_test_framework.environments[0].rpc_client + + our_ph = await wallet_state_manager.main_wallet.get_new_puzzlehash() + + await wallet_rpc.create_new_pool_wallet( + target_puzzlehash=our_ph, + backup_host="", + mode="new", + relative_lock_height=uint32(0) if self_pool else LOCK_HEIGHT, + state="SELF_POOLING" if self_pool else "FARMING_TO_POOL", + pool_url="" if self_pool else "http://pool.example.com", + fee=uint64(0), + ) + + return await process_plotnft_create( + wallet_test_framework=wallet_test_framework, + expected_state=PoolSingletonState.SELF_POOLING if self_pool else PoolSingletonState.FARMING_TO_POOL, + second_nft=second_nft, + ) + + class TestPoolWalletRpc: @pytest.mark.anyio async def test_create_new_pool_wallet_self_farm( @@ -865,73 +955,88 @@ async def status_is_self_pooling() -> bool: await time_out_assert(timeout=MAX_WAIT_SECS, function=status_is_self_pooling) assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 + @pytest.mark.parametrize( + "wallet_environments", + [ + { + "num_environments": 1, + "blocks_needed": [10], + } + ], + indirect=True, + ) @pytest.mark.anyio - async def test_change_pools(self, setup: Setup, fee: uint64, self_hostname: str) -> None: + async def test_change_pools( + self, + fee: uint64, + wallet_environments: WalletTestFramework, + ) -> None: """This tests Pool A -> escaping -> Pool B""" - full_node_api, wallet_node, our_ph, _total_block_rewards, client = setup - pool_a_ph = bytes32.zeros - pool_b_ph = bytes32.zeros - WAIT_SECS = 200 - assert len(await client.get_wallets(WalletType.POOLING_WALLET)) == 0 + wallet_state_manager: WalletStateManager = wallet_environments.environments[0].wallet_state_manager + wallet_rpc: WalletRpcClient = wallet_environments.environments[0].rpc_client - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) + wallet_state_manager.config["reuse_public_key_for_change"][ + str(wallet_state_manager.root_pubkey.get_fingerprint()) + ] = wallet_environments.tx_config.reuse_puzhash - creation_tx: TransactionRecord = await client.create_new_pool_wallet( - pool_a_ph, "https://pool-a.org", uint32(5), f"{self_hostname}:5000", "new", "FARMING_TO_POOL", fee - ) + # Create a farming plotnft to url 
http://pool.example.com + wallet_id = await create_new_plotnft(wallet_environments) - await full_node_api.wait_transaction_records_entered_mempool(records=[creation_tx]) - - await full_node_api.farm_blocks_to_puzzlehash(count=6, farm_to=our_ph, guarantee_transaction_blocks=True) - assert not full_node_api.txs_in_mempool(txs=[creation_tx]) - - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - - summaries_response = await client.get_wallets(WalletType.POOLING_WALLET) - assert len(summaries_response) == 1 - wallet_id: int = summaries_response[0]["id"] - status: PoolWalletInfo = (await client.pw_status(wallet_id))[0] - - assert status.current.state == PoolSingletonState.FARMING_TO_POOL.value - assert status.target is None - - async def status_is_farming_to_pool() -> bool: - await full_node_api.farm_blocks_to_puzzlehash(count=1, farm_to=our_ph, guarantee_transaction_blocks=True) - pw_status: PoolWalletInfo = (await client.pw_status(wallet_id))[0] - return pw_status.current.state == PoolSingletonState.FARMING_TO_POOL.value - - await time_out_assert(timeout=WAIT_SECS, function=status_is_farming_to_pool) - - pw_info: PoolWalletInfo = (await client.pw_status(wallet_id))[0] - assert pw_info.current.pool_url == "https://pool-a.org" - assert pw_info.current.relative_lock_height == 5 - - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) + # Join a different pool join_pool_tx: TransactionRecord = ( - await client.pw_join_pool( + await wallet_rpc.pw_join_pool( wallet_id, - pool_b_ph, + bytes32.zeros, "https://pool-b.org", - uint32(10), + LOCK_HEIGHT, uint64(fee), ) )["transaction"] assert join_pool_tx is not None - async def status_is_leaving() -> bool: - await full_node_api.farm_blocks_to_puzzlehash(count=1, farm_to=our_ph, guarantee_transaction_blocks=True) - pw_status: PoolWalletInfo = (await client.pw_status(wallet_id))[0] - return pw_status.current.state == PoolSingletonState.LEAVING_POOL.value + await wallet_environments.full_node.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) + await verify_pool_state(wallet_rpc, wallet_id, PoolSingletonState.LEAVING_POOL) + await wallet_environments.full_node.farm_blocks_to_puzzlehash( + count=LOCK_HEIGHT + 2, guarantee_transaction_blocks=True + ) + await wallet_environments.full_node.wait_for_wallet_synced( + wallet_node=wallet_environments.environments[0].node, timeout=20 + ) - await time_out_assert(timeout=WAIT_SECS, function=status_is_leaving) - pw_info = (await client.pw_status(wallet_id))[0] + async def farm_blocks_until_state( + state: PoolSingletonState, + wallet_rpc: WalletRpcClient, + wallet_id: int, + full_node: FullNodeSimulator, + wallet_node: WalletNode, + max_blocks: int = 10 * (LOCK_HEIGHT + 2), + ) -> bool: + block_chunk = LOCK_HEIGHT + 2 + total_blocks_farmed = 0 + while total_blocks_farmed < max_blocks: + await full_node.farm_blocks_to_puzzlehash(count=block_chunk, guarantee_transaction_blocks=True) + total_blocks_farmed += block_chunk + print(f"Checking state after {total_blocks_farmed} blocks") + + await full_node.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) + pw_status: PoolWalletInfo = (await wallet_rpc.pw_status(wallet_id))[0] + if pw_status.current.state == state.value: + return True + return False + + assert await farm_blocks_until_state( + PoolSingletonState.FARMING_TO_POOL, + wallet_rpc, + wallet_id, + wallet_environments.full_node, + wallet_environments.environments[0].node, + ) - await time_out_assert(timeout=WAIT_SECS, 
function=status_is_farming_to_pool) - pw_info = (await client.pw_status(wallet_id))[0] - assert pw_info.current.pool_url == "https://pool-b.org" - assert pw_info.current.relative_lock_height == 10 - assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 + pw_status: PoolWalletInfo = (await wallet_rpc.pw_status(wallet_id))[0] + assert pw_status.current.state == PoolSingletonState.FARMING_TO_POOL.value + assert pw_status.current.pool_url == "https://pool-b.org" + assert pw_status.current.relative_lock_height == LOCK_HEIGHT @pytest.mark.anyio async def test_change_pools_reorg(self, setup: Setup, fee: uint64, self_hostname: str) -> None: From 29826679a4e3dcf23759409153d6dd352f5c669d Mon Sep 17 00:00:00 2001 From: Almog De Paz Date: Thu, 12 Dec 2024 18:15:28 +0200 Subject: [PATCH 19/25] fix wrong param in prevalidate (#19035) --- chia/full_node/full_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index b8b71dd91a61..de0c9f3a7671 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -1622,7 +1622,7 @@ async def add_prevalidated_blocks( cc_sub_slot = block.finished_sub_slots[0].challenge_chain if cc_sub_slot.new_sub_slot_iters is not None or cc_sub_slot.new_difficulty is not None: expected_sub_slot_iters, expected_difficulty = get_next_sub_slot_iters_and_difficulty( - self.constants, True, block_record, self.blockchain + self.constants, True, block_record, blockchain ) assert cc_sub_slot.new_sub_slot_iters is not None vs.ssi = cc_sub_slot.new_sub_slot_iters From beff6e2aedc57655170333c5ba235330b58354f1 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Thu, 12 Dec 2024 08:51:50 -0800 Subject: [PATCH 20/25] CHIA-2022: Fix problems with startup timing and the Datalayer processing loop (#19014) Catch some more exceptions when getting owned stores in DL loop --- chia/data_layer/data_layer.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py index 2e37e6a7b621..9ba8bbff1cf4 100644 --- a/chia/data_layer/data_layer.py +++ b/chia/data_layer/data_layer.py @@ -920,10 +920,14 @@ async def periodically_manage_data(self) -> None: # Need this to make sure we process updates and generate DAT files try: owned_stores = await self.get_owned_stores() - except ValueError: + except (ValueError, aiohttp.client_exceptions.ClientConnectorError): # Sometimes the DL wallet isn't available, so we can't get the owned stores. # We'll try again next time. owned_stores = [] + except Exception as e: + self.log.error(f"Exception while fetching owned stores: {type(e)} {e} {traceback.format_exc()}") + owned_stores = [] + subscription_store_ids = {subscription.store_id for subscription in subscriptions} for record in owned_stores: store_id = record.launcher_id From 1a1209c06ac1e55fb9bcb9add89fa0020f0c0dd1 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Thu, 12 Dec 2024 17:52:05 +0100 Subject: [PATCH 21/25] remove the block fill rate limit of 70% when farming a block (#19005) * remove the block fill rate limit of 70% when farming a block * Addendum to 100% block fill rate. (#19006) * break down 'magic number' in test into its components * Attempt to clarify why we add many aggsig conditions in make_and_send_big_cost_sb (#19026) Attempt to clarify why we add many aggsig conditions in make_and_send_big_cost_sb. 
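The headline change in the diff below is that `max_block_clvm_cost` becomes `MAX_BLOCK_COST_CLVM - BLOCK_OVERHEAD` rather than `MAX_BLOCK_COST_CLVM * 0.7 - BLOCK_OVERHEAD`, so a farmed block may be filled to 100% of the CLVM cost limit minus only the wrapping quote's overhead. As a rough, standalone sanity check of the test's cost arithmetic (the byte sizes 93/88 and the execution cost 44 come from the diff; `COST_PER_BYTE` and the per-condition costs are assumed to be the mainnet defaults):

```python
COST_PER_BYTE = 12_000             # assumed mainnet default
AGG_SIG_COST = 1_200_000           # assumed ConditionCost.AGG_SIG.value
CREATE_COIN_COST = 1_800_000       # assumed ConditionCost.CREATE_COIN.value
MAX_BLOCK_CLVM_COST = 550_000_000  # limit used by the test

# Start with the generator execution cost, then one CREATE_COIN condition.
cost = 44 + CREATE_COIN_COST + 93 * COST_PER_BYTE
aggsig_cost = AGG_SIG_COST + 88 * COST_PER_BYTE
n_aggsigs = 0
while cost + aggsig_cost < MAX_BLOCK_CLVM_COST:
    cost += aggsig_cost
    n_aggsigs += 1

# With these numbers: 242 AGG_SIG_UNSAFE conditions, total cost 548_868_044,
# i.e. just under the per-block limit, which is what the test relies on.
print(n_aggsigs, cost)
```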
--------- Co-authored-by: Amine Khaldi --- .../core/mempool/test_mempool_manager.py | 64 +++++++++++++------ chia/full_node/mempool_manager.py | 3 +- 2 files changed, 46 insertions(+), 21 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index 687dc1d13469..74415f350061 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -12,6 +12,7 @@ from chia._tests.conftest import ConsensusMode from chia._tests.util.misc import invariant_check_mempool from chia._tests.util.setup_nodes import OldSimulatorsAndWallets, setup_simulators_and_wallets +from chia.consensus.condition_costs import ConditionCost from chia.consensus.constants import ConsensusConstants from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.full_node.mempool import MAX_SKIPPED_ITEMS, PRIORITY_TX_THRESHOLD @@ -170,7 +171,7 @@ async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]: constants = DEFAULT_CONSTANTS if max_block_clvm_cost is not None: - constants = constants.replace(MAX_BLOCK_COST_CLVM=uint64(max_block_clvm_cost)) + constants = constants.replace(MAX_BLOCK_COST_CLVM=uint64(max_block_clvm_cost + TEST_BLOCK_OVERHEAD)) if mempool_block_buffer is not None: constants = constants.replace(MEMPOOL_BLOCK_BUFFER=uint8(mempool_block_buffer)) mempool_manager = await instantiate_mempool_manager( @@ -1036,36 +1037,63 @@ async def send_spends_to_mempool(coin_spends: list[CoinSpend]) -> None: @pytest.mark.parametrize("num_skipped_items", [PRIORITY_TX_THRESHOLD, MAX_SKIPPED_ITEMS]) @pytest.mark.anyio async def test_create_bundle_from_mempool_on_max_cost(num_skipped_items: int, caplog: pytest.LogCaptureFixture) -> None: + """ + This test exercises the path where an item's inclusion would exceed the + maximum cumulative cost, so it gets skipped as a result. + + NOTE: + 1. After PRIORITY_TX_THRESHOLD, we skip items with eligible coins. + 2. After skipping MAX_SKIPPED_ITEMS, we stop processing further items. + """ + async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[UnspentLineageInfo]: assert False # pragma: no cover - # This test exercises the path where an item's inclusion would exceed the - # maximum cumulative cost, so it gets skipped as a result + MAX_BLOCK_CLVM_COST = 550_000_000 - # NOTE: - # 1. After PRIORITY_TX_THRESHOLD, we skip items with eligible coins. - # 2. After skipping MAX_SKIPPED_ITEMS, we stop processing further items. + mempool_manager, coins = await setup_mempool_with_coins( + coin_amounts=list(range(1_000_000_000, 1_000_000_030)), + max_block_clvm_cost=MAX_BLOCK_CLVM_COST, + max_tx_clvm_cost=uint64(MAX_BLOCK_CLVM_COST), + mempool_block_buffer=20, + ) async def make_and_send_big_cost_sb(coin: Coin) -> None: + """ + Creates a spend bundle with a big enough cost that gets it close to the + maximum block clvm cost limit. + """ conditions = [] sk = AugSchemeMPL.key_gen(b"7" * 32) g1 = sk.get_g1() sig = AugSchemeMPL.sign(sk, IDENTITY_PUZZLE_HASH, g1) aggsig = G2Element() - for _ in range(169): + # Let's get as close to `MAX_BLOCK_CLVM_COST` (550_000_000) as possible. 
+ # We start by accounting for execution cost + spend_bundle_cost = 44 + # And then the created coin + conditions.append([ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, coin.amount - 10_000_000]) + TEST_CREATE_COIN_SPEND_BYTESIZE = 93 + TEST_CREATE_COIN_CONDITION_COST = ( + ConditionCost.CREATE_COIN.value + TEST_CREATE_COIN_SPEND_BYTESIZE * DEFAULT_CONSTANTS.COST_PER_BYTE + ) + spend_bundle_cost += TEST_CREATE_COIN_CONDITION_COST + # We're using agg sig conditions to increase the spend bundle's cost + # and reach our target cost. + TEST_AGG_SIG_SPEND_BYTESIZE = 88 + TEST_AGGSIG_CONDITION_COST = ( + ConditionCost.AGG_SIG.value + TEST_AGG_SIG_SPEND_BYTESIZE * DEFAULT_CONSTANTS.COST_PER_BYTE + ) + while spend_bundle_cost + TEST_AGGSIG_CONDITION_COST < MAX_BLOCK_CLVM_COST: conditions.append([ConditionOpcode.AGG_SIG_UNSAFE, g1, IDENTITY_PUZZLE_HASH]) aggsig += sig - conditions.append([ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, coin.amount - 10_000_000]) - # Create a spend bundle with a big enough cost that gets it close to the limit + spend_bundle_cost += TEST_AGGSIG_CONDITION_COST + # We now have a spend bundle with a big enough cost that gets it close to the limit _, _, res = await generate_and_add_spendbundle(mempool_manager, conditions, coin, aggsig) - assert res[1] == MempoolInclusionStatus.SUCCESS + cost, status, _ = res + assert status == MempoolInclusionStatus.SUCCESS + assert cost == spend_bundle_cost - mempool_manager, coins = await setup_mempool_with_coins( - coin_amounts=list(range(1_000_000_000, 1_000_000_030)), - max_block_clvm_cost=550_000_000, - max_tx_clvm_cost=uint64(550_000_000), - mempool_block_buffer=20, - ) # Create the spend bundles with a big enough cost that they get close to the limit for i in range(num_skipped_items): await make_and_send_big_cost_sb(coins[i]) @@ -2016,9 +2044,7 @@ async def fill_mempool_with_test_sbs( # and without them we won't be able to get the test bundle in. # This defaults to `MAX_BLOCK_COST_CLVM // 2` full_node_api.full_node._mempool_manager.max_tx_clvm_cost = max_block_clvm_cost - # This defaults to `MAX_BLOCK_COST_CLVM * BLOCK_SIZE_LIMIT_FACTOR` - # TODO: Revisit this when we eventually raise the fille rate to 100% - # and `BLOCK_SIZE_LIMIT_FACTOR` is no longer relevant. + # This defaults to `MAX_BLOCK_COST_CLVM - BLOCK_OVERHEAD` full_node_api.full_node._mempool_manager.mempool.mempool_info = dataclasses.replace( full_node_api.full_node._mempool_manager.mempool.mempool_info, max_block_clvm_cost=CLVMCost(max_block_clvm_cost), diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 246dc2e37081..ad6a11b52dbf 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -162,12 +162,11 @@ def __init__( # spends. self.nonzero_fee_minimum_fpc = 5 - BLOCK_SIZE_LIMIT_FACTOR = 0.7 # We need to deduct the block overhead, which consists of the wrapping # quote opcode's bytes cost as well as its execution cost. 
BLOCK_OVERHEAD = QUOTE_BYTES * self.constants.COST_PER_BYTE + QUOTE_EXECUTION_COST - self.max_block_clvm_cost = uint64(self.constants.MAX_BLOCK_COST_CLVM * BLOCK_SIZE_LIMIT_FACTOR - BLOCK_OVERHEAD) + self.max_block_clvm_cost = uint64(self.constants.MAX_BLOCK_COST_CLVM - BLOCK_OVERHEAD) self.max_tx_clvm_cost = ( max_tx_clvm_cost if max_tx_clvm_cost is not None else uint64(self.constants.MAX_BLOCK_COST_CLVM // 2) ) From c98d3b5d68877ba631367c4afcafd047a8f16b15 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 12 Dec 2024 20:49:18 +0100 Subject: [PATCH 22/25] Update the anchor against the checkpoint. --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index b2caac88dc2c..e1da059529ba 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit b2caac88dc2ced4d4e2a904456506bc7318a5434 +Subproject commit e1da059529ba55a1bc645e98a56b45233b6e5887 From bc5ad74c998265ad3e4005753aebccc3151fb2b5 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 13 Dec 2024 13:39:55 -0500 Subject: [PATCH 23/25] more less default root path (#19031) * more less default root path * hmm --- chia/_tests/cmds/cmd_test_utils.py | 2 +- chia/cmds/cmd_classes.py | 5 +- chia/cmds/cmds_util.py | 2 +- chia/cmds/coin_funcs.py | 10 +- chia/cmds/coins.py | 7 ++ chia/cmds/dao.py | 54 +++++++- chia/cmds/dao_funcs.py | 66 ++++++---- chia/cmds/wallet.py | 195 ++++++++++++++++++++++++----- chia/cmds/wallet_funcs.py | 166 +++++++++++++++--------- 9 files changed, 381 insertions(+), 126 deletions(-) diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 57296e9daf50..68fbb5e88a2f 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -409,9 +409,9 @@ async def test_get_any_service_client( @asynccontextmanager async def test_get_wallet_client( + root_path: Path = default_root, wallet_rpc_port: Optional[int] = None, fingerprint: Optional[int] = None, - root_path: Path = default_root, ) -> AsyncIterator[tuple[WalletRpcClient, int, dict[str, Any]]]: async with test_get_any_service_client(WalletRpcClient, wallet_rpc_port, root_path) as (wallet_client, config): wallet_client.fingerprint = fingerprint # type: ignore diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 0945fe3d781d..4e1d4076eea4 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -299,9 +299,8 @@ async def wallet_rpc(self, **kwargs: Any) -> AsyncIterator[WalletClientInfo]: if self.client_info is not None: yield self.client_info else: - if "root_path" not in kwargs: - kwargs["root_path"] = self.context["root_path"] - async with get_wallet_client(self.wallet_rpc_port, self.fingerprint, **kwargs) as ( + root_path = kwargs.get("root_path", self.context["root_path"]) + async with get_wallet_client(root_path, self.wallet_rpc_port, self.fingerprint, **kwargs) as ( wallet_client, fp, config, diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index e260d3460e85..e61d342eaf56 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -248,9 +248,9 @@ async def get_wallet(root_path: Path, wallet_client: WalletRpcClient, fingerprin @asynccontextmanager async def get_wallet_client( + root_path: Path, wallet_rpc_port: Optional[int] = None, fingerprint: Optional[int] = None, - root_path: Path = DEFAULT_ROOT_PATH, consume_errors: bool = True, ) -> AsyncIterator[tuple[WalletRpcClient, int, dict[str, Any]]]: async with 
get_any_service_client(WalletRpcClient, wallet_rpc_port, root_path, consume_errors) as ( diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index 7a03d6973d8d..1cc298719bcc 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -3,6 +3,7 @@ import dataclasses import sys from collections.abc import Sequence +from pathlib import Path from typing import Optional from chia.cmds.cmds_util import CMDCoinSelectionConfigLoader, CMDTXConfigLoader, cli_confirm, get_wallet_client @@ -21,6 +22,7 @@ async def async_list( *, + root_path: Path, wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, @@ -31,7 +33,7 @@ async def async_list( show_unconfirmed: bool, paginate: Optional[bool], ) -> None: - async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, _, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, _, config): addr_prefix = selected_network_address_prefix(config) if paginate is None: paginate = sys.stdout.isatty() @@ -114,6 +116,7 @@ def print_coins( async def async_combine( *, + root_path: Path, wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, @@ -131,7 +134,7 @@ async def async_combine( condition_valid_times: ConditionValidTimes, override: bool, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): try: wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(wallet_type) @@ -194,6 +197,7 @@ async def async_combine( async def async_split( *, + root_path: Path, wallet_rpc_port: Optional[int], fingerprint: Optional[int], wallet_id: int, @@ -209,7 +213,7 @@ async def async_split( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): try: wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(wallet_type) diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index 0adf9835481e..b1fc69e23648 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -55,6 +55,7 @@ def list_cmd( asyncio.run( async_list( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, wallet_id=id, @@ -111,7 +112,9 @@ def list_cmd( ) @click.option("--override", help="Submits transaction without checking for unusual values", is_flag=True, default=False) @tx_out_cmd() +@click.pass_context def combine_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -133,6 +136,7 @@ def combine_cmd( return asyncio.run( async_combine( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, wallet_id=id, @@ -183,7 +187,9 @@ def combine_cmd( ) @tx_config_args @tx_out_cmd() +@click.pass_context def split_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -203,6 +209,7 @@ def split_cmd( return asyncio.run( async_split( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, wallet_id=id, diff --git a/chia/cmds/dao.py b/chia/cmds/dao.py index b59cc65ea2a4..d592380a1965 
100644 --- a/chia/cmds/dao.py +++ b/chia/cmds/dao.py @@ -51,7 +51,9 @@ def dao_cmd(ctx: click.Context) -> None: default=uint64(1), show_default=True, ) +@click.pass_context def dao_add_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, treasury_id: bytes32, @@ -60,7 +62,7 @@ def dao_add_cmd( ) -> None: from chia.cmds.dao_funcs import add_dao_wallet - asyncio.run(add_dao_wallet(wallet_rpc_port, fingerprint, name, treasury_id, filter_amount)) + asyncio.run(add_dao_wallet(ctx.obj["root_path"], wallet_rpc_port, fingerprint, name, treasury_id, filter_amount)) # ---------------------------------------------------------------------------------------- @@ -148,7 +150,9 @@ def dao_add_cmd( ) @tx_config_args @tx_out_cmd() +@click.pass_context def dao_create_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, proposal_timelock: uint64, @@ -180,6 +184,7 @@ def dao_create_cmd( return asyncio.run( create_dao_wallet( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, fee, @@ -221,14 +226,16 @@ def dao_create_cmd( ) @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int) @click.option("-i", "--wallet-id", help="DAO Wallet ID", type=int, required=True) +@click.pass_context def dao_get_id_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, ) -> None: from chia.cmds.dao_funcs import get_treasury_id - asyncio.run(get_treasury_id(wallet_rpc_port, fingerprint, wallet_id)) + asyncio.run(get_treasury_id(ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id)) @dao_cmd.command("add_funds", short_help="Send funds to a DAO treasury", no_args_is_help=True) @@ -258,7 +265,9 @@ def dao_get_id_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_add_funds_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -277,6 +286,7 @@ def dao_add_funds_cmd( return asyncio.run( add_funds_to_treasury( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -306,14 +316,16 @@ def dao_add_funds_cmd( ) @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int) @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) +@click.pass_context def dao_get_balance_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, ) -> None: from chia.cmds.dao_funcs import get_treasury_balance - asyncio.run(get_treasury_balance(wallet_rpc_port, fingerprint, wallet_id)) + asyncio.run(get_treasury_balance(ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id)) @dao_cmd.command("rules", short_help="Get the current rules governing the DAO", no_args_is_help=True) @@ -326,14 +338,16 @@ def dao_get_balance_cmd( ) @click.option("-f", "--fingerprint", help="Set the fingerprint to specify which key to use", type=int) @click.option("-i", "--wallet-id", help="Id of the wallet to use", type=int, required=True) +@click.pass_context def dao_rules_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, ) -> None: from chia.cmds.dao_funcs import get_rules - asyncio.run(get_rules(wallet_rpc_port, fingerprint, wallet_id)) + asyncio.run(get_rules(ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id)) # ---------------------------------------------------------------------------------------- @@ -356,7 +370,9 @@ def dao_rules_cmd( help="Include previously closed proposals", 
is_flag=True, ) +@click.pass_context def dao_list_proposals_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -367,7 +383,7 @@ def dao_list_proposals_cmd( if not include_closed: include_closed = False - asyncio.run(list_proposals(wallet_rpc_port, fingerprint, wallet_id, include_closed)) + asyncio.run(list_proposals(ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, include_closed)) @dao_cmd.command("show_proposal", short_help="Show the details of a specific proposal", no_args_is_help=True) @@ -387,7 +403,9 @@ def dao_list_proposals_cmd( type=str, required=True, ) +@click.pass_context def dao_show_proposal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -395,7 +413,7 @@ def dao_show_proposal_cmd( ) -> None: from chia.cmds.dao_funcs import show_proposal - asyncio.run(show_proposal(wallet_rpc_port, fingerprint, wallet_id, proposal_id)) + asyncio.run(show_proposal(ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, proposal_id)) # ---------------------------------------------------------------------------------------- @@ -435,7 +453,9 @@ def dao_show_proposal_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_vote_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -457,6 +477,7 @@ def dao_vote_cmd( return asyncio.run( vote_on_proposal( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -508,7 +529,9 @@ def dao_vote_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_close_proposal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -527,6 +550,7 @@ def dao_close_proposal_cmd( return asyncio.run( close_proposal( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -570,7 +594,9 @@ def dao_close_proposal_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_lockup_coins_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -588,6 +614,7 @@ def dao_lockup_coins_cmd( return asyncio.run( lockup_coins( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -619,7 +646,9 @@ def dao_lockup_coins_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_release_coins_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -636,6 +665,7 @@ def dao_release_coins_cmd( return asyncio.run( release_coins( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -666,7 +696,9 @@ def dao_release_coins_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_exit_lockup_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -683,6 +715,7 @@ def dao_exit_lockup_cmd( return asyncio.run( exit_lockup( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -762,7 +795,9 @@ def dao_proposal(ctx: click.Context) -> None: @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_create_spend_proposal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -784,6 +819,7 @@ def dao_create_spend_proposal_cmd( return asyncio.run( create_spend_proposal( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -869,7 +905,9 @@ def dao_create_spend_proposal_cmd( @options.create_fee() @tx_config_args 
@tx_out_cmd() +@click.pass_context def dao_create_update_proposal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -893,6 +931,7 @@ def dao_create_update_proposal_cmd( return asyncio.run( create_update_proposal( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, @@ -953,7 +992,9 @@ def dao_create_update_proposal_cmd( @options.create_fee() @tx_config_args @tx_out_cmd() +@click.pass_context def dao_create_mint_proposal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: int, @@ -973,6 +1014,7 @@ def dao_create_mint_proposal_cmd( return asyncio.run( create_mint_proposal( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, wallet_id, diff --git a/chia/cmds/dao_funcs.py b/chia/cmds/dao_funcs.py index 3712965c3aab..18015543979d 100644 --- a/chia/cmds/dao_funcs.py +++ b/chia/cmds/dao_funcs.py @@ -2,6 +2,7 @@ import asyncio import json +import pathlib import time from decimal import Decimal from typing import Optional @@ -21,12 +22,17 @@ async def add_dao_wallet( - wallet_rpc_port: Optional[int], fp: int, name: Optional[str], treasury_id: bytes32, filter_amount: uint64 + root_path: pathlib.Path, + wallet_rpc_port: Optional[int], + fp: int, + name: Optional[str], + treasury_id: bytes32, + filter_amount: uint64, ) -> None: print(f"Adding wallet for DAO: {treasury_id}") print("This may take awhile.") - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.create_new_dao_wallet( mode="existing", tx_config=CMDTXConfigLoader(reuse_puzhash=True).to_tx_config(units["chia"], config, fingerprint), @@ -45,6 +51,7 @@ async def add_dao_wallet( async def create_dao_wallet( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, fee: uint64, @@ -77,7 +84,7 @@ async def create_dao_wallet( "proposal_minimum_amount": proposal_minimum, } - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): conf_coins, _, _ = await wallet_client.get_spendable_coins( wallet_id=1, coin_selection_config=DEFAULT_COIN_SELECTION_CONFIG ) @@ -106,15 +113,15 @@ async def create_dao_wallet( return res.transactions -async def get_treasury_id(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_treasury_id(root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_treasury_id(wallet_id=wallet_id) treasury_id = res["treasury_id"] print(f"Treasury ID: {treasury_id}") -async def get_rules(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_rules(root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_rules(wallet_id=wallet_id) rules = res["rules"] for rule, val in rules.items(): @@ -122,6 +129,7 @@ async def get_rules(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> async def add_funds_to_treasury( + 
root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -132,7 +140,7 @@ async def add_funds_to_treasury( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: typ = await get_wallet_type(wallet_id=funding_wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(typ) @@ -165,8 +173,10 @@ async def add_funds_to_treasury( return res.transactions -async def get_treasury_balance(wallet_rpc_port: Optional[int], fp: int, wallet_id: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_treasury_balance( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_treasury_balance(wallet_id=wallet_id) balances = res["balances"] @@ -183,8 +193,10 @@ async def get_treasury_balance(wallet_rpc_port: Optional[int], fp: int, wallet_i print(f"{asset_id}: {balance / cat_mojos}") -async def list_proposals(wallet_rpc_port: Optional[int], fp: int, wallet_id: int, include_closed: bool) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def list_proposals( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, include_closed: bool +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.dao_get_proposals(wallet_id=wallet_id, include_closed=include_closed) proposals = res["proposals"] soft_close_length = res["soft_close_length"] @@ -201,8 +213,10 @@ async def list_proposals(wallet_rpc_port: Optional[int], fp: int, wallet_id: int print("############################") -async def show_proposal(wallet_rpc_port: Optional[int], fp: int, wallet_id: int, proposal_id: str) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): +async def show_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, proposal_id: str +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, config): res = await wallet_client.dao_parse_proposal(wallet_id, proposal_id) pd = res["proposal_dictionary"] blocks_needed = pd["state"]["blocks_needed"] @@ -275,6 +289,7 @@ async def show_proposal(wallet_rpc_port: Optional[int], fp: int, wallet_id: int, async def vote_on_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -286,7 +301,7 @@ async def vote_on_proposal( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_vote_on_proposal( wallet_id=wallet_id, proposal_id=proposal_id, @@ -313,6 +328,7 @@ async def vote_on_proposal( async def close_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -323,7 +339,7 @@ async def close_proposal( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as 
(wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_close_proposal( wallet_id=wallet_id, proposal_id=proposal_id, @@ -350,6 +366,7 @@ async def close_proposal( async def lockup_coins( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -360,7 +377,7 @@ async def lockup_coins( condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: final_amount: uint64 = amount.convert_amount(units["cat"]) - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_send_to_lockup( wallet_id=wallet_id, amount=final_amount, @@ -386,6 +403,7 @@ async def lockup_coins( async def release_coins( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -394,7 +412,7 @@ async def release_coins( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_free_coins_from_finished_proposals( wallet_id=wallet_id, fee=fee, @@ -418,6 +436,7 @@ async def release_coins( async def exit_lockup( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -426,7 +445,7 @@ async def exit_lockup( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_exit_lockup( wallet_id=wallet_id, coins=[], @@ -452,6 +471,7 @@ async def exit_lockup( async def create_spend_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -477,7 +497,7 @@ async def create_spend_proposal( additions.append(addition) else: additions = None - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) mojo_per_unit = get_mojo_per_unit(wallet_type=wallet_type) final_amount: Optional[uint64] = uint64(int(Decimal(amount) * mojo_per_unit)) if amount else None @@ -504,6 +524,7 @@ async def create_spend_proposal( async def create_update_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -527,7 +548,7 @@ async def create_update_proposal( "self_destruct_length": self_destruct_length, "oracle_spend_delay": oracle_spend_delay, } - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_create_proposal( wallet_id=wallet_id, proposal_type="update", @@ -546,6 +567,7 @@ async def create_update_proposal( async def create_mint_proposal( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: int, wallet_id: int, @@ -557,7 +579,7 @@ async def create_mint_proposal( push: bool, condition_valid_times: 
ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_create_proposal( wallet_id=wallet_id, proposal_type="mint", diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index aef8f2a80029..3a26ed1abb2c 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -47,10 +47,21 @@ def wallet_cmd(ctx: click.Context) -> None: @click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True) @click.option("-tx", "--tx_id", help="transaction id to search for", type=str, required=True) @click.option("--verbose", "-v", count=True, type=int) -def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, tx_id: str, verbose: int) -> None: +@click.pass_context +def get_transaction_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, tx_id: str, verbose: int +) -> None: from chia.cmds.wallet_funcs import get_transaction - asyncio.run(get_transaction(wallet_rpc_port=wallet_rpc_port, fingerprint=fingerprint, tx_id=tx_id, verbose=verbose)) + asyncio.run( + get_transaction( + root_path=ctx.obj["root_path"], + wallet_rpc_port=wallet_rpc_port, + fingerprint=fingerprint, + tx_id=tx_id, + verbose=verbose, + ) + ) @wallet_cmd.command("get_transactions", help="Get all transactions") @@ -114,7 +125,9 @@ def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in default=False, help="Only show clawback transactions", ) +@click.pass_context def get_transactions_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -130,6 +143,7 @@ def get_transactions_cmd( asyncio.run( get_transactions( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -201,7 +215,9 @@ def get_transactions_cmd( default=0, ) @tx_out_cmd() +@click.pass_context def send_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -222,6 +238,7 @@ def send_cmd( return asyncio.run( send( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -257,10 +274,15 @@ def send_cmd( type=click.Choice([x.name.lower() for x in WalletType]), default=None, ) -def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, wallet_type: Optional[str]) -> None: +@click.pass_context +def show_cmd(ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, wallet_type: Optional[str]) -> None: from chia.cmds.wallet_funcs import print_balances - asyncio.run(print_balances(wallet_rpc_port, fingerprint, WalletType[wallet_type.upper()] if wallet_type else None)) + asyncio.run( + print_balances( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, WalletType[wallet_type.upper()] if wallet_type else None + ) + ) @wallet_cmd.command("get_address", help="Get a wallet receive address") @@ -283,10 +305,13 @@ def show_cmd(wallet_rpc_port: Optional[int], fingerprint: int, wallet_type: Opti is_flag=True, default=False, ) -def get_address_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int, new_address: bool) -> None: +@click.pass_context +def get_address_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], id: int, fingerprint: int, new_address: bool +) -> None: from chia.cmds.wallet_funcs import get_address - asyncio.run(get_address(wallet_rpc_port, 
fingerprint, id, new_address)) + asyncio.run(get_address(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, new_address)) @wallet_cmd.command( @@ -320,7 +345,9 @@ def get_address_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int, n default=False, ) @tx_out_cmd() +@click.pass_context def clawback( + ctx: click.Context, wallet_rpc_port: Optional[int], id: int, fingerprint: int, @@ -334,6 +361,7 @@ def clawback( return asyncio.run( spend_clawback( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=fee, @@ -355,10 +383,13 @@ def clawback( ) @click.option("-i", "--id", help="Id of the wallet to use", type=int, default=1, show_default=True, required=True) @options.create_fingerprint() -def delete_unconfirmed_transactions_cmd(wallet_rpc_port: Optional[int], id: int, fingerprint: int) -> None: +@click.pass_context +def delete_unconfirmed_transactions_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], id: int, fingerprint: int +) -> None: from chia.cmds.wallet_funcs import delete_unconfirmed_transactions - asyncio.run(delete_unconfirmed_transactions(wallet_rpc_port, fingerprint, id)) + asyncio.run(delete_unconfirmed_transactions(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id)) @wallet_cmd.command("get_derivation_index", help="Get the last puzzle hash derivation path index") @@ -370,10 +401,11 @@ def delete_unconfirmed_transactions_cmd(wallet_rpc_port: Optional[int], id: int, default=None, ) @options.create_fingerprint() -def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) -> None: +@click.pass_context +def get_derivation_index_cmd(ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int) -> None: from chia.cmds.wallet_funcs import get_derivation_index - asyncio.run(get_derivation_index(wallet_rpc_port, fingerprint)) + asyncio.run(get_derivation_index(ctx.obj["root_path"], wallet_rpc_port, fingerprint)) @wallet_cmd.command("sign_message", help="Sign a message by a derivation address") @@ -388,13 +420,15 @@ def get_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int) - # TODO: Change RPC's to use the puzzle hash instead of address @click.option("-a", "--address", help="The address you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True) +@click.pass_context def address_sign_message( - wallet_rpc_port: Optional[int], fingerprint: int, address: CliAddress, hex_message: str + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, address: CliAddress, hex_message: str ) -> None: from chia.cmds.wallet_funcs import sign_message asyncio.run( sign_message( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, addr_type=AddressType.XCH, @@ -418,10 +452,13 @@ def address_sign_message( @click.option( "-i", "--index", help="Index to set.
Must be greater than the current derivation index", type=int, required=True ) -def update_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int, index: int) -> None: +@click.pass_context +def update_derivation_index_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, index: int +) -> None: from chia.cmds.wallet_funcs import update_derivation_index - asyncio.run(update_derivation_index(wallet_rpc_port, fingerprint, index)) + asyncio.run(update_derivation_index(ctx.obj["root_path"], wallet_rpc_port, fingerprint, index)) @wallet_cmd.command("add_token", help="Add/Rename a CAT to the wallet by its asset ID") @@ -445,10 +482,13 @@ def update_derivation_index_cmd(wallet_rpc_port: Optional[int], fingerprint: int help="The name you wish to designate to the token", ) @options.create_fingerprint() -def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: bytes32, token_name: str, fingerprint: int) -> None: +@click.pass_context +def add_token_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], asset_id: bytes32, token_name: str, fingerprint: int +) -> None: from chia.cmds.wallet_funcs import add_token - asyncio.run(add_token(wallet_rpc_port, fingerprint, asset_id, token_name)) + asyncio.run(add_token(ctx.obj["root_path"], wallet_rpc_port, fingerprint, asset_id, token_name)) @wallet_cmd.command("make_offer", help="Create an offer of XCH/CATs/NFTs for XCH/CATs/NFTs") @@ -489,9 +529,11 @@ def add_token_cmd(wallet_rpc_port: Optional[int], asset_id: bytes32, token_name: ) @click.option("--override", help="Creates offer without checking for unusual values", is_flag=True, default=False) @timelock_args(enable=True) +@click.pass_context # This command looks like a good candidate for @tx_out_cmd however, pushing an incomplete tx is nonsensical and # we already have a canonical offer file format which the idea of exporting a different transaction conflicts with def make_offer_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, offer: Sequence[str], @@ -510,6 +552,7 @@ def make_offer_cmd( asyncio.run( make_offer( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, fee=fee, @@ -543,7 +586,9 @@ def make_offer_cmd( @click.option("-s", "--summaries", help="Show the assets being offered and requested for each offer", is_flag=True) @click.option("--sort-by-relevance/--sort-by-confirmed-height", help="Sort the offers one of two ways", is_flag=True) @click.option("-r", "--reverse", help="Reverse the order of the output", is_flag=True) +@click.pass_context def get_offers_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: Optional[bytes32], @@ -559,6 +604,7 @@ def get_offers_cmd( asyncio.run( get_offers( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, offer_id=id, @@ -593,7 +639,9 @@ def get_offers_cmd( default=False, ) @tx_out_cmd() +@click.pass_context def take_offer_cmd( + ctx: click.Context, path_or_hex: str, wallet_rpc_port: Optional[int], fingerprint: int, @@ -607,6 +655,7 @@ def take_offer_cmd( return asyncio.run( take_offer( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, fee, @@ -631,7 +680,9 @@ def take_offer_cmd( @click.option("--insecure", help="Don't make an on-chain transaction, simply mark the offer as cancelled", is_flag=True) @options.create_fee("The fee to use when cancelling the offer securely, in XCH") @tx_out_cmd() +@click.pass_context def cancel_offer_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: 
int, id: bytes32, @@ -644,6 +695,7 @@ def cancel_offer_cmd( return asyncio.run( cancel_offer( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, fee, @@ -694,7 +746,9 @@ def did_cmd() -> None: ) @options.create_fee() @tx_out_cmd() +@click.pass_context def did_create_wallet_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, name: Optional[str], @@ -707,6 +761,7 @@ def did_create_wallet_cmd( return asyncio.run( create_did_wallet( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, fee, @@ -729,11 +784,15 @@ def did_create_wallet_cmd( @options.create_fingerprint() @click.option("-i", "--did_id", help="DID ID you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True) -def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: CliAddress, hex_message: str) -> None: +@click.pass_context +def did_sign_message( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, did_id: CliAddress, hex_message: str +) -> None: from chia.cmds.wallet_funcs import sign_message asyncio.run( sign_message( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, addr_type=AddressType.DID, @@ -754,10 +813,13 @@ def did_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, did_id: C @options.create_fingerprint() @click.option("-i", "--id", help="Id of the wallet to use", type=int, required=True) @click.option("-n", "--name", help="Set the DID wallet name", type=str, required=True) -def did_wallet_name_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, name: str) -> None: +@click.pass_context +def did_wallet_name_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, name: str +) -> None: from chia.cmds.wallet_funcs import did_set_wallet_name - asyncio.run(did_set_wallet_name(wallet_rpc_port, fingerprint, id, name)) + asyncio.run(did_set_wallet_name(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, name)) @did_cmd.command("get_did", help="Get DID from wallet") @@ -770,10 +832,11 @@ def did_wallet_name_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in ) @options.create_fingerprint() @click.option("-i", "--id", help="Id of the wallet to use", type=int, required=True) -def did_get_did_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: +@click.pass_context +def did_get_did_cmd(ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int) -> None: from chia.cmds.wallet_funcs import get_did - asyncio.run(get_did(wallet_rpc_port, fingerprint, id)) + asyncio.run(get_did(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id)) @did_cmd.command("get_details", help="Get more details of any DID") @@ -787,10 +850,13 @@ def did_get_did_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int) - @options.create_fingerprint() @click.option("-id", "--coin_id", help="Id of the DID or any coin ID of the DID", type=str, required=True) @click.option("-l", "--latest", help="Return latest DID information", is_flag=True, default=True) -def did_get_details_cmd(wallet_rpc_port: Optional[int], fingerprint: int, coin_id: str, latest: bool) -> None: +@click.pass_context +def did_get_details_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, coin_id: str, latest: bool +) -> None: from chia.cmds.wallet_funcs import get_did_info - asyncio.run(get_did_info(wallet_rpc_port, fingerprint, coin_id, latest)) + 
asyncio.run(get_did_info(ctx.obj["root_path"], wallet_rpc_port, fingerprint, coin_id, latest)) @did_cmd.command("update_metadata", help="Update the metadata of a DID") @@ -811,7 +877,9 @@ def did_get_details_cmd(wallet_rpc_port: Optional[int], fingerprint: int, coin_i default=False, ) @tx_out_cmd() +@click.pass_context def did_update_metadata_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -824,6 +892,7 @@ def did_update_metadata_cmd( return asyncio.run( update_did_metadata( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, @@ -861,7 +930,9 @@ def did_update_metadata_cmd( type=int, required=False, ) +@click.pass_context def did_find_lost_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, coin_id: str, @@ -873,6 +944,7 @@ def did_find_lost_cmd( asyncio.run( find_lost_did( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, coin_id=coin_id, @@ -908,7 +980,9 @@ def did_find_lost_cmd( required=False, ) @tx_out_cmd() +@click.pass_context def did_message_spend_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -942,6 +1016,7 @@ def did_message_spend_cmd( return asyncio.run( did_message_spend( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, @@ -976,7 +1051,9 @@ def did_message_spend_cmd( default=False, ) @tx_out_cmd() +@click.pass_context def did_transfer_did( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -991,6 +1068,7 @@ def did_transfer_did( return asyncio.run( transfer_did( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, @@ -1021,12 +1099,17 @@ def nft_cmd() -> None: # TODO: Change RPC to use puzzlehash instead of address @click.option("-di", "--did-id", help="DID Id to use", type=AddressParamType()) @click.option("-n", "--name", help="Set the NFT wallet name", type=str) +@click.pass_context def nft_wallet_create_cmd( - wallet_rpc_port: Optional[int], fingerprint: int, did_id: Optional[CliAddress], name: Optional[str] + ctx: click.Context, + wallet_rpc_port: Optional[int], + fingerprint: int, + did_id: Optional[CliAddress], + name: Optional[str], ) -> None: from chia.cmds.wallet_funcs import create_nft_wallet - asyncio.run(create_nft_wallet(wallet_rpc_port, fingerprint, did_id, name)) + asyncio.run(create_nft_wallet(ctx.obj["root_path"], wallet_rpc_port, fingerprint, did_id, name)) @nft_cmd.command("sign_message", help="Sign a message by a NFT") @@ -1040,11 +1123,15 @@ def nft_wallet_create_cmd( @options.create_fingerprint() @click.option("-i", "--nft_id", help="NFT ID you want to use for signing", type=AddressParamType(), required=True) @click.option("-m", "--hex_message", help="The hex message you want to sign", type=str, required=True) -def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: CliAddress, hex_message: str) -> None: +@click.pass_context +def nft_sign_message( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, nft_id: CliAddress, hex_message: str +) -> None: from chia.cmds.wallet_funcs import sign_message asyncio.run( sign_message( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, addr_type=AddressType.NFT, @@ -1091,7 +1178,9 @@ def nft_sign_message(wallet_rpc_port: Optional[int], fingerprint: int, nft_id: C default=False, ) @tx_out_cmd() +@click.pass_context def nft_mint_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -1126,6 +1215,7 @@ def nft_mint_cmd( return 
asyncio.run( mint_nft( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -1172,7 +1262,9 @@ def nft_mint_cmd( default=False, ) @tx_out_cmd() +@click.pass_context def nft_add_uri_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -1189,6 +1281,7 @@ def nft_add_uri_cmd( return asyncio.run( add_uri_to_nft( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -1225,7 +1318,9 @@ def nft_add_uri_cmd( default=False, ) @tx_out_cmd() +@click.pass_context def nft_transfer_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -1240,6 +1335,7 @@ def nft_transfer_cmd( return asyncio.run( transfer_nft( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -1265,10 +1361,13 @@ def nft_transfer_cmd( @click.option("-i", "--id", help="Id of the NFT wallet to use", type=int, required=True) @click.option("--num", help="Number of NFTs to return", type=int, default=50) @click.option("--start-index", help="Which starting index to start listing NFTs from", type=int, default=0) -def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, num: int, start_index: int) -> None: +@click.pass_context +def nft_list_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, num: int, start_index: int +) -> None: from chia.cmds.wallet_funcs import list_nfts - asyncio.run(list_nfts(wallet_rpc_port, fingerprint, id, num, start_index)) + asyncio.run(list_nfts(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, num, start_index)) @nft_cmd.command("set_did", help="Set a DID on an NFT") @@ -1292,7 +1391,9 @@ def nft_list_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: int, num: default=False, ) @tx_out_cmd() +@click.pass_context def nft_set_did_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -1307,6 +1408,7 @@ def nft_set_did_cmd( return asyncio.run( set_nft_did( + root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, wallet_id=id, @@ -1331,14 +1433,16 @@ def nft_set_did_cmd( @options.create_fingerprint() # TODO: Change RPC to use bytes instead of hex string @click.option("-ni", "--nft-coin-id", help="Id of the NFT coin to get information on", type=str, required=True) +@click.pass_context def nft_get_info_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, nft_coin_id: str, ) -> None: from chia.cmds.wallet_funcs import get_nft_info - asyncio.run(get_nft_info(wallet_rpc_port, fingerprint, nft_coin_id)) + asyncio.run(get_nft_info(ctx.obj["root_path"], wallet_rpc_port, fingerprint, nft_coin_id)) # Keep at bottom. 
@@ -1374,7 +1478,9 @@ def notification_cmd() -> None: @click.option("-n", "--message", help="The message of the notification", type=str) @options.create_fee() @tx_out_cmd() +@click.pass_context def send_notification_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, to_address: CliAddress, @@ -1389,6 +1495,7 @@ def send_notification_cmd( message_bytes: bytes = bytes(message, "utf8") return asyncio.run( send_notification( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, fee, @@ -1413,7 +1520,9 @@ def send_notification_cmd( @click.option("-i", "--id", help="The specific notification ID to show", type=Bytes32ParamType(), multiple=True) @click.option("-s", "--start", help="The number of notifications to skip", type=int, default=None) @click.option("-e", "--end", help="The number of notifications to stop at", type=int, default=None) +@click.pass_context def get_notifications_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: Sequence[bytes32], @@ -1422,7 +1531,7 @@ def get_notifications_cmd( ) -> None: from chia.cmds.wallet_funcs import get_notifications - asyncio.run(get_notifications(wallet_rpc_port, fingerprint, id, start, end)) + asyncio.run(get_notifications(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, start, end)) @notification_cmd.command("delete", help="Delete notification(s) that are in your wallet") @@ -1436,7 +1545,9 @@ def get_notifications_cmd( @options.create_fingerprint() @click.option("-i", "--id", help="A specific notification ID to delete", type=Bytes32ParamType(), multiple=True) @click.option("--all", help="All notifications can be deleted (they will be recovered during resync)", is_flag=True) +@click.pass_context def delete_notifications_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: Sequence[bytes32], @@ -1444,7 +1555,7 @@ def delete_notifications_cmd( ) -> None: from chia.cmds.wallet_funcs import delete_notifications - asyncio.run(delete_notifications(wallet_rpc_port, fingerprint, id, all)) + asyncio.run(delete_notifications(ctx.obj["root_path"], wallet_rpc_port, fingerprint, id, all)) @wallet_cmd.group("vcs", short_help="Verifiable Credential related actions") @@ -1471,7 +1582,9 @@ def vcs_cmd() -> None: # pragma: no cover ) @options.create_fee("Blockchain fee for mint transaction, in XCH") @tx_out_cmd() +@click.pass_context def mint_vc_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, did: CliAddress, @@ -1484,6 +1597,7 @@ def mint_vc_cmd( return asyncio.run( mint_vc( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, did, @@ -1510,7 +1624,9 @@ def mint_vc_cmd( @click.option( "-c", "--count", help="How many results to return", type=int, required=False, default=50, show_default=True ) +@click.pass_context def get_vcs_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, start: int, @@ -1518,7 +1634,7 @@ def get_vcs_cmd( ) -> None: # pragma: no cover from chia.cmds.wallet_funcs import get_vcs - asyncio.run(get_vcs(wallet_rpc_port, fingerprint, start, count)) + asyncio.run(get_vcs(ctx.obj["root_path"], wallet_rpc_port, fingerprint, start, count)) @vcs_cmd.command("update_proofs", short_help="Update a VC's proofs if you have the provider DID") @@ -1553,7 +1669,9 @@ def get_vcs_cmd( show_default=True, ) @tx_out_cmd() +@click.pass_context def spend_vc_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, vc_id: bytes32, @@ -1568,6 +1686,7 @@ def spend_vc_cmd( return asyncio.run( spend_vc( + 
root_path=ctx.obj["root_path"], wallet_rpc_port=wallet_rpc_port, fp=fingerprint, vc_id=vc_id, @@ -1592,7 +1711,9 @@ def spend_vc_cmd( @options.create_fingerprint() @click.option("-p", "--proof", help="A flag to add as a proof", type=str, multiple=True) @click.option("-r", "--root-only", help="Do not add the proofs to the DB, just output the root", is_flag=True) +@click.pass_context def add_proof_reveal_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, proof: Sequence[str], @@ -1600,7 +1721,7 @@ def add_proof_reveal_cmd( ) -> None: # pragma: no cover from chia.cmds.wallet_funcs import add_proof_reveal - asyncio.run(add_proof_reveal(wallet_rpc_port, fingerprint, proof, root_only)) + asyncio.run(add_proof_reveal(ctx.obj["root_path"], wallet_rpc_port, fingerprint, proof, root_only)) @vcs_cmd.command("get_proofs_for_root", short_help="Get the stored proof flags for a given proof hash") @@ -1613,14 +1734,16 @@ def add_proof_reveal_cmd( ) @options.create_fingerprint() @click.option("-r", "--proof-hash", help="The root to search for", type=str, required=True) +@click.pass_context def get_proofs_for_root_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, proof_hash: str, ) -> None: # pragma: no cover from chia.cmds.wallet_funcs import get_proofs_for_root - asyncio.run(get_proofs_for_root(wallet_rpc_port, fingerprint, proof_hash)) + asyncio.run(get_proofs_for_root(ctx.obj["root_path"], wallet_rpc_port, fingerprint, proof_hash)) @vcs_cmd.command("revoke", short_help="Revoke any VC if you have the proper DID and the VCs parent coin") @@ -1654,7 +1777,9 @@ def get_proofs_for_root_cmd( show_default=True, ) @tx_out_cmd() +@click.pass_context def revoke_vc_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, parent_coin_id: Optional[bytes32], @@ -1668,6 +1793,7 @@ def revoke_vc_cmd( return asyncio.run( revoke_vc( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, parent_coin_id, @@ -1719,7 +1845,9 @@ def revoke_vc_cmd( default=False, ) @tx_out_cmd() +@click.pass_context def approve_r_cats_cmd( + ctx: click.Context, wallet_rpc_port: Optional[int], fingerprint: int, id: int, @@ -1735,6 +1863,7 @@ def approve_r_cats_cmd( return asyncio.run( approve_r_cats( + ctx.obj["root_path"], wallet_rpc_port, fingerprint, uint32(id), diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index f676157ffedb..a90c52f80817 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -157,9 +157,9 @@ async def get_unit_name_for_wallet_id( async def get_transaction( - *, wallet_rpc_port: Optional[int], fingerprint: Optional[int], tx_id: str, verbose: int + *, root_path: pathlib.Path, wallet_rpc_port: Optional[int], fingerprint: Optional[int], tx_id: str, verbose: int ) -> None: - async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): transaction_id = bytes32.from_hexstr(tx_id) address_prefix = selected_network_address_prefix(config) tx: TransactionRecord = await wallet_client.get_transaction(transaction_id=transaction_id) @@ -188,6 +188,7 @@ async def get_transaction( async def get_transactions( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -199,7 +200,7 @@ async def get_transactions( reverse: bool, clawback: bool, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): + async 
with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, config): if paginate is None: paginate = sys.stdout.isatty() type_filter = ( @@ -272,6 +273,7 @@ def check_unusual_transaction(amount: uint64, fee: uint64) -> bool: async def send( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -288,7 +290,7 @@ async def send( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if memo is None: memos = None else: @@ -377,34 +379,42 @@ async def send( return res.transactions # pragma: no cover -async def get_address(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, new_address: bool) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_address( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, new_address: bool +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.get_next_address(wallet_id, new_address) print(res) -async def delete_unconfirmed_transactions(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): +async def delete_unconfirmed_transactions( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): await wallet_client.delete_unconfirmed_transactions(wallet_id) print(f"Successfully deleted all unconfirmed transactions for wallet id {wallet_id} on key {fingerprint}") -async def get_derivation_index(wallet_rpc_port: Optional[int], fp: Optional[int]) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_derivation_index(root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int]) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.get_current_derivation_index() print(f"Last derivation index: {res}") -async def update_derivation_index(wallet_rpc_port: Optional[int], fp: Optional[int], index: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def update_derivation_index( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], index: int +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): print("Updating derivation index... 
This may take a while.") res = await wallet_client.extend_derivation_index(index) print(f"Updated derivation index: {res}") print("Your balances may take a while to update.") -async def add_token(wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: bytes32, token_name: str) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): +async def add_token( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: bytes32, token_name: str +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): existing_info: Optional[tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name(asset_id) if existing_info is None or existing_info[0] is None: response = await wallet_client.create_wallet_for_existing_cat(asset_id) @@ -419,6 +429,7 @@ async def add_token(wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: async def make_offer( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -428,7 +439,7 @@ async def make_offer( reuse_puzhash: Optional[bool], condition_valid_times: ConditionValidTimes, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if offers == [] or requests == []: print("Not creating offer: Must be offering and requesting at least one asset") else: @@ -661,6 +672,7 @@ async def print_trade_record(record: TradeRecord, wallet_client: WalletRpcClient async def get_offers( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], offer_id: Optional[bytes32], @@ -672,7 +684,7 @@ async def get_offers( reverse: bool = False, sort_by_relevance: bool = True, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): file_contents: bool = (filepath is not None) or summaries records: list[TradeRecord] = [] if offer_id is None: @@ -712,6 +724,7 @@ async def get_offers( async def take_offer( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -720,7 +733,7 @@ async def take_offer( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if os.path.exists(file): filepath = pathlib.Path(file) with open(filepath) as ffile: @@ -817,6 +830,7 @@ async def take_offer( async def cancel_offer( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -825,7 +839,7 @@ async def cancel_offer( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): trade_record = await wallet_client.get_offer(offer_id, file_contents=True) await print_trade_record(trade_record, wallet_client, summaries=True) @@ -866,9 +880,9 @@ def print_balance(amount: int, scale: int, address_prefix: str, *, decimal_only: async def print_balances( - wallet_rpc_port: Optional[int], fp: Optional[int], wallet_type: Optional[WalletType] = 
None + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_type: Optional[WalletType] = None ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): summaries_response = await wallet_client.get_wallets(wallet_type) address_prefix = selected_network_address_prefix(config) @@ -944,6 +958,7 @@ async def print_balances( async def create_did_wallet( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -952,7 +967,7 @@ async def create_did_wallet( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.create_new_did_wallet( amount, @@ -972,8 +987,10 @@ async def create_did_wallet( return [] -async def did_set_wallet_name(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, name: str) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def did_set_wallet_name( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, name: str +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): try: await wallet_client.did_set_wallet_name(wallet_id, name) print(f"Successfully set a new name for DID wallet with id {wallet_id}: {name}") @@ -981,8 +998,10 @@ async def did_set_wallet_name(wallet_rpc_port: Optional[int], fp: Optional[int], print(f"Failed to set DID wallet name: {e}") -async def get_did(wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_did( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): try: response = await wallet_client.get_did_id(did_wallet_id) my_did = response["my_did"] @@ -993,8 +1012,10 @@ async def get_did(wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_ print(f"Failed to get DID: {e}") -async def get_did_info(wallet_rpc_port: Optional[int], fp: Optional[int], coin_id: str, latest: bool) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_did_info( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], coin_id: str, latest: bool +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): did_padding_length = 23 try: response = await wallet_client.get_did_info(coin_id, latest) @@ -1015,6 +1036,7 @@ async def get_did_info(wallet_rpc_port: Optional[int], fp: Optional[int], coin_i async def update_did_metadata( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int, @@ -1023,7 +1045,7 @@ async def update_did_metadata( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await 
wallet_client.update_did_metadata( did_wallet_id, @@ -1046,6 +1068,7 @@ async def update_did_metadata( async def did_message_spend( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int, @@ -1054,7 +1077,7 @@ async def did_message_spend( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.did_message_spend( did_wallet_id, @@ -1074,6 +1097,7 @@ async def did_message_spend( async def transfer_did( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int, @@ -1084,7 +1108,7 @@ async def transfer_did( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: target_address = target_cli_address.original_address response = await wallet_client.did_transfer_did( @@ -1110,6 +1134,7 @@ async def transfer_did( async def find_lost_did( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], coin_id: str, @@ -1117,7 +1142,7 @@ async def find_lost_did( recovery_list_hash: Optional[str], num_verification: Optional[int], ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): try: response = await wallet_client.find_lost_did( coin_id, @@ -1134,9 +1159,13 @@ async def find_lost_did( async def create_nft_wallet( - wallet_rpc_port: Optional[int], fp: Optional[int], did_id: Optional[CliAddress] = None, name: Optional[str] = None + root_path: pathlib.Path, + wallet_rpc_port: Optional[int], + fp: Optional[int], + did_id: Optional[CliAddress] = None, + name: Optional[str] = None, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): try: response = await wallet_client.create_new_nft_wallet(did_id.original_address if did_id else None, name) wallet_id = response["wallet_id"] @@ -1147,6 +1176,7 @@ async def create_nft_wallet( async def mint_nft( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -1167,7 +1197,7 @@ async def mint_nft( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): royalty_address = royalty_cli_address.validate_address_type(AddressType.XCH) if royalty_cli_address else None target_address = target_cli_address.validate_address_type(AddressType.XCH) if target_cli_address else None try: @@ -1217,6 +1247,7 @@ async def mint_nft( async def add_uri_to_nft( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -1229,7 +1260,7 @@ async def add_uri_to_nft( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): 
+ async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: if len([x for x in (uri, metadata_uri, license_uri) if x is not None]) > 1: raise ValueError("You must provide only one of the URI flags") @@ -1267,6 +1298,7 @@ async def add_uri_to_nft( async def transfer_nft( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -1277,7 +1309,7 @@ async def transfer_nft( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: target_address = target_cli_address.validate_address_type(AddressType.XCH) response = await wallet_client.transfer_nft( @@ -1339,9 +1371,14 @@ def print_nft_info(nft: NFTInfo, *, config: dict[str, Any]) -> None: async def list_nfts( - wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, num: int, start_index: int + root_path: pathlib.Path, + wallet_rpc_port: Optional[int], + fp: Optional[int], + wallet_id: int, + num: int, + start_index: int, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.list_nfts(wallet_id, num, start_index) nft_list = response["nft_list"] @@ -1357,6 +1394,7 @@ async def list_nfts( async def set_nft_did( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, @@ -1367,7 +1405,7 @@ async def set_nft_did( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.set_nft_did( wallet_id, @@ -1388,8 +1426,10 @@ async def set_nft_did( return [] -async def get_nft_info(wallet_rpc_port: Optional[int], fp: Optional[int], nft_coin_id: str) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): +async def get_nft_info( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], nft_coin_id: str +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, config): try: response = await wallet_client.get_nft_info(nft_coin_id) nft_info = NFTInfo.from_json_dict(response["nft_info"]) @@ -1456,6 +1496,7 @@ def fungible_assets_from_offer(offer: Offer) -> list[Optional[bytes32]]: async def send_notification( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -1465,7 +1506,7 @@ async def send_notification( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): amount: uint64 = cli_amount.convert_amount(units["chia"]) tx = await wallet_client.send_notification( @@ -1484,13 +1525,14 @@ async def send_notification( async def get_notifications( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], ids: Optional[Sequence[bytes32]], start: Optional[int], end: Optional[int], ) -> None: - 
async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): if ids is not None: ids = None if len(ids) == 0 else list(ids) response = await wallet_client.get_notifications( @@ -1504,9 +1546,9 @@ async def get_notifications( async def delete_notifications( - wallet_rpc_port: Optional[int], fp: Optional[int], ids: Sequence[bytes32], delete_all: bool + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], ids: Sequence[bytes32], delete_all: bool ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): if delete_all: print(f"Success: {await wallet_client.delete_notifications()}") else: @@ -1515,6 +1557,7 @@ async def delete_notifications( async def sign_message( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], addr_type: AddressType, @@ -1523,7 +1566,7 @@ async def sign_message( did_id: Optional[CliAddress] = None, nft_id: Optional[CliAddress] = None, ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): if addr_type == AddressType.XCH: if address is None: print("Address is required for XCH address type.") @@ -1553,6 +1596,7 @@ async def sign_message( async def spend_clawback( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], fee: uint64, @@ -1561,7 +1605,7 @@ async def spend_clawback( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): tx_ids = [] for tid in tx_ids_str.split(","): tx_ids.append(bytes32.from_hexstr(tid)) @@ -1583,6 +1627,7 @@ async def spend_clawback( async def mint_vc( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], did: CliAddress, @@ -1591,7 +1636,7 @@ async def mint_vc( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.vc_mint( VCMint( did_id=did.validate_address_type(AddressType.DID), @@ -1618,8 +1663,10 @@ async def mint_vc( return res.transactions -async def get_vcs(wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, config): +async def get_vcs( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], start: int, count: int +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, config): get_list_response = await wallet_client.vc_get_list(VCGetList(uint32(start), uint32(count))) print("Proofs:") for hash, proof_dict in get_list_response.proof_dict.items(): @@ -1643,6 +1690,7 @@ async def get_vcs(wallet_rpc_port: Optional[int], fp: Optional[int], start: int, async def spend_vc( *, + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], vc_id: bytes32, @@ -1653,7 +1701,7 @@ async def spend_vc( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: 
- async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): txs = ( await wallet_client.vc_spend( VCSpend( @@ -1686,9 +1734,9 @@ async def spend_vc( async def add_proof_reveal( - wallet_rpc_port: Optional[int], fp: Optional[int], proofs: Sequence[str], root_only: bool + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], proofs: Sequence[str], root_only: bool ) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): if len(proofs) == 0: print("Must specify at least one proof") return @@ -1703,8 +1751,10 @@ async def add_proof_reveal( return -async def get_proofs_for_root(wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str) -> None: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): +async def get_proofs_for_root( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str +) -> None: + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): proof_dict: dict[str, str] = ( (await wallet_client.vc_get_proofs_for_root(VCGetProofsForRoot(bytes32.from_hexstr(proof_hash)))) .to_vc_proofs() @@ -1716,6 +1766,7 @@ async def get_proofs_for_root(wallet_rpc_port: Optional[int], fp: Optional[int], async def revoke_vc( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], parent_coin_id: Optional[bytes32], @@ -1725,7 +1776,7 @@ async def revoke_vc( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if parent_coin_id is None: if vc_id is None: print("Must specify either --parent-coin-id or --vc-id") @@ -1767,6 +1818,7 @@ async def revoke_vc( async def approve_r_cats( + root_path: pathlib.Path, wallet_rpc_port: Optional[int], fingerprint: int, wallet_id: uint32, @@ -1778,7 +1830,7 @@ async def approve_r_cats( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): if wallet_client is None: return txs = await wallet_client.crcat_approve_pending( From 85c23d4812322b33bd02fb38ae4709a5eaeea954 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 13 Dec 2024 13:42:11 -0500 Subject: [PATCH 24/25] avoid set for test parametrization (#19039) --- chia/_tests/rpc/test_rpc_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/chia/_tests/rpc/test_rpc_client.py b/chia/_tests/rpc/test_rpc_client.py index 4a25b9afdfa2..062218008e9e 100644 --- a/chia/_tests/rpc/test_rpc_client.py +++ b/chia/_tests/rpc/test_rpc_client.py @@ -21,11 +21,11 @@ RpcClient.await_closed, } -client_fetch_methods = { +client_fetch_methods = [ attribute for name, attribute in vars(RpcClient).items() if callable(attribute) and attribute not in non_fetch_client_methods and not name.startswith("__") -} +] @dataclass From 78afe07f287b3d5e15c350ce8c0811692e2b2511 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 13 Dec 2024 13:49:16 -0500 
Subject: [PATCH 25/25] first pass at using `@chia_command` without a group (#19040) * first pass at using `@chia_command` without a group * less slots --- chia/_tests/cmds/test_cmd_framework.py | 44 +++++++++++----------- chia/_tests/wallet/test_signer_protocol.py | 10 ++--- chia/cmds/chia.py | 2 - chia/cmds/cmd_classes.py | 32 +++++++++++++++- chia/cmds/plotnft.py | 34 ++++++++--------- chia/cmds/signer.py | 33 ++++++++-------- chia/cmds/wallet.py | 6 +++ 7 files changed, 98 insertions(+), 63 deletions(-) diff --git a/chia/_tests/cmds/test_cmd_framework.py b/chia/_tests/cmds/test_cmd_framework.py index 04a7e6fcf445..e2089dc6fb44 100644 --- a/chia/_tests/cmds/test_cmd_framework.py +++ b/chia/_tests/cmds/test_cmd_framework.py @@ -37,7 +37,7 @@ def new_run(self: Any) -> None: dict_compare_with_ignore_context(asdict(cmd), asdict(self)) # type: ignore[call-overload] setattr(mock_type, "run", new_run) - chia_command(_cmd, "_", "", "")(mock_type) + chia_command(group=_cmd, name="_", short_help="", help="")(mock_type) runner = CliRunner() result = runner.invoke(_cmd, ["_", *args], catch_exceptions=False, obj=obj) @@ -49,12 +49,12 @@ def test_cmd_bases() -> None: def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD: def run(self) -> None: print("syncronous") - @chia_command(cmd, "temp_cmd_async", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_async", short_help="blah", help="n/a") class TempCMDAsync: async def run(self) -> None: print("asyncronous") @@ -96,7 +96,7 @@ def test_option_loading() -> None: def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD: some_option: int = option("-o", "--some-option", required=True, type=int) choices: list[str] = option("--choice", multiple=True, type=str) @@ -104,7 +104,7 @@ class TempCMD: def run(self) -> None: print(self.some_option) - @chia_command(cmd, "temp_cmd_2", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_2", short_help="blah", help="n/a") class TempCMD2: some_option: int = option("-o", "--some-option", required=True, type=int, default=13) @@ -146,7 +146,7 @@ def test_context_requirement() -> None: def cmd(ctx: click.Context) -> None: ctx.obj = {"foo": "bar"} - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD: context: Context @@ -164,7 +164,7 @@ def run(self) -> None: # Test that other variables named context are disallowed with pytest.raises(ValueError, match="context"): - @chia_command(cmd, "shouldnt_work", "blah", help="n/a") + @chia_command(group=cmd, name="shouldnt_work", short_help="blah", help="n/a") class BadCMD: context: int @@ -176,7 +176,7 @@ def test_typing() -> None: def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD: integer: int = option("--integer", default=1, required=False) text: str = option("--text", default="1", required=False) @@ -208,7 +208,7 @@ def run(self) -> None: ... ) # Test optional - @chia_command(cmd, "temp_cmd_optional", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_optional", short_help="blah", help="n/a") class TempCMDOptional: optional: Optional[int] = option("--optional", required=False) @@ -220,7 +220,7 @@ def run(self) -> None: ... 
# Test optional failure with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_optional_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_optional_bad", short_help="blah", help="n/a") class TempCMDOptionalBad2: optional: Optional[int] = option("--optional", required=True) @@ -228,20 +228,20 @@ def run(self) -> None: ... with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_optional_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_optional_bad", short_help="blah", help="n/a") class TempCMDOptionalBad3: optional: Optional[int] = option("--optional", default="string", required=False) def run(self) -> None: ... - @chia_command(cmd, "temp_cmd_optional_fine", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_optional_fine", short_help="blah", help="n/a") class TempCMDOptionalBad4: optional: Optional[int] = option("--optional", default=None, required=False) def run(self) -> None: ... # Test multiple - @chia_command(cmd, "temp_cmd_sequence", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_sequence", short_help="blah", help="n/a") class TempCMDSequence: sequence: Sequence[int] = option("--sequence", multiple=True) @@ -253,7 +253,7 @@ def run(self) -> None: ... # Test sequence failure with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_sequence_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_sequence_bad", short_help="blah", help="n/a") class TempCMDSequenceBad: sequence: Sequence[int] = option("--sequence") @@ -261,7 +261,7 @@ def run(self) -> None: ... with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_sequence_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_sequence_bad", short_help="blah", help="n/a") class TempCMDSequenceBad2: sequence: int = option("--sequence", multiple=True) @@ -269,7 +269,7 @@ def run(self) -> None: ... with pytest.raises(ValueError): - @chia_command(cmd, "temp_cmd_sequence_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_sequence_bad", short_help="blah", help="n/a") class TempCMDSequenceBad3: sequence: Sequence[int] = option("--sequence", default=[1, 2, 3], multiple=True) @@ -277,7 +277,7 @@ def run(self) -> None: ... with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_sequence_bad", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_sequence_bad", short_help="blah", help="n/a") class TempCMDSequenceBad4: sequence: Sequence[int] = option("--sequence", default=(1, 2, "3"), multiple=True) @@ -286,7 +286,7 @@ def run(self) -> None: ... # Test invalid type with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_bad_type", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_bad_type", short_help="blah", help="n/a") class TempCMDBadType: sequence: list[int] = option("--sequence") @@ -295,20 +295,20 @@ def run(self) -> None: ... # Test invalid default with pytest.raises(TypeError): - @chia_command(cmd, "temp_cmd_bad_default", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_bad_default", short_help="blah", help="n/a") class TempCMDBadDefault: integer: int = option("--int", default="string") def run(self) -> None: ... # Test bytes parsing - @chia_command(cmd, "temp_cmd_bad_bytes", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_bad_bytes", short_help="blah", help="n/a") class TempCMDBadBytes: blob: bytes = option("--blob", required=True) def run(self) -> None: ... 
- @chia_command(cmd, "temp_cmd_bad_bytes32", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd_bad_bytes32", short_help="blah", help="n/a") class TempCMDBadBytes32: blob32: bytes32 = option("--blob32", required=True) @@ -354,7 +354,7 @@ async def test_wallet_rpc_helper(wallet_environments: WalletTestFramework) -> No def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD: rpc_info: NeedsWalletRPC diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index 83552b210c56..f7f22a8b7e6a 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -752,7 +752,7 @@ def test_transactions_in() -> None: def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD(TransactionsIn): def run(self) -> None: assert self.transaction_bundle == TransactionBundle([STD_TX]) @@ -771,7 +771,7 @@ def test_transactions_out() -> None: def cmd() -> None: pass - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD(TransactionsOut): def run(self) -> None: self.handle_transaction_output([STD_TX]) @@ -824,7 +824,7 @@ def cmd() -> None: coin = Coin(bytes32.zeros, bytes32.zeros, uint64(13)) - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD(SPIn): def run(self) -> None: assert self.read_sp_input(Coin) == [coin, coin] @@ -881,7 +881,7 @@ def cmd() -> None: coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) coin_bytes = byte_serialize_clvm_streamable(coin) - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD(SPOut): def run(self) -> None: self.handle_clvm_output([coin, coin]) @@ -930,7 +930,7 @@ def cmd() -> None: bytes_to_encode = b"foo bar qat qux bam bat" - @chia_command(cmd, "temp_cmd", "blah", help="n/a") + @chia_command(group=cmd, name="temp_cmd", short_help="blah", help="n/a") class TempCMD(QrCodeDisplay): def run(self) -> None: self.display_qr_codes([bytes_to_encode, bytes_to_encode]) diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py index 3d323505f07f..543002d1d743 100644 --- a/chia/cmds/chia.py +++ b/chia/cmds/chia.py @@ -143,8 +143,6 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None: def main() -> None: - import chia.cmds.signer # noqa - cli() diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 4e1d4076eea4..c57a7decbaca 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -227,7 +227,8 @@ def _convert_class_to_function(cls: type[ChiaCommand]) -> SyncCmd: @dataclass_transform() def chia_command( - cmd: click.Group, + *, + group: Optional[click.Group] = None, name: str, short_help: str, help: str, @@ -246,12 +247,39 @@ def _chia_command(cls: type[ChiaCommand]) -> type[ChiaCommand]: kw_only=True, )(cls) - cmd.command(name, short_help=short_help, help=help)(_convert_class_to_function(wrapped_cls)) + metadata = Metadata( + command=click.command( + name=name, + short_help=short_help, + help=help, + )(_convert_class_to_function(wrapped_cls)) + ) + + setattr(wrapped_cls, _chia_command_metadata_attribute, metadata) + if group is not None: + group.add_command(metadata.command) + 
return wrapped_cls return _chia_command +_chia_command_metadata_attribute = f"_{__name__.replace('.', '_')}_{chia_command.__qualname__}_metadata" + + +@dataclass(frozen=True) +class Metadata: + command: click.Command + + +def get_chia_command_metadata(cls: type[ChiaCommand]) -> Metadata: + metadata: Optional[Metadata] = getattr(cls, _chia_command_metadata_attribute, None) + if metadata is None: + raise Exception(f"Class is not a chia command: {cls}") + + return metadata + + @dataclass_transform() def command_helper(cls: type[Any]) -> type[Any]: if sys.version_info < (3, 10): # stuff below 3.10 doesn't support kw_only diff --git a/chia/cmds/plotnft.py b/chia/cmds/plotnft.py index 20e47c9e052a..a4e1fef36b3b 100644 --- a/chia/cmds/plotnft.py +++ b/chia/cmds/plotnft.py @@ -24,9 +24,9 @@ def plotnft_cmd(ctx: click.Context) -> None: @chia_command( - plotnft_cmd, - "show", - "Show plotnft information", + group=plotnft_cmd, + name="show", + short_help="Show plotnft information", help="Show plotnft information", ) class ShowPlotNFTCMD: @@ -48,8 +48,8 @@ async def run(self) -> None: @chia_command( - plotnft_cmd, - "get_login_link", + group=plotnft_cmd, + name="get_login_link", short_help="Create a login link for a pool", help="Create a login link for a pool. The farmer must be running. Use 'plotnft show' to get the launcher id.", ) @@ -69,8 +69,8 @@ async def run(self) -> None: # They will therefore not work with observer-only functionality # NOTE: tx_endpoint (This creates wallet transactions and should be parametrized by relevant options) @chia_command( - plotnft_cmd, - "create", + group=plotnft_cmd, + name="create", short_help="Create a plot NFT", help="Create a plot NFT.", ) @@ -116,8 +116,8 @@ async def run(self) -> None: # NOTE: tx_endpoint @chia_command( - plotnft_cmd, - "join", + group=plotnft_cmd, + name="join", short_help="Join a plot NFT to a Pool", help="Join a plot NFT to a Pool.", ) @@ -153,8 +153,8 @@ async def run(self) -> None: # NOTE: tx_endpoint @chia_command( - plotnft_cmd, - "leave", + group=plotnft_cmd, + name="leave", short_help="Leave a pool and return to self-farming", help="Leave a pool and return to self-farming.", ) @@ -187,8 +187,8 @@ async def run(self) -> None: @chia_command( - plotnft_cmd, - "inspect", + group=plotnft_cmd, + name="inspect", short_help="Get Detailed plotnft information as JSON", help="Get Detailed plotnft information as JSON", ) @@ -207,8 +207,8 @@ async def run(self) -> None: # NOTE: tx_endpoint @chia_command( - plotnft_cmd, - "claim", + group=plotnft_cmd, + name="claim", short_help="Claim rewards from a plot NFT", help="Claim rewards from a plot NFT", ) @@ -239,8 +239,8 @@ async def run(self) -> None: @chia_command( - plotnft_cmd, - "change_payout_instructions", + group=plotnft_cmd, + name="change_payout_instructions", short_help="Change the payout instructions for a pool.", help="Change the payout instructions for a pool. 
Use 'plotnft show' to get the launcher id.", ) diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py index b5afcd803a10..42e25e2227a2 100644 --- a/chia/cmds/signer.py +++ b/chia/cmds/signer.py @@ -17,7 +17,6 @@ from chia.cmds.cmd_classes import NeedsWalletRPC, chia_command, command_helper, option from chia.cmds.cmds_util import TransactionBundle -from chia.cmds.wallet import wallet_cmd from chia.rpc.util import ALL_TRANSLATION_LAYERS from chia.rpc.wallet_request_types import ( ApplySignatures, @@ -38,7 +37,7 @@ def _clear_screen() -> None: os.system("cls" if os.name == "nt" else "clear") -@wallet_cmd.group("signer", help="Get information for an external signer") +@click.group("signer", help="Get information for an external signer") def signer_cmd() -> None: pass # pragma: no cover @@ -209,10 +208,10 @@ def handle_clvm_output(self, outputs: list[Streamable]) -> None: @chia_command( - signer_cmd, - "gather_signing_info", - "gather signer information", - "Gather the information from a transaction that a signer needs in order to create a signature", + group=signer_cmd, + name="gather_signing_info", + short_help="gather signer information", + help="Gather the information from a transaction that a signer needs in order to create a signature", ) class GatherSigningInfoCMD: sp_out: SPOut @@ -233,7 +232,12 @@ async def run(self) -> None: self.sp_out.handle_clvm_output([signing_instructions]) -@chia_command(signer_cmd, "apply_signatures", "apply signatures", "Apply a signer's signatures to a transaction bundle") +@chia_command( + group=signer_cmd, + name="apply_signatures", + short_help="apply signatures", + help="Apply a signer's signatures to a transaction bundle", +) class ApplySignaturesCMD: txs_out: TransactionsOut sp_in: SPIn @@ -272,10 +276,10 @@ async def run(self) -> None: @chia_command( - signer_cmd, - "execute_signing_instructions", - "execute signing instructions", - "Given some signing instructions, return signing responses", + group=signer_cmd, + name="execute_signing_instructions", + short_help="execute signing instructions", + help="Given some signing instructions, return signing responses", ) class ExecuteSigningInstructionsCMD: sp_out: SPOut @@ -299,10 +303,9 @@ async def run(self) -> None: @chia_command( - wallet_cmd, - "push_transactions", - "push transaction bundle", - "Push a transaction bundle to the wallet to send to the network", + name="push_transactions", + short_help="push transaction bundle", + help="Push a transaction bundle to the wallet to send to the network", ) class PushTransactionsCMD: txs_in: TransactionsIn diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index 3a26ed1abb2c..9058f6c9107d 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -9,6 +9,7 @@ from chia.cmds import options from chia.cmds.check_wallet_db import help_text as check_help_text +from chia.cmds.cmd_classes import get_chia_command_metadata from chia.cmds.cmds_util import timelock_args, tx_out_cmd from chia.cmds.coins import coins_cmd from chia.cmds.param_types import ( @@ -19,6 +20,7 @@ CliAmount, cli_amount_none, ) +from chia.cmds.signer import PushTransactionsCMD, signer_cmd from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint32, uint64 from chia.wallet.conditions import ConditionValidTimes @@ -34,6 +36,10 @@ def wallet_cmd(ctx: click.Context) -> None: pass +wallet_cmd.add_command(signer_cmd) +wallet_cmd.add_command(get_chia_command_metadata(PushTransactionsCMD).command) + + @wallet_cmd.command("get_transaction", help="Get a transaction") 
@click.option(
    "-wp",
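
The final patch makes `group` optional and keyword-only on `@chia_command`, and stores the generated `click.Command` on the class so callers can attach it to a group later via `get_chia_command_metadata` (this is how wallet.py registers `PushTransactionsCMD` above). A minimal sketch of that usage, under the signatures shown in this patch; `example_cmd`, `StatusCMD`, and `--verbosity` are hypothetical names for illustration and are not part of the repository:

import click

from chia.cmds.cmd_classes import chia_command, get_chia_command_metadata, option


@click.group("example", help="Hypothetical group used only for this sketch")
def example_cmd() -> None:
    pass


# No `group=` is passed, so the decorator only records the click.Command in the
# class metadata instead of registering it immediately.
@chia_command(
    name="status",
    short_help="show status",
    help="Show hypothetical status information",
)
class StatusCMD:
    verbosity: int = option("--verbosity", type=int, default=0, required=False)

    def run(self) -> None:
        # run() may be sync or async; the framework converts the class into a click callback
        print(f"status (verbosity={self.verbosity})")


# The command is attached to a group explicitly, after the fact, mirroring
# wallet_cmd.add_command(get_chia_command_metadata(PushTransactionsCMD).command).
example_cmd.add_command(get_chia_command_metadata(StatusCMD).command)

The design choice this enables is visible in signer.py: `signer_cmd` no longer needs to import `wallet_cmd` at definition time, and wallet.py decides where the commands mount, which removes the import-order coupling that previously required `import chia.cmds.signer` in chia.py.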