From 1ea5343cbae43bc07c30133792b2789c9277610f Mon Sep 17 00:00:00 2001 From: James Piechota Date: Tue, 7 Jan 2025 22:53:01 +0000 Subject: [PATCH] test: add replica_2_9 to the ar_repack_mine_tests --- apps/arweave/e2e/ar_e2e.erl | 459 ++++++++++++---------- apps/arweave/e2e/ar_repack_mine_tests.erl | 50 ++- apps/arweave/src/ar_data_sync.erl | 9 +- apps/arweave/test/ar_test_node.erl | 3 + 4 files changed, 297 insertions(+), 224 deletions(-) diff --git a/apps/arweave/e2e/ar_e2e.erl b/apps/arweave/e2e/ar_e2e.erl index 374d026c9..97e9ab97a 100644 --- a/apps/arweave/e2e/ar_e2e.erl +++ b/apps/arweave/e2e/ar_e2e.erl @@ -4,9 +4,9 @@ write_chunk_fixture/3, load_chunk_fixture/2]). -export([delayed_print/2, packing_type_to_packing/2, - start_source_node/3, source_node_storage_modules/3, max_chunk_offset/1, - assert_block/2, assert_syncs_range/3, assert_does_not_sync_range/3, - assert_chunks/3, assert_no_chunks/2, assert_partition_size/4]). + start_source_node/3, source_node_storage_modules/3, max_chunk_offset/1, + assert_block/2, assert_syncs_range/3, assert_does_not_sync_range/3, + assert_chunks/3, assert_no_chunks/2, assert_partition_size/4, assert_empty_partition/3]). -include_lib("arweave/include/ar.hrl"). -include_lib("arweave/include/ar_config.hrl"). @@ -23,60 +23,60 @@ -spec fixture_dir(atom()) -> binary(). fixture_dir(FixtureType) -> - Dir = filename:dirname(?FILE), + Dir = filename:dirname(?FILE), filename:join([Dir, "fixtures", atom_to_list(FixtureType)]). -spec fixture_dir(atom(), [binary()]) -> binary(). fixture_dir(FixtureType, SubDirs) -> - FixtureDir = fixture_dir(FixtureType), + FixtureDir = fixture_dir(FixtureType), filename:join([FixtureDir] ++ SubDirs). -spec install_fixture(binary(), atom(), string()) -> binary(). install_fixture(FilePath, FixtureType, FixtureName) -> - FixtureDir = fixture_dir(FixtureType), - ok = filelib:ensure_dir(FixtureDir ++ "/"), - FixturePath = filename:join([FixtureDir, FixtureName]), - file:copy(FilePath, FixturePath), - FixturePath. + FixtureDir = fixture_dir(FixtureType), + ok = filelib:ensure_dir(FixtureDir ++ "/"), + FixturePath = filename:join([FixtureDir, FixtureName]), + file:copy(FilePath, FixturePath), + FixturePath. -spec load_wallet_fixture(atom()) -> tuple(). load_wallet_fixture(WalletFixture) -> - WalletName = atom_to_list(WalletFixture), - FixtureDir = fixture_dir(wallets), - FixturePath = filename:join([FixtureDir, WalletName ++ ".json"]), - Wallet = ar_wallet:load_keyfile(FixturePath), - Address = ar_wallet:to_address(Wallet), + WalletName = atom_to_list(WalletFixture), + FixtureDir = fixture_dir(wallets), + FixturePath = filename:join([FixtureDir, WalletName ++ ".json"]), + Wallet = ar_wallet:load_keyfile(FixturePath), + Address = ar_wallet:to_address(Wallet), WalletPath = ar_wallet:wallet_filepath(ar_util:encode(Address)), - file:copy(FixturePath, WalletPath), - ar_wallet:load_keyfile(WalletPath). + file:copy(FixturePath, WalletPath), + ar_wallet:load_keyfile(WalletPath). -spec write_chunk_fixture(binary(), non_neg_integer(), binary()) -> ok. write_chunk_fixture(Packing, EndOffset, Chunk) -> - FixtureDir = fixture_dir(chunks, [ar_serialize:encode_packing(Packing, true)]), - ok = filelib:ensure_dir(FixtureDir ++ "/"), - FixturePath = filename:join([FixtureDir, integer_to_list(EndOffset) ++ ".bin"]), - file:write_file(FixturePath, Chunk). 
+ FixtureDir = fixture_dir(chunks, [ar_serialize:encode_packing(Packing, true)]), + ok = filelib:ensure_dir(FixtureDir ++ "/"), + FixturePath = filename:join([FixtureDir, integer_to_list(EndOffset) ++ ".bin"]), + file:write_file(FixturePath, Chunk). -spec load_chunk_fixture(binary(), non_neg_integer()) -> binary(). load_chunk_fixture(Packing, EndOffset) -> - FixtureDir = fixture_dir(chunks, [ar_serialize:encode_packing(Packing, true)]), - FixturePath = filename:join([FixtureDir, integer_to_list(EndOffset) ++ ".bin"]), - file:read_file(FixturePath). + FixtureDir = fixture_dir(chunks, [ar_serialize:encode_packing(Packing, true)]), + FixturePath = filename:join([FixtureDir, integer_to_list(EndOffset) ++ ".bin"]), + file:read_file(FixturePath). packing_type_to_packing(PackingType, Address) -> - case PackingType of - replica_2_9 -> {replica_2_9, Address}; - spora_2_6 -> {spora_2_6, Address}; - composite_1 -> {composite, Address, 1}; - composite_2 -> {composite, Address, 2}; - unpacked -> unpacked - end. + case PackingType of + replica_2_9 -> {replica_2_9, Address}; + spora_2_6 -> {spora_2_6, Address}; + composite_1 -> {composite, Address, 1}; + composite_2 -> {composite, Address, 2}; + unpacked -> unpacked + end. start_source_node(Node, unpacked, _WalletFixture) -> TempNode = case Node of - peer1 -> peer2; - peer2 -> peer1 - end, + peer1 -> peer2; + peer2 -> peer1 + end, {Blocks, _SourceAddr, Chunks} = ar_e2e:start_source_node(TempNode, spora_2_6, wallet_a), {_, StorageModules} = ar_e2e:source_node_storage_modules(Node, unpacked, wallet_a), [B0 | _] = Blocks, @@ -92,230 +92,255 @@ start_source_node(Node, unpacked, _WalletFixture) -> ar_test_node:stop(TempNode), {Blocks, undefined, Chunks}; start_source_node(Node, PackingType, WalletFixture) -> - {Wallet, StorageModules} = source_node_storage_modules(Node, PackingType, WalletFixture), - RewardAddr = ar_wallet:to_address(Wallet), - [B0] = ar_weave:init([{RewardAddr, ?AR(200), <<>>}], 0, ?PARTITION_SIZE), - - {ok, Config} = ar_test_node:remote_call(Node, application, get_env, [arweave, config]), - - ?assertEqual(ar_test_node:peer_name(Node), - ar_test_node:start_other_node(Node, B0, Config#config{ - start_from_latest_state = true, - storage_modules = StorageModules, - auto_join = true, - mining_addr = RewardAddr - }, true) - ), - - %% Note: small chunks will be padded to 256 KiB. So B1 actually contains 3 chunks of data - %% and B2 starts at a chunk boundary and contains 1 chunk of data. 
- B1 = mine_block(Node, Wallet, floor(2.5 * ?DATA_CHUNK_SIZE)), - B2 = mine_block(Node, Wallet, floor(0.75 * ?DATA_CHUNK_SIZE)), - B3 = mine_block(Node, Wallet, ?PARTITION_SIZE), - B4 = mine_block(Node, Wallet, ?PARTITION_SIZE), - B5 = mine_block(Node, Wallet, ?PARTITION_SIZE), - - %% List of {Block, EndOffset, ChunkSize} - Chunks = [ - {B1, ?PARTITION_SIZE + ?DATA_CHUNK_SIZE, ?DATA_CHUNK_SIZE}, - {B1, ?PARTITION_SIZE + (2*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, - {B1, ?PARTITION_SIZE + floor(2.5 * ?DATA_CHUNK_SIZE), floor(0.5 * ?DATA_CHUNK_SIZE)}, - {B2, ?PARTITION_SIZE + floor(3.75 * ?DATA_CHUNK_SIZE), floor(0.75 * ?DATA_CHUNK_SIZE)}, - {B3, ?PARTITION_SIZE + (5*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, - {B3, ?PARTITION_SIZE + (6*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, - {B3, ?PARTITION_SIZE + (7*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, - {B3, ?PARTITION_SIZE + (8*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE} - ], - - SourcePacking = ar_e2e:packing_type_to_packing(PackingType, RewardAddr), + {Wallet, StorageModules} = source_node_storage_modules(Node, PackingType, WalletFixture), + RewardAddr = ar_wallet:to_address(Wallet), + [B0] = ar_weave:init([{RewardAddr, ?AR(200), <<>>}], 0, ?PARTITION_SIZE), + + {ok, Config} = ar_test_node:remote_call(Node, application, get_env, [arweave, config]), + + ?assertEqual(ar_test_node:peer_name(Node), + ar_test_node:start_other_node(Node, B0, Config#config{ + start_from_latest_state = true, + storage_modules = StorageModules, + auto_join = true, + mining_addr = RewardAddr + }, true) + ), + + %% Note: small chunks will be padded to 256 KiB. So B1 actually contains 3 chunks of data + %% and B2 starts at a chunk boundary and contains 1 chunk of data. + B1 = mine_block(Node, Wallet, floor(2.5 * ?DATA_CHUNK_SIZE)), + B2 = mine_block(Node, Wallet, floor(0.75 * ?DATA_CHUNK_SIZE)), + B3 = mine_block(Node, Wallet, ?PARTITION_SIZE), + B4 = mine_block(Node, Wallet, ?PARTITION_SIZE), + B5 = mine_block(Node, Wallet, ?PARTITION_SIZE), + + %% List of {Block, EndOffset, ChunkSize} + Chunks = [ + {B1, ?PARTITION_SIZE + ?DATA_CHUNK_SIZE, ?DATA_CHUNK_SIZE}, + {B1, ?PARTITION_SIZE + (2*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, + {B1, ?PARTITION_SIZE + floor(2.5 * ?DATA_CHUNK_SIZE), floor(0.5 * ?DATA_CHUNK_SIZE)}, + {B2, ?PARTITION_SIZE + floor(3.75 * ?DATA_CHUNK_SIZE), floor(0.75 * ?DATA_CHUNK_SIZE)}, + {B3, ?PARTITION_SIZE + (5*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, + {B3, ?PARTITION_SIZE + (6*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, + {B3, ?PARTITION_SIZE + (7*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE}, + {B3, ?PARTITION_SIZE + (8*?DATA_CHUNK_SIZE), ?DATA_CHUNK_SIZE} + ], + + SourcePacking = ar_e2e:packing_type_to_packing(PackingType, RewardAddr), ar_e2e:assert_syncs_range(Node, ?PARTITION_SIZE, 2*?PARTITION_SIZE), + ar_e2e:assert_chunks(Node, SourcePacking, Chunks), - {[B0, B1, B2, B3, B4, B5], RewardAddr, Chunks}. + {[B0, B1, B2, B3, B4, B5], RewardAddr, Chunks}. max_chunk_offset(Chunks) -> - lists:foldl(fun({_, EndOffset, _}, Acc) -> max(Acc, EndOffset) end, 0, Chunks). + lists:foldl(fun({_, EndOffset, _}, Acc) -> max(Acc, EndOffset) end, 0, Chunks). 
source_node_storage_modules(_Node, unpacked, _WalletFixture) -> - {undefined, source_node_storage_modules(unpacked)}; + {undefined, source_node_storage_modules(unpacked)}; source_node_storage_modules(Node, PackingType, WalletFixture) -> - Wallet = ar_test_node:remote_call(Node, ar_e2e, load_wallet_fixture, [WalletFixture]), - RewardAddr = ar_wallet:to_address(Wallet), - SourcePacking = packing_type_to_packing(PackingType, RewardAddr), - {Wallet, source_node_storage_modules(SourcePacking)}. + Wallet = ar_test_node:remote_call(Node, ar_e2e, load_wallet_fixture, [WalletFixture]), + RewardAddr = ar_wallet:to_address(Wallet), + SourcePacking = packing_type_to_packing(PackingType, RewardAddr), + {Wallet, source_node_storage_modules(SourcePacking)}. source_node_storage_modules(SourcePacking) -> - [ - {?PARTITION_SIZE, 0, SourcePacking}, - {?PARTITION_SIZE, 1, SourcePacking}, - {?PARTITION_SIZE, 2, SourcePacking}, - {?PARTITION_SIZE, 3, SourcePacking}, - {?PARTITION_SIZE, 4, SourcePacking}, - {?PARTITION_SIZE, 5, SourcePacking}, - {?PARTITION_SIZE, 6, SourcePacking} - ]. - + [ + {?PARTITION_SIZE, 0, SourcePacking}, + {?PARTITION_SIZE, 1, SourcePacking}, + {?PARTITION_SIZE, 2, SourcePacking}, + {?PARTITION_SIZE, 3, SourcePacking}, + {?PARTITION_SIZE, 4, SourcePacking}, + {?PARTITION_SIZE, 5, SourcePacking}, + {?PARTITION_SIZE, 6, SourcePacking} + ]. + mine_block(Node, Wallet, DataSize) -> - WeaveSize = ar_test_node:remote_call(Node, ar_node, get_current_weave_size, []), - Addr = ar_wallet:to_address(Wallet), - {TX, Chunks} = generate_tx(Node, Wallet, WeaveSize, DataSize), - B = ar_test_node:post_and_mine(#{ miner => Node, await_on => Node }, [TX]), + WeaveSize = ar_test_node:remote_call(Node, ar_node, get_current_weave_size, []), + Addr = ar_wallet:to_address(Wallet), + {TX, Chunks} = generate_tx(Node, Wallet, WeaveSize, DataSize), + B = ar_test_node:post_and_mine(#{ miner => Node, await_on => Node }, [TX]), - ?assertEqual(Addr, B#block.reward_addr), + ?assertEqual(Addr, B#block.reward_addr), - Proofs = ar_test_data_sync:post_proofs(Node, B, TX, Chunks), - - ar_test_data_sync:wait_until_syncs_chunks(Node, Proofs, infinity), - B. + Proofs = ar_test_data_sync:post_proofs(Node, B, TX, Chunks), + + ar_test_data_sync:wait_until_syncs_chunks(Node, Proofs, infinity), + B. generate_tx(Node, Wallet, WeaveSize, DataSize) -> - Chunks = generate_chunks(Node, WeaveSize, DataSize, []), - {DataRoot, _DataTree} = ar_merkle:generate_tree( - [{ar_tx:generate_chunk_id(Chunk), Offset} || {Chunk, Offset} <- Chunks] - ), - TX = ar_test_node:sign_tx(Node, Wallet, #{ - data_size => DataSize, - data_root => DataRoot - }), - {TX, [Chunk || {Chunk, _} <- Chunks]}. + Chunks = generate_chunks(Node, WeaveSize, DataSize, []), + {DataRoot, _DataTree} = ar_merkle:generate_tree( + [{ar_tx:generate_chunk_id(Chunk), Offset} || {Chunk, Offset} <- Chunks] + ), + TX = ar_test_node:sign_tx(Node, Wallet, #{ + data_size => DataSize, + data_root => DataRoot + }), + {TX, [Chunk || {Chunk, _} <- Chunks]}. 
generate_chunks(Node, WeaveSize, DataSize, Acc) when DataSize > 0 -> - ChunkSize = min(DataSize, ?DATA_CHUNK_SIZE), - EndOffset = (length(Acc) * ?DATA_CHUNK_SIZE) + ChunkSize, - Chunk = ar_test_node:get_genesis_chunk(WeaveSize + EndOffset), - generate_chunks(Node, WeaveSize, DataSize - ChunkSize, Acc ++ [{Chunk, EndOffset}]); + ChunkSize = min(DataSize, ?DATA_CHUNK_SIZE), + EndOffset = (length(Acc) * ?DATA_CHUNK_SIZE) + ChunkSize, + Chunk = ar_test_node:get_genesis_chunk(WeaveSize + EndOffset), + generate_chunks(Node, WeaveSize, DataSize - ChunkSize, Acc ++ [{Chunk, EndOffset}]); generate_chunks(_, _, _, Acc) -> - Acc. + Acc. assert_block({spora_2_6, Address}, MinedBlock) -> - ?assertEqual(Address, MinedBlock#block.reward_addr), - ?assertEqual(0, MinedBlock#block.packing_difficulty); + ?assertEqual(Address, MinedBlock#block.reward_addr), + ?assertEqual(0, MinedBlock#block.packing_difficulty); assert_block({composite, Address, PackingDifficulty}, MinedBlock) -> - ?assertEqual(Address, MinedBlock#block.reward_addr), - ?assertEqual(PackingDifficulty, MinedBlock#block.packing_difficulty); + ?assertEqual(Address, MinedBlock#block.reward_addr), + ?assertEqual(PackingDifficulty, MinedBlock#block.packing_difficulty); assert_block({replica_2_9, Address}, MinedBlock) -> - ?assertEqual(Address, MinedBlock#block.reward_addr), - ?assertEqual(?REPLICA_2_9_PACKING_DIFFICULTY, MinedBlock#block.packing_difficulty). - + ?assertEqual(Address, MinedBlock#block.reward_addr), + ?assertEqual(?REPLICA_2_9_PACKING_DIFFICULTY, MinedBlock#block.packing_difficulty). + assert_syncs_range(Node, StartOffset, EndOffset) -> - ?assert( - ar_util:do_until( - fun() -> has_range(Node, StartOffset, EndOffset) end, - 100, - ?E2E_WAIT_TIME - ), - iolist_to_binary(io_lib:format( - "~s Failed to sync range ~p - ~p", [Node, StartOffset, EndOffset]))). + ?assert( + ar_util:do_until( + fun() -> has_range(Node, StartOffset, EndOffset) end, + 100, + ?E2E_WAIT_TIME + ), + iolist_to_binary(io_lib:format( + "~s Failed to sync range ~p - ~p", [Node, StartOffset, EndOffset]))). assert_does_not_sync_range(Node, StartOffset, EndOffset) -> - ar_util:do_until( - fun() -> has_range(Node, StartOffset, EndOffset) end, - 1000, - ?E2E_WAIT_TIME - ), - ?assertEqual(false, has_range(Node, StartOffset, EndOffset), - iolist_to_binary(io_lib:format( - "~s synced range when it should not have: ~p - ~p", - [Node, StartOffset, EndOffset]))). + ar_util:do_until( + fun() -> has_range(Node, StartOffset, EndOffset) end, + 1000, + ?E2E_WAIT_TIME + ), + ?assertEqual(false, has_range(Node, StartOffset, EndOffset), + iolist_to_binary(io_lib:format( + "~s synced range when it should not have: ~p - ~p", + [Node, StartOffset, EndOffset]))). assert_partition_size(Node, PartitionNumber, Packing, Size) -> - ?assert( - ar_util:do_until( - fun() -> - ar_test_node:remote_call(Node, ar_mining_stats, get_partition_data_size, - [PartitionNumber, Packing]) >= Size - end, - 100, - ?E2E_WAIT_TIME - ), - iolist_to_binary(io_lib:format( - "~s partition ~p,~p failed to reach size ~p", [Node, PartitionNumber, - ar_serialize:encode_packing(Packing, true), Size]))). + ?assert( + ar_util:do_until( + fun() -> + ar_test_node:remote_call(Node, ar_mining_stats, get_partition_data_size, + [PartitionNumber, Packing]) >= Size + end, + 100, + ?E2E_WAIT_TIME + ), + iolist_to_binary(io_lib:format( + "~s partition ~p,~p failed to reach size ~p", [Node, PartitionNumber, + ar_serialize:encode_packing(Packing, true), Size]))). 
+
+assert_empty_partition(Node, PartitionNumber, Packing) ->
+	ar_util:do_until(
+		fun() ->
+			ar_test_node:remote_call(Node, ar_mining_stats, get_partition_data_size,
+				[PartitionNumber, Packing]) > 0
+		end,
+		100,
+		?E2E_WAIT_TIME
+	),
+	?assertEqual(
+		0,
+		ar_test_node:remote_call(Node, ar_mining_stats, get_partition_data_size,
+			[PartitionNumber, Packing]),
+		iolist_to_binary(io_lib:format(
+			"~s partition ~p,~p is not empty", [Node, PartitionNumber,
+			ar_serialize:encode_packing(Packing, true)]))).
 
 has_range(Node, StartOffset, EndOffset) ->
-	NodeIP = ar_test_node:peer_ip(Node),
-	case ar_http_iface_client:get_sync_record(NodeIP) of
-		{ok, SyncRecord} ->
-			interval_contains(SyncRecord, StartOffset, EndOffset);
-		Error ->
-			?assert(false,
-				iolist_to_binary(io_lib:format(
-					"Failed to get sync record from ~p: ~p", [Node, Error]))),
-			false
-	end.
+	NodeIP = ar_test_node:peer_ip(Node),
+	case ar_http_iface_client:get_sync_record(NodeIP) of
+		{ok, SyncRecord} ->
+			interval_contains(SyncRecord, StartOffset, EndOffset);
+		Error ->
+			?assert(false,
+				iolist_to_binary(io_lib:format(
+					"Failed to get sync record from ~p: ~p", [Node, Error]))),
+			false
+	end.
 
 interval_contains(Intervals, Start, End) when End > Start ->
-	case gb_sets:iterator_from({Start, Start}, Intervals) of
-		Iter ->
-			interval_contains2(Iter, Start, End)
-	end.
+	case gb_sets:iterator_from({Start, Start}, Intervals) of
+		Iter ->
+			interval_contains2(Iter, Start, End)
+	end.
 
 interval_contains2(Iter, Start, End) ->
-	case gb_sets:next(Iter) of
-		none ->
-			false;
-		{{IntervalEnd, IntervalStart}, _} when IntervalStart =< Start andalso IntervalEnd >= End ->
-			true;
-		_ ->
-			false
-	end.
+	case gb_sets:next(Iter) of
+		none ->
+			false;
+		{{IntervalEnd, IntervalStart}, _} when IntervalStart =< Start andalso IntervalEnd >= End ->
+			true;
+		_ ->
+			false
+	end.
 
 assert_chunks(Node, Packing, Chunks) ->
-	lists:foreach(fun({Block, EndOffset, ChunkSize}) ->
-		assert_chunk(Node, Packing, Block, EndOffset, ChunkSize)
-	end, Chunks).
+	lists:foreach(fun({Block, EndOffset, ChunkSize}) ->
+		assert_chunk(Node, Packing, Block, EndOffset, ChunkSize)
+	end, Chunks).
 
 assert_chunk(Node, Packing, Block, EndOffset, ChunkSize) ->
-	?LOG_INFO("Asserting chunk at offset ~p, size ~p", [EndOffset, ChunkSize]),
-	Result = ar_test_node:get_chunk(Node, EndOffset, any),
-	{ok, {{StatusCode, _}, _, EncodedProof, _, _}} = Result,
-	?assertEqual(<<"200">>, StatusCode, iolist_to_binary(io_lib:format(
-		"Chunk not found. 
Node: ~p, Offset: ~p", - [Node, EndOffset]))), - Proof = ar_serialize:json_map_to_poa_map( - jiffy:decode(EncodedProof, [return_maps]) - ), - Proof = ar_serialize:json_map_to_poa_map( - jiffy:decode(EncodedProof, [return_maps]) - ), - {true, _} = ar_test_node:remote_call(Node, ar_poa, validate_paths, [ - Block#block.tx_root, - maps:get(tx_path, Proof), - maps:get(data_path, Proof), - EndOffset - 1 - ]), - Chunk = maps:get(chunk, Proof), - - maybe_write_chunk_fixture(Packing, EndOffset, Chunk), - - {ok, ExpectedPackedChunk} = ar_e2e:load_chunk_fixture(Packing, EndOffset), - ?assertEqual(ExpectedPackedChunk, Chunk, - iolist_to_binary(io_lib:format( - "Chunk at offset ~p, size ~p does not match previously packed chunk", - [EndOffset, ChunkSize]))), - - {ok, UnpackedChunk} = ar_packing_server:unpack( - Packing, EndOffset, Block#block.tx_root, Chunk, ?DATA_CHUNK_SIZE), - UnpaddedChunk = ar_packing_server:unpad_chunk(Packing, UnpackedChunk, ChunkSize, byte_size(Chunk)), - ExpectedUnpackedChunk = ar_test_node:get_genesis_chunk(EndOffset), - ?assertEqual(ExpectedUnpackedChunk, UnpaddedChunk, - iolist_to_binary(io_lib:format( - "Chunk at offset ~p, size ~p does not match unpacked chunk", - [EndOffset, ChunkSize]))). + ?LOG_INFO("Asserting chunk at offset ~p, size ~p", [EndOffset, ChunkSize]), + + %% Normally we can't sync replica_2_9 data since it's too expensive to unpack. The + %% one exception is if you request the exact format stored by the node. + RequestPacking = case Packing of + {replica_2_9, _} -> Packing; + _ -> any + end, + Result = ar_test_node:get_chunk(Node, EndOffset, RequestPacking), + {ok, {{StatusCode, _}, _, EncodedProof, _, _}} = Result, + ?assertEqual(<<"200">>, StatusCode, iolist_to_binary(io_lib:format( + "Chunk not found. Node: ~p, Offset: ~p", + [Node, EndOffset]))), + Proof = ar_serialize:json_map_to_poa_map( + jiffy:decode(EncodedProof, [return_maps]) + ), + Proof = ar_serialize:json_map_to_poa_map( + jiffy:decode(EncodedProof, [return_maps]) + ), + {true, _} = ar_test_node:remote_call(Node, ar_poa, validate_paths, [ + Block#block.tx_root, + maps:get(tx_path, Proof), + maps:get(data_path, Proof), + EndOffset - 1 + ]), + Chunk = maps:get(chunk, Proof), + + maybe_write_chunk_fixture(Packing, EndOffset, Chunk), + + {ok, ExpectedPackedChunk} = ar_e2e:load_chunk_fixture(Packing, EndOffset), + ?assertEqual(ExpectedPackedChunk, Chunk, + iolist_to_binary(io_lib:format( + "Chunk at offset ~p, size ~p does not match previously packed chunk", + [EndOffset, ChunkSize]))), + + {ok, UnpackedChunk} = ar_packing_server:unpack( + Packing, EndOffset, Block#block.tx_root, Chunk, ?DATA_CHUNK_SIZE), + UnpaddedChunk = ar_packing_server:unpad_chunk(Packing, UnpackedChunk, ChunkSize, byte_size(Chunk)), + ExpectedUnpackedChunk = ar_test_node:get_genesis_chunk(EndOffset), + ?assertEqual(ExpectedUnpackedChunk, UnpaddedChunk, + iolist_to_binary(io_lib:format( + "Chunk at offset ~p, size ~p does not match unpacked chunk", + [EndOffset, ChunkSize]))). assert_no_chunks(Node, Chunks) -> - lists:foreach(fun({_Block, EndOffset, _ChunkSize}) -> - assert_no_chunk(Node, EndOffset) - end, Chunks). + lists:foreach(fun({_Block, EndOffset, _ChunkSize}) -> + assert_no_chunk(Node, EndOffset) + end, Chunks). assert_no_chunk(Node, EndOffset) -> - Result = ar_test_node:get_chunk(Node, EndOffset, any), - {ok, {{StatusCode, _}, _, _, _, _}} = Result, - ?assertEqual(<<"404">>, StatusCode, iolist_to_binary(io_lib:format( - "Chunk found when it should not have been. Node: ~p, Offset: ~p", - [Node, EndOffset]))). 
+ Result = ar_test_node:get_chunk(Node, EndOffset, any), + {ok, {{StatusCode, _}, _, _, _, _}} = Result, + ?assertEqual(<<"404">>, StatusCode, iolist_to_binary(io_lib:format( + "Chunk found when it should not have been. Node: ~p, Offset: ~p", + [Node, EndOffset]))). delayed_print(Format, Args) -> %% Print the specific flavor of this test since it isn't captured in the test name. @@ -339,8 +364,8 @@ write_wallet_fixtures() -> ok. maybe_write_chunk_fixture(Packing, EndOffset, Chunk) when ?UPDATE_CHUNK_FIXTURES =:= true -> - ?LOG_ERROR("WARNING: Updating chunk fixture! EndOffset: ~p, Packing: ~p", - [EndOffset, ar_serialize:encode_packing(Packing, true)]), + ?LOG_ERROR("WARNING: Updating chunk fixture! EndOffset: ~p, Packing: ~p", + [EndOffset, ar_serialize:encode_packing(Packing, true)]), ar_e2e:write_chunk_fixture(Packing, EndOffset, Chunk); maybe_write_chunk_fixture(_, _, _) -> ok. diff --git a/apps/arweave/e2e/ar_repack_mine_tests.erl b/apps/arweave/e2e/ar_repack_mine_tests.erl index af2f94403..3adf4ab6b 100644 --- a/apps/arweave/e2e/ar_repack_mine_tests.erl +++ b/apps/arweave/e2e/ar_repack_mine_tests.erl @@ -9,17 +9,27 @@ %% -------------------------------------------------------------------------------------------- repack_mine_test_() -> [ + {timeout, 300, {with, {replica_2_9, replica_2_9}, [fun test_repacking_blocked/1]}}, + {timeout, 300, {with, {replica_2_9, spora_2_6}, [fun test_repacking_blocked/1]}}, + {timeout, 300, {with, {replica_2_9, composite_1}, [fun test_repacking_blocked/1]}}, + {timeout, 300, {with, {replica_2_9, composite_2}, [fun test_repacking_blocked/1]}}, + {timeout, 300, {with, {replica_2_9, unpacked}, [fun test_repacking_blocked/1]}}, + {timeout, 300, {with, {unpacked, replica_2_9}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {unpacked, spora_2_6}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {unpacked, composite_1}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {unpacked, composite_2}, [fun test_repack_mine/1]}}, + {timeout, 300, {with, {unpacked, unpacked}, [fun test_repack_mine/1]}}, + {timeout, 300, {with, {spora_2_6, replica_2_9}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {spora_2_6, spora_2_6}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {spora_2_6, composite_1}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {spora_2_6, composite_2}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {spora_2_6, unpacked}, [fun test_repack_mine/1]}}, + {timeout, 300, {with, {composite_1, replica_2_9}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_1, spora_2_6}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_1, composite_1}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_1, composite_2}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_1, unpacked}, [fun test_repack_mine/1]}}, + {timeout, 300, {with, {composite_2, replica_2_9}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_2, spora_2_6}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_2, composite_1}, [fun test_repack_mine/1]}}, {timeout, 300, {with, {composite_2, composite_2}, [fun test_repack_mine/1]}}, @@ -60,6 +70,8 @@ test_repack_mine({FromPackingType, ToPackingType}) -> mining_addr = AddrB }), ar_test_node:restart(RepackerNode), + ar_e2e:assert_syncs_range(RepackerNode, ?PARTITION_SIZE, 2*?PARTITION_SIZE), + ar_e2e:assert_chunks(RepackerNode, ToPacking, Chunks), case ToPackingType of @@ -78,6 +90,41 @@ test_repack_mine({FromPackingType, ToPackingType}) -> ?assertEqual(RepackerBlock, ValidatorBlock) end. 
+test_repacking_blocked({FromPackingType, ToPackingType}) -> + ar_e2e:delayed_print(<<" ~p -> ~p ">>, [FromPackingType, ToPackingType]), + ValidatorNode = peer1, + RepackerNode = peer2, + {Blocks, _AddrA, Chunks} = ar_e2e:start_source_node( + RepackerNode, FromPackingType, wallet_a), + + [B0 | _] = Blocks, + start_validator_node(ValidatorNode, RepackerNode, B0), + + {WalletB, StorageModules} = ar_e2e:source_node_storage_modules( + RepackerNode, ToPackingType, wallet_b), + AddrB = case WalletB of + undefined -> undefined; + _ -> ar_wallet:to_address(WalletB) + end, + ToPacking = ar_e2e:packing_type_to_packing(ToPackingType, AddrB), + {ok, Config} = ar_test_node:get_config(RepackerNode), + ar_test_node:update_config(RepackerNode, Config#config{ + storage_modules = Config#config.storage_modules ++ StorageModules, + mining_addr = AddrB + }), + ar_test_node:restart(RepackerNode), + + ar_e2e:assert_empty_partition(RepackerNode, 1, ToPacking), + ar_e2e:assert_no_chunks(RepackerNode, Chunks), + + ar_test_node:update_config(RepackerNode, Config#config{ + storage_modules = StorageModules, + mining_addr = AddrB + }), + ar_test_node:restart(RepackerNode), + + ar_e2e:assert_empty_partition(RepackerNode, 1, ToPacking), + ar_e2e:assert_no_chunks(RepackerNode, Chunks). start_validator_node(ValidatorNode, RepackerNode, B0) -> {ok, Config} = ar_test_node:get_config(ValidatorNode), @@ -85,7 +132,8 @@ start_validator_node(ValidatorNode, RepackerNode, B0) -> ar_test_node:start_other_node(ValidatorNode, B0, Config#config{ peers = [ar_test_node:peer_ip(RepackerNode)], start_from_latest_state = true, - auto_join = true + auto_join = true, + storage_modules = [] }, true) ), ok. diff --git a/apps/arweave/src/ar_data_sync.erl b/apps/arweave/src/ar_data_sync.erl index 4b8aa3269..fb9fd4abb 100644 --- a/apps/arweave/src/ar_data_sync.erl +++ b/apps/arweave/src/ar_data_sync.erl @@ -833,7 +833,7 @@ handle_cast(sync_data2, #sync_data_state{ } = State) -> Intervals = case ar_storage_module:get_packing(StoreID) of - {replica_2_9, _} -> + {replica_2_9, _} when ?BLOCK_2_9_SYNCING -> %% Do not unpack the 2.9 data by default, finding unpacked data %% may be cheaper. []; @@ -1734,7 +1734,8 @@ invalidate_bad_data_record({Start, End, StoreID, Case}) -> PaddedStart end, ?LOG_WARNING([{event, invalidating_bad_data_record}, {type, Case}, - {range_start, PaddedStart2}, {range_end, PaddedEnd}]), + {range_start, PaddedStart2}, {range_end, PaddedEnd}, + {store_id, StoreID}]), case ar_sync_record:delete(PaddedEnd, PaddedStart2, ar_data_sync, StoreID) of ok -> ar_sync_record:add(PaddedEnd, PaddedStart2, invalid_chunks, StoreID), @@ -2748,10 +2749,6 @@ write_not_blacklisted_chunk(Offset, ChunkDataKey, Chunk, ChunkSize, DataPath, Pa Error end; _ -> - ?LOG_ERROR([{event, failed_to_write_not_blacklisted_chunk_to_chunk_data_db}, - {offset, Offset}, {chunk_size, ChunkSize}, - {packing, ar_serialize:encode_packing(Packing, true)}, - {store_id, StoreID}]), Result end; false -> diff --git a/apps/arweave/test/ar_test_node.erl b/apps/arweave/test/ar_test_node.erl index 3990a919e..138ed2d5d 100644 --- a/apps/arweave/test/ar_test_node.erl +++ b/apps/arweave/test/ar_test_node.erl @@ -205,6 +205,8 @@ update_config(Config) -> repack_in_place_storage_modules = Config#config.repack_in_place_storage_modules }, ok = application:set_env(arweave, config, Config2), + ?LOG_INFO("Updated Config:"), + ar_config:log_config(Config2), Config2. 
start_other_node(Node, B0, Config, WaitUntilSync) -> @@ -570,6 +572,7 @@ start(B0, RewardAddr, Config, StorageModules) -> wait_until_syncs_genesis_data(). restart() -> + ?LOG_INFO("Restarting node"), stop(), ar:start_dependencies(), wait_until_joined().
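
For context on the new entries in repack_mine_test_/0: EUnit's {with, X, [F/1]} instantiator applies each listed unary function to X, so every {FromPackingType, ToPackingType} pair added above becomes one call to test_repack_mine/1 or test_repacking_blocked/1, wrapped in a per-case timeout. The stand-alone sketch below illustrates that same pattern in isolation; the module name, packing atoms, and assertions are illustrative only and are not part of this patch.

    -module(with_instantiator_sketch).
    -include_lib("eunit/include/eunit.hrl").

    %% Each {with, Arg, [Fun/1]} entry calls Fun(Arg), wrapped in a per-case
    %% timeout, mirroring how the packing-type pairs drive the repack tests.
    pairs_test_() ->
        [
            {timeout, 60, {with, {replica_2_9, spora_2_6}, [fun check_pair/1]}},
            {timeout, 60, {with, {unpacked, replica_2_9}, [fun check_pair/1]}}
        ].

    check_pair({FromPackingType, ToPackingType}) ->
        ?assert(is_atom(FromPackingType)),
        ?assert(is_atom(ToPackingType)).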