Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[CHIA-298] Optimize DL autoinserts. #17883

Merged
merged 26 commits into from May 9, 2024
Merged
Show file tree
Hide file tree
Changes from 21 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
84 changes: 46 additions & 38 deletions chia/_tests/core/data_layer/test_data_rpc.py
Expand Up @@ -132,6 +132,12 @@ async def bare_data_layer_api_fixture(tmp_path: Path, bt: BlockTools) -> AsyncIt
yield data_rpc_api


def bytes32_list_code(data: List[bytes32]) -> str:
    """Render a list of hashes as copy-pasteable Python source.

    Used in assertion failure messages so that an updated expected root
    history can be copied directly back into the reference data in this file.

    Args:
        data: The hashes to render.

    Returns:
        Source text such as ``[bytes32.from_hexstr('ab...'),]``; ``[]`` for
        empty input.
    """
    if not data:
        # Without this guard the f-string below would produce "[,]",
        # which is not valid Python source.
        return "[]"
    elements = [f"bytes32.from_hexstr({root.hex()!r})" for root in data]
    joined = ", ".join(elements)
    return f"[{joined},]"


async def init_wallet_and_node(
self_hostname: str, one_wallet_and_one_simulator: SimulatorsAndWalletsServices
) -> nodes_with_port_bt_ph:
Expand Down Expand Up @@ -1045,12 +1051,12 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x10".hex(), "value": b"\x02\x10".hex()}],
trade_id="a86b08e21b7677783812969fd8f8a1442d4d265cbb0bd2727bf6c16858789f5b",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("8e54f5066aa7999fc1561a56df59d11ff01f7df93cadf49a61adebf65dec65ea"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("4e66e60e6652f03fe15eb4d65b4ca18aa5b475822e7c698cba40c4e10bc15e66"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("eeb63ac765065d2ee161e1c059c8188ef809e1c3ed8739bad5bfee2c2ee1c742"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("785c24f5d6e51dd042c14bc4537a0b6d62c7026fe24baea685d6ba183303db7b"),
],
)

Expand Down Expand Up @@ -1105,12 +1111,12 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x10".hex(), "value": b"\x05\x10".hex()}],
trade_id="09c4af6aa770f6797516dbae697a5efead5a1eaa29295fcc314b3f2f48fd9fe9",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("1d1eb374688e3033cbce2514e4fded10ceffe068e663718b8a20716a65019f91"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("fa3cfc7366a6c4e94036b91c0ccc6744f607e697435a8332f733994bca4b76d0"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("87ebc7585e5b291203c318a7be96ca9cdfd5ddfc9cc2a97f55a3eddb49f0c74e"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("d21665368ad7fc0fdbc65f4d20858c62da690bf5939568f32c427be6b26d116f"),
],
)

Expand Down Expand Up @@ -1195,12 +1201,12 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x10".hex(), "value": b"\x02\x10".hex()}],
trade_id="9c407637b889be3b61b3f5599b7391ee6edbf69a0c8c954656231c0bfb710b08",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("043fed6d67961e36db2900b6aab24aa68be529c4e632aace486fbea1b26dc70e"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("fe8d2157948e2549cc92db6bb8719723e3a5ce32e7924c3cbcf2db875dc8f25d"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("eeb63ac765065d2ee161e1c059c8188ef809e1c3ed8739bad5bfee2c2ee1c742"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("785c24f5d6e51dd042c14bc4537a0b6d62c7026fe24baea685d6ba183303db7b"),
],
)

Expand Down Expand Up @@ -1258,12 +1264,12 @@ class MakeAndTakeReference:
],
trade_id="d53d08a6951849cd33de3a703bc133a2ae973a34ce4527e19e233fb5cb57bbe3",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("8e54f5066aa7999fc1561a56df59d11ff01f7df93cadf49a61adebf65dec65ea"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("4e66e60e6652f03fe15eb4d65b4ca18aa5b475822e7c698cba40c4e10bc15e66"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("2215da3c9a309e0d8972fd6acb8ac62898a0f7e4a07351d558c2cc5094dfc5ec"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("af4467e1727270a67bf23298aa29da959198ce9ef48cf4a34da2f309d72bf1c8"),
],
)

Expand Down Expand Up @@ -1323,11 +1329,11 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x10".hex(), "value": b"\x02\x10".hex()}],
trade_id="74ce97a6154467ca1a868e546a5d9e15e1e61c386aa27cb3686b198613972606",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("eeb63ac765065d2ee161e1c059c8188ef809e1c3ed8739bad5bfee2c2ee1c742"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("785c24f5d6e51dd042c14bc4537a0b6d62c7026fe24baea685d6ba183303db7b"),
],
)

Expand Down Expand Up @@ -1382,11 +1388,11 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x09".hex(), "value": b"\x02\x09".hex()}],
trade_id="be67400ac9856e7aa1ec96071457bda73f7304115902ac387ad2b1e085115956",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("8e54f5066aa7999fc1561a56df59d11ff01f7df93cadf49a61adebf65dec65ea"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("4e66e60e6652f03fe15eb4d65b4ca18aa5b475822e7c698cba40c4e10bc15e66"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
],
)

Expand Down Expand Up @@ -1441,12 +1447,12 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x10".hex(), "value": b"\x02\x10".hex()}],
trade_id="72232956344e9f12eec28635e9299d367e9fd9c4a8759db0f8f110c872919ff0",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("3761921b9b0520458995bb0ec353ea28d36efa2a7cfc3aba6772f005f7dd34c6"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("558e340c925a04f78ac0cd1da8d6296619016407f76014790adc17bc1ccd3a9b"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("eeb63ac765065d2ee161e1c059c8188ef809e1c3ed8739bad5bfee2c2ee1c742"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("785c24f5d6e51dd042c14bc4537a0b6d62c7026fe24baea685d6ba183303db7b"),
],
)

Expand Down Expand Up @@ -1501,12 +1507,12 @@ class MakeAndTakeReference:
taker_inclusions=[{"key": b"\x09".hex(), "value": b"\x02\x10".hex()}],
trade_id="399511c325cc7ac6df2e195271f9001f965d25327e46f89049aec1e286252746",
maker_root_history=[
bytes32.from_hexstr("6661ea6604b491118b0f49c932c0f0de2ad815a57b54b6ec8fdbd1b408ae7e27"),
bytes32.from_hexstr("8e54f5066aa7999fc1561a56df59d11ff01f7df93cadf49a61adebf65dec65ea"),
bytes32.from_hexstr("65c03098d1d14fe7a09a1a18225a210cce381f174cc853702ea1c08bd8476609"),
bytes32.from_hexstr("4e66e60e6652f03fe15eb4d65b4ca18aa5b475822e7c698cba40c4e10bc15e66"),
],
taker_root_history=[
bytes32.from_hexstr("42f08ebc0578f2cec7a9ad1c3038e74e0f30eba5c2f4cb1ee1c8fdb682c19dbb"),
bytes32.from_hexstr("d77afd64e9f307f3250a352c155480311512f9da2033228f1a2f0a3687cc90e0"),
bytes32.from_hexstr("9ea7ab07052249cac3dc9be7fd669620eb409ced20c2d281f9edc60ac379895d"),
bytes32.from_hexstr("3ea564c7737e28ec275fd46144a5c6d0ff9457400bcc027220fd6c7af97c4685"),
],
)

Expand Down Expand Up @@ -1617,16 +1623,18 @@ async def test_make_and_take_offer(offer_setup: OfferSetup, reference: MakeAndTa
taker_history = taker_history_result["root_history"]

assert [generation["confirmed"] for generation in maker_history] == [True] * len(maker_history)
assert [generation["root_hash"] for generation in maker_history] == [
maker_root_hash_history = [generation["root_hash"] for generation in maker_history]
assert maker_root_hash_history == [
bytes32([0] * 32),
*reference.maker_root_history,
]
], f"maker_root_history={bytes32_list_code(maker_root_hash_history[1:])},"

assert [generation["confirmed"] for generation in taker_history] == [True] * len(taker_history)
assert [generation["root_hash"] for generation in taker_history] == [
taker_root_hash_history = [generation["root_hash"] for generation in taker_history]
assert taker_root_hash_history == [
bytes32([0] * 32),
*reference.taker_root_history,
]
], f"taker_root_history={bytes32_list_code(taker_root_hash_history[1:])},"

# TODO: test maker and taker fees

Expand Down Expand Up @@ -3099,7 +3107,7 @@ async def test_pagination_cmds(
if max_page_size is None or max_page_size == 100:
assert keys == {
"keys": ["0x61616161", "0x6161"],
"root_hash": "0x3f4ae7b8e10ef48b3114843537d5def989ee0a3b6568af7e720a71730f260fa1",
"root_hash": "0x889a4a61b17be799ae9d36831246672ef857a24091f54481431a83309d4e890e",
"success": True,
"total_bytes": 6,
"total_pages": 1,
Expand All @@ -3119,7 +3127,7 @@ async def test_pagination_cmds(
"value": "0x6161",
},
],
"root_hash": "0x3f4ae7b8e10ef48b3114843537d5def989ee0a3b6568af7e720a71730f260fa1",
"root_hash": "0x889a4a61b17be799ae9d36831246672ef857a24091f54481431a83309d4e890e",
"success": True,
"total_bytes": 9,
"total_pages": 1,
Expand All @@ -3136,7 +3144,7 @@ async def test_pagination_cmds(
elif max_page_size == 5:
assert keys == {
"keys": ["0x61616161"],
"root_hash": "0x3f4ae7b8e10ef48b3114843537d5def989ee0a3b6568af7e720a71730f260fa1",
"root_hash": "0x889a4a61b17be799ae9d36831246672ef857a24091f54481431a83309d4e890e",
"success": True,
"total_bytes": 6,
"total_pages": 2,
Expand All @@ -3150,7 +3158,7 @@ async def test_pagination_cmds(
"value": "0x61",
}
],
"root_hash": "0x3f4ae7b8e10ef48b3114843537d5def989ee0a3b6568af7e720a71730f260fa1",
"root_hash": "0x889a4a61b17be799ae9d36831246672ef857a24091f54481431a83309d4e890e",
"success": True,
"total_bytes": 9,
"total_pages": 2,
Expand Down
81 changes: 73 additions & 8 deletions chia/_tests/core/data_layer/test_data_store.py
Expand Up @@ -370,12 +370,21 @@ async def test_get_ancestors_optimized(data_store: DataStore, tree_id: bytes32)
"use_optimized",
[True, False],
)
async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimized: bool, tmp_path: Path) -> None:
num_batches = 10
num_ops_per_batch = 100 if use_optimized else 10
saved_roots: List[Root] = []
@pytest.mark.parametrize(
"num_batches",
[1, 5, 10, 25],
)
async def test_batch_update(
data_store: DataStore,
tree_id: bytes32,
use_optimized: bool,
tmp_path: Path,
num_batches: int,
) -> None:
total_operations = 1000 if use_optimized else 100
num_ops_per_batch = total_operations // num_batches
saved_batches: List[List[Dict[str, Any]]] = []

saved_kv: List[List[TerminalNode]] = []
db_uri = generate_in_memory_db_uri()
async with DataStore.managed(database=db_uri, uri=True) as single_op_data_store:
await single_op_data_store.create_tree(tree_id, status=Status.COMMITTED)
Expand Down Expand Up @@ -442,16 +451,21 @@ async def test_batch_update(data_store: DataStore, tree_id: bytes32, use_optimiz
if (operation + 1) % num_ops_per_batch == 0:
saved_batches.append(batch)
batch = []
root = await single_op_data_store.get_tree_root(tree_id=tree_id)
saved_roots.append(root)
current_kv = await single_op_data_store.get_keys_values(tree_id=tree_id)
assert {kv.key: kv.value for kv in current_kv} == keys_values
saved_kv.append(current_kv)

for batch_number, batch in enumerate(saved_batches):
assert len(batch) == num_ops_per_batch
await data_store.insert_batch(tree_id, batch, status=Status.COMMITTED)
root = await data_store.get_tree_root(tree_id)
assert root.generation == batch_number + 1
assert root.node_hash == saved_roots[batch_number].node_hash
assert root.node_hash is not None
current_kv = await data_store.get_keys_values(tree_id=tree_id)
# Get the same keys/values, but possibly stored in other order.
assert {node.key: node.value for node in current_kv} == {
node.key: node.value for node in saved_kv[batch_number]
}
queue: List[bytes32] = [root.node_hash]
ancestors: Dict[bytes32, bytes32] = {}
while len(queue) > 0:
Expand Down Expand Up @@ -1509,6 +1523,18 @@ def id(self) -> str:
return f"pre={self.pre},count={self.count}"


@dataclass
class BatchesInsertBenchmarkCase:
    """Benchmark parameters: insert ``batch_count`` batches of ``count`` ops each."""

    # Number of insert operations contained in each batch.
    count: int
    # Number of batches submitted sequentially.
    batch_count: int
    # Maximum allowed runtime for the whole case, in seconds.
    limit: float
    marks: Marks = ()

    @property
    def id(self) -> str:
        """Human-readable identifier used as the parametrized test id."""
        return "count={},batch_count={}".format(self.count, self.batch_count)


@datacases(
BatchInsertBenchmarkCase(
pre=0,
Expand All @@ -1530,6 +1556,11 @@ def id(self) -> str:
count=1_000,
limit=36,
),
BatchInsertBenchmarkCase(
pre=10_000,
count=25_000,
limit=52,
),
)
@pytest.mark.anyio
async def test_benchmark_batch_insert_speed(
Expand Down Expand Up @@ -1567,6 +1598,40 @@ async def test_benchmark_batch_insert_speed(
)


@datacases(
BatchesInsertBenchmarkCase(
count=50,
batch_count=200,
limit=195,
),
)
altendky marked this conversation as resolved.
Show resolved Hide resolved
@pytest.mark.anyio
async def test_benchmark_batch_insert_speed_multiple_batches(
    data_store: DataStore,
    tree_id: bytes32,
    benchmark_runner: BenchmarkRunner,
    case: BatchesInsertBenchmarkCase,
) -> None:
    """Benchmark committing ``case.batch_count`` sequential batches of
    ``case.count`` inserts each, asserting the total runtime stays under
    ``case.limit`` seconds."""
    rng = random.Random()
    # Fixed seed keeps the generated workload reproducible across runs.
    rng.seed("shadowlands", version=2)

    with benchmark_runner.assert_runtime(seconds=case.limit):
        for batch_index in range(case.batch_count):
            first_key = batch_index * case.count
            changelist = []
            for key_index in range(first_key, first_key + case.count):
                changelist.append(
                    {
                        "action": "insert",
                        "key": key_index.to_bytes(32, byteorder="big", signed=False),
                        # 10,000 bytes of pseudo-random data per value.
                        "value": bytes(rng.getrandbits(8) for _ in range(10000)),
                    }
                )
            await data_store.insert_batch(
                tree_id=tree_id,
                changelist=changelist,
                status=Status.COMMITTED,
            )


@pytest.mark.anyio
async def test_delete_store_data(raw_data_store: DataStore) -> None:
tree_id = bytes32(b"\0" * 32)
Expand Down
3 changes: 2 additions & 1 deletion chia/data_layer/data_layer.py
Expand Up @@ -299,7 +299,8 @@ async def batch_insert(
raise ValueError(f"Singleton with launcher ID {tree_id} is not owned by DL Wallet")

t1 = time.monotonic()
batch_hash = await self.data_store.insert_batch(tree_id, changelist, status)
enable_batch_autoinsert = self.config.get("enable_batch_autoinsert", True)
batch_hash = await self.data_store.insert_batch(tree_id, changelist, status, enable_batch_autoinsert)
t2 = time.monotonic()
self.log.info(f"Data store batch update process time: {t2 - t1}.")
# todo return empty node hash from get_tree_root
Expand Down