Merge pull request #808 from input-output-hk/test/lw-7282-idle-test-hd-wallet

LW-7282 idle test hd wallet
mirceahasegan committed Jul 18, 2023
2 parents 98cccbd + 180704f commit 222b4b4
Showing 7 changed files with 254 additions and 0 deletions.
1 change: 1 addition & 0 deletions packages/e2e/package.json
@@ -134,6 +134,7 @@
"@types/delay": "^3.1.0",
"@types/dockerode": "^3.3.8",
"@types/jest": "^28.1.2",
"@types/k6": "^0.45.0",
"@types/lodash": "^4.14.182",
"@types/ora": "^3.2.0",
"@types/uuid": "^8.3.4",
21 changes: 21 additions & 0 deletions packages/e2e/test/k6/README.md
@@ -0,0 +1,21 @@
# Running tests locally

## Prerequisites

1. [K6 installed locally](https://k6.io/docs/get-started/installation/). Needed for `k6 run the-test.js`.
1. Metrics dashboards & reports: install the [K6 Dashboards extension](https://github.com/szkiba/xk6-dashboard#download).
   - **Make sure you are using the `k6` binary downloaded/built from the `xk6-dashboard` project** when running or replaying (see the build sketch after this list).
     Otherwise the command will fail with `invalid output type 'dashboard', available types are`.
   - K6 dashboards are available by default at http://127.0.0.1:5665
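
If you prefer building the binary instead of downloading it, a minimal sketch using [xk6](https://github.com/grafana/xk6) could look like the following (the `@latest` pins are assumptions; follow the extension's own build instructions if they differ):

```sh
# Install the xk6 builder and produce a k6 binary that bundles the dashboard output extension
# (version pins are assumptions)
go install go.k6.io/xk6/cmd/xk6@latest
xk6 build --with github.com/szkiba/xk6-dashboard@latest
```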

## Running

- Without K6 dashboards:
  `k6 run test-file.js --out json=test-file-out.json --out csv=test-file-out.csv`

- With K6 dashboards while the test is running:
  `k6 run test-file.js --out json=test-file-out.json --out csv=test-file-out.csv --out dashboard`

- Open K6 dashboards for a previous run (the `json` out file is needed):
  `k6 dashboard replay test-file-out.json`
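
The new `idle-clients.js` scenario reads the provider endpoint from `__ENV.PROVIDER_SERVER_URL`, so it must be supplied on the command line; a minimal sketch, assuming a local cardano-services provider at an arbitrary port (the URL is not part of this PR):

```sh
# The URL is an assumed local provider server; adjust to your deployment
k6 run -e PROVIDER_SERVER_URL=http://localhost:4000 scenarios/idle-clients.js \
  --out json=idle-clients-out.json --out dashboard
```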

221 changes: 221 additions & 0 deletions packages/e2e/test/k6/scenarios/idle-clients.js
@@ -0,0 +1,221 @@
/* eslint-disable no-console */
// K6 doesn't support numeric separators style
/* eslint-disable unicorn/numeric-separators-style */

/* eslint-disable func-style */
import { check, sleep } from 'k6';
import http from 'k6/http';

/**
* # Script overall description:
*
* ## Purpose
* Measure the load of the back-end services given a number of wallets in idle state.
* Idle state is defined as the queries that the wallets perform as a result of staying in sync,
* without submitting/receiving any transactions.
*
* ## Methodology
* - Each wallet is just a set of queries performed on a number of addresses; it is not an actual SDK wallet.
* - Each wallet consists of a number of addresses (`hdWalletParams.activeAddrCount`). A value of 1 indicates
* a single address wallet, while a value >1 is an HD wallet.
* - The maximum transaction history per address can be configured using `hdWalletParams.maxTxHistory`.
* - Addresses are real addresses from mainnet.
* - Stage 1: a number of wallets (`MAX_VU`) are started progressively during a given time (`RAMP_UP_DURATION`).
* The purpose of this stage is to increase the load of idle wallets progressively.
* When each wallet starts, it directly performs the queries normally done by an idle wallet.
* It will NOT do any initialization, address discovery, etc.
* - Stage 2: wallets remain in idle state, performing the idle specific queries.
* The purpose of this stage is to sustain the maximum load for a period of time.
*
* ## Idle state definition
* - Query the tip every 5 seconds (`POLL_INTERVAL`), 4 times (`NUM_TIP_QUERIES`).
* - Query the transactions for all wallet addresses, starting with a block from 1 epoch behind.
* - Query the utxos for all wallet addresses.
* - Wait another 5 seconds (`POLL_INTERVAL`).
* - Repeat.
*
* Other queries normally done by the wallet are not included in this test because they are done rarely (every epoch)
* or only when the wallet is not idle (i.e. is active: sending/receiving transactions):
* - era summaries, protocol params, genesis params
* - delegation & rewards, assets metadata, handles
*
* ## Performance indicators
* - http_req_duration
* - http_req_failed
*/

// eslint-disable-next-line no-undef
const PROVIDER_SERVER_URL = __ENV.PROVIDER_SERVER_URL;
/** URL of the JSON file containing the wallets */
const WALLET_ADDRESSES_URL =
'https://raw.githubusercontent.com/input-output-hk/cardano-js-sdk/master/packages/e2e/test/dump/addresses/mainnet.json';

const MAX_VU = 50;

/** Time span during which the number of wallets doing idle queries increases in a linear fashion */
const RAMP_UP_DURATION = '100s';
/** Time span during which the total number of wallets doing idle queries is maintained */
const STEADY_STATE_DURATION = '150s';

const NUM_TIP_QUERIES = 4;
const POLL_INTERVAL = 5;

/** HD wallet params */
const hdWalletParams = {
/** HD wallet size. The number of addresses with transaction history per wallet. They are queried at discovery time. */
activeAddrCount: 10,
/** Use only addresses with a transaction history up to this value */
maxTxHistory: 100
};

/** Repetitive endpoints */
const TIP_URL = 'network-info/ledger-tip';

export const options = {
ext: {
loadimpact: {
apm: [],
distribution: { 'amazon:de:frankfurt': { loadZone: 'amazon:de:frankfurt', percent: 100 } }
}
},
scenarios: {
Scenario_1: {
exec: 'scenario_1',
executor: 'ramping-vus',
gracefulRampDown: '0s',
gracefulStop: '0s',
stages: [
{ duration: RAMP_UP_DURATION, target: MAX_VU },
{ duration: STEADY_STATE_DURATION, target: MAX_VU }
]
}
},
thresholds: { http_req_duration: ['p(95)<200'], http_req_failed: ['rate<0.02'] }
};

/** equivalent to lodash.chunk */
const chunkArray = (array, chunkSize) => {
const arrayCopy = [...array];
const chunked = [];
while (arrayCopy.length > 0) {
chunked.push(arrayCopy.splice(0, chunkSize));
}
return chunked;
};

/** Util functions for sending the http post requests to cardano-sdk services */
const cardanoHttpPost = (url, body = {}) => {
const opts = { headers: { 'content-type': 'application/json' } };
return http.post(`${PROVIDER_SERVER_URL}/${url}`, JSON.stringify(body), opts);
};

const utxosByAddresses = (addresses) => {
const addressChunks = chunkArray(addresses, 25);
for (const chunk of addressChunks) {
cardanoHttpPost('utxo/utxo-by-addresses', { addresses: chunk });
}
};

/**
*
* @param addresses Bech32 cardano addresses: `Cardano.Address[]`
* @param blockHeightOfLastTx query transactions done starting with this block height.
*/
const txsByAddress = (addresses, blockHeightOfLastTx) => {
const pageSize = 25;
const addressChunks = chunkArray(addresses, pageSize);
for (const chunk of addressChunks) {
let startAt = 0;
let txCount = 0;

do {
const resp = cardanoHttpPost('chain-history/txs/by-addresses', {
addresses: chunk,
blockRange: { lowerBound: blockHeightOfLastTx },
pagination: { limit: pageSize, startAt }
});

if (resp.status !== 200) {
// No point in trying to get the other pages.
// Should we log this? It will show up as if the restoration was quicker, since this wallet did not fetch all the pages
break;
}

const { pageResults } = JSON.parse(resp.body);
startAt += pageSize;
txCount = pageResults.length;
} while (txCount === pageSize);
}
};

/**
* Grab the wallets json file to be used by the scenario.
* Group the addresses per wallet (single address or HD wallets).
*/
export function setup() {
console.log(
`Ramp-up: ${RAMP_UP_DURATION}; Sustain: ${STEADY_STATE_DURATION}; Poll: ${POLL_INTERVAL}s; Block height change every: ${NUM_TIP_QUERIES}`
);
// This call will be part of the statistics. There is no way around it so far: https://github.com/grafana/k6/issues/1321
const res = http.get(WALLET_ADDRESSES_URL);
check(res, { 'get wallets': (r) => r.status >= 200 && r.status < 300 });

const { body: resBodyWallets } = res;
const walletsOrig = JSON.parse(resBodyWallets);
const walletsOrigCount = walletsOrig ? walletsOrig.length : 0;
check(walletsOrigCount, {
'At least one wallet is required to run the test': (count) => count > 0
});
console.log(`Wallet addresses configuration file contains ${walletsOrigCount} addresses`);

// Remove "big transaction history wallets"
const filteredWallets = walletsOrig.filter(({ tx_count }) => tx_count < hdWalletParams.maxTxHistory);
// Create chunks of `activeAddrCount` addresses per HD wallet
const wallets = chunkArray(filteredWallets, hdWalletParams.activeAddrCount);

const requestedAddrCount = MAX_VU * hdWalletParams.activeAddrCount;
const availableAddrCount = filteredWallets.length;
if (availableAddrCount < requestedAddrCount) {
console.warn(
`Requested wallet count * addresses per wallet: (${requestedAddrCount}), is greater than the available addresses: ${availableAddrCount}. Some addresses will be reused`
);
}

const tipRes = cardanoHttpPost(TIP_URL);
check(tipRes, { 'Initial tip query': (r) => r.status >= 200 && r.status < 300 });
const { body } = tipRes;
const { blockNo } = JSON.parse(body);

// When querying transactions, assume the last transaction was done in the previous epoch
const blocksPerEpoch = 20000;
const blockHeightOfLastTx = blockNo - blocksPerEpoch;
check(blockHeightOfLastTx, { 'Block height of last tx (tip - 1 epoch) is valid': (height) => height > 0 });

return { blockHeightOfLastTx, wallets: wallets.slice(0, MAX_VU) };
}

/**
* Each wallet, consisting of `hdWalletParams.activeAddrCount` addresses, polls the tip, then queries:
* - current utxo set for all addresses
* - transaction history since last known transaction block height for all addresses
*/
// eslint-disable-next-line func-style
export function scenario_1({ wallets, blockHeightOfLastTx }) {
// Get the wallet for the current virtual user
// eslint-disable-next-line no-undef
const vu = __VU;
const wallet = wallets[vu % wallets.length]; // each wallet is a collection of addresses
const addresses = wallet.map(({ address }) => address);
for (let i = 0; i < NUM_TIP_QUERIES; i++) {
cardanoHttpPost(TIP_URL);
// No sleep after last query - fetch utxo and tx history immediately
if (i + 1 < NUM_TIP_QUERIES) {
sleep(POLL_INTERVAL);
}
}

txsByAddress(addresses, blockHeightOfLastTx);
utxosByAddresses(addresses);

sleep(POLL_INTERVAL);
}
1 change: 1 addition & 0 deletions packages/e2e/test/k6/scenarios/wallet-creation.test.js
@@ -75,6 +75,7 @@ const TIP_URL = 'network-info/ledger-tip';
/** Grab the wallets json file to be used by the scenario */
export function setup() {
console.log(`Running in ${RUN_MODE} mode`);
// This call will be part of the statistics. There is no way around it so far: https://github.com/grafana/k6/issues/1321
const res = http.batch([WALLET_ADDRESSES_URL, POOL_ADDRESSES_URL]);
check(res, { 'get wallets and pools files': (r) => r.every(({ status }) => status >= 200 && status < 300) });

1 change: 1 addition & 0 deletions packages/e2e/test/k6/scenarios/wallet-restoration.test.js
@@ -107,6 +107,7 @@ export function setup() {
console.log('HD wallet params are:', hdWalletParams);
}

// This call will be part of the statistics. There is no way around it so far: https://github.com/grafana/k6/issues/1321
const res = http.batch([WALLET_ADDRESSES_URL, POOL_ADDRESSES_URL]);
check(res, { 'get wallets and pools files': (r) => r.every(({ status }) => status >= 200 && status < 300) });

1 change: 1 addition & 0 deletions yarn-project.nix
@@ -589,6 +589,7 @@ cacheEntries = {
"@types/json-bigint@npm:1.0.1" = { filename = "@types-json-bigint-npm-1.0.1-1fbfe75fdf-b39e55a811.zip"; sha512 = "b39e55a811f554bd25f1d991bc4fc70655216dff466f21f97160097573a4bc7b478c1907aa5194c79022c115f509f8e4712083c51f57665e7a2de7412ff7801f"; };
"@types/json-schema@npm:7.0.11" = { filename = "@types-json-schema-npm-7.0.11-79462ae5ca-527bddfe62.zip"; sha512 = "527bddfe62db9012fccd7627794bd4c71beb77601861055d87e3ee464f2217c85fca7a4b56ae677478367bbd248dbde13553312b7d4dbc702a2f2bbf60c4018d"; };
"@types/json5@npm:0.0.29" = { filename = "@types-json5-npm-0.0.29-f63a7916bd-e60b153664.zip"; sha512 = "e60b153664572116dfea673c5bda7778dbff150498f44f998e34b5886d8afc47f16799280e4b6e241c0472aef1bc36add771c569c68fc5125fc2ae519a3eb9ac"; };
"@types/k6@npm:0.45.0" = { filename = "@types-k6-npm-0.45.0-9e854909ee-cb42e439a7.zip"; sha512 = "cb42e439a7af950ada2887609362d6da5b528c2dac126fc3fd524aef6c55b078131f6a80a4b618385e7a1948caf473030de1838d010d66a1c65fcaab1e603c6c"; };
"@types/keyv@npm:3.1.4" = { filename = "@types-keyv-npm-3.1.4-a8082ea56b-e009a2bfb5.zip"; sha512 = "e009a2bfb50e90ca9b7c6e8f648f8464067271fd99116f881073fa6fa76dc8d0133181dd65e6614d5fb1220d671d67b0124aef7d97dc02d7e342ab143a47779d"; };
"@types/libsodium-wrappers-sumo@npm:0.7.5" = { filename = "@types-libsodium-wrappers-sumo-npm-0.7.5-b503484acd-27846e49cd.zip"; sha512 = "27846e49cd54556c05011ff475cc6564ce8dde8f9a02a542740e3ebaab7de21ed2dfb4afdc182510d7058d3475f748bab0aa4a41178cd105b9f8618a00f8ef3f"; };
"@types/libsodium-wrappers@npm:0.7.10" = { filename = "@types-libsodium-wrappers-npm-0.7.10-1977488a6a-717054ebcb.zip"; sha512 = "717054ebcb5fa553e378144b8d564bed8b691905c0d4e90b95c64d77ba24ec9fe798cb2c58cd61dad545ceacb1f05ab69b5597217f9829f2da7a23f0688d11d0"; };
8 changes: 8 additions & 0 deletions yarn.lock
@@ -2565,6 +2565,7 @@ __metadata:
"@types/delay": ^3.1.0
"@types/dockerode": ^3.3.8
"@types/jest": ^28.1.2
"@types/k6": ^0.45.0
"@types/lodash": ^4.14.182
"@types/ora": ^3.2.0
"@types/uuid": ^8.3.4
@@ -5794,6 +5795,13 @@ __metadata:
languageName: node
linkType: hard

"@types/k6@npm:^0.45.0":
version: 0.45.0
resolution: "@types/k6@npm:0.45.0"
checksum: cb42e439a7af950ada2887609362d6da5b528c2dac126fc3fd524aef6c55b078131f6a80a4b618385e7a1948caf473030de1838d010d66a1c65fcaab1e603c6c
languageName: node
linkType: hard

"@types/keyv@npm:*, @types/keyv@npm:^3.1.1":
version: 3.1.4
resolution: "@types/keyv@npm:3.1.4"
