ci: add gifting-from-genesis feat for workflows
joshuef committed Apr 24, 2024
1 parent c28d43c · commit c4dd2ba
Showing 3 changed files with 46 additions and 36 deletions.
28 changes: 15 additions & 13 deletions .github/workflows/benchmark-prs.yml
@@ -3,7 +3,7 @@ name: PR Benchmarks
on: pull_request

env:
-CARGO_INCREMENTAL: '0'
+CARGO_INCREMENTAL: "0"
RUST_BACKTRACE: 1
CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
NODE_DATA_PATH: /home/runner/.local/share/safe/node
@@ -43,7 +43,11 @@ jobs:
# it will be better to execute bench test with `local-discovery`,
# to make the measurement results reflect speed improvement or regression more accurately.
- name: Build sn bins
-run: cargo build --release --bins --features local-discovery
+run: cargo build --release --bin safe --bin safenode --features local-discovery
timeout-minutes: 30

+- name: Build faucet bin
+run: cargo build --release --bin faucet --features local-discovery --features="gifting-from-genesis"
+timeout-minutes: 30
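
The split build introduced above can be reproduced locally with plain cargo invocations; a minimal sketch using only the flags taken from this diff, and assuming gifting-from-genesis is a feature declared in the faucet crate's Cargo.toml:

# Build the client and node binaries (local-discovery only)
cargo build --release --bin safe --bin safenode --features local-discovery

# Build the faucet in a separate invocation, additionally enabling the
# gifting-from-genesis feature (assumed to be declared by the faucet crate)
cargo build --release --bin faucet --features local-discovery --features="gifting-from-genesis"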

- name: Start a local network
@@ -143,8 +147,8 @@ jobs:
- name: Alert for client memory usage
uses: benchmark-action/github-action-benchmark@v1
with:
-name: 'Memory Usage of Client during uploading large file'
-tool: 'customSmallerIsBetter'
+name: "Memory Usage of Client during uploading large file"
+tool: "customSmallerIsBetter"
output-file-path: client_memory_usage.json
# Where the previous data file is stored
external-data-json-path: ./cache/client-mem-usage.json
@@ -155,7 +159,7 @@ jobs:
# Enable alert commit comment
comment-on-alert: true
# 200% regression will result in alert
-alert-threshold: '200%'
+alert-threshold: "200%"
# Enable Job Summary for PRs
summary-always: true
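
For reference, the client_memory_usage.json file consumed by the customSmallerIsBetter tool above is expected to be a JSON array of objects with name, unit and value fields (github-action-benchmark's custom-tool format). A minimal sketch of a shell step that writes such a file; the entry name and the number are placeholders, not values used by this workflow:

# Placeholder values for illustration; the real workflow derives the number
# from the client logs before writing the file
peak_mem_usage=900
cat > client_memory_usage.json <<EOF
[
    {
        "name": "peak memory usage during upload (placeholder)",
        "unit": "MB",
        "value": $peak_mem_usage
    }
]
EOF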

@@ -186,7 +190,7 @@ jobs:
uses: benchmark-action/github-action-benchmark@v1
with:
# What benchmark tool the output.txt came from
-tool: 'customBiggerIsBetter'
+tool: "customBiggerIsBetter"
output-file-path: files-benchmark.json
# Where the previous data file is stored
external-data-json-path: ./cache/benchmark-data.json
@@ -197,7 +201,7 @@ jobs:
# Enable alert commit comment
comment-on-alert: true
# 200% regression will result in alert
-alert-threshold: '200%'
+alert-threshold: "200%"
# Enable Job Summary for PRs
summary-always: true

@@ -269,7 +273,7 @@ jobs:
- name: Alert for node memory usage
uses: benchmark-action/github-action-benchmark@v1
with:
-tool: 'customSmallerIsBetter'
+tool: "customSmallerIsBetter"
output-file-path: node_memory_usage.json
# Where the previous data file is stored
external-data-json-path: ./cache/node-mem-usage.json
@@ -282,7 +286,7 @@ jobs:
# Comment on the PR
comment-always: true
# 200% regression will result in alert
-alert-threshold: '200%'
+alert-threshold: "200%"
# Enable Job Summary for PRs
summary-always: true

@@ -369,7 +373,7 @@ jobs:
- name: Alert for swarm_driver long handlings
uses: benchmark-action/github-action-benchmark@v1
with:
-tool: 'customSmallerIsBetter'
+tool: "customSmallerIsBetter"
output-file-path: swarm_driver_long_handlings.json
# Where the previous data file is stored
external-data-json-path: ./cache/swarm_driver_long_handlings.json
@@ -382,7 +386,7 @@ jobs:
# Comment on the PR
comment-always: true
# 200% regression will result in alert
-alert-threshold: '200%'
+alert-threshold: "200%"
# Enable Job Summary for PRs
summary-always: true

@@ -410,8 +414,6 @@ jobs:
- name: install ripgrep
run: sudo apt-get -y install ripgrep



########################
### Benchmark ###
########################
48 changes: 26 additions & 22 deletions .github/workflows/generate-benchmark-charts.yml
@@ -13,7 +13,7 @@ permissions:
contents: write

env:
-CARGO_INCREMENTAL: '0'
+CARGO_INCREMENTAL: "0"
RUST_BACKTRACE: 1
CLIENT_DATA_PATH: /home/runner/.local/share/safe/client
NODE_DATA_PATH: /home/runner/.local/share/safe/node
@@ -28,7 +28,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy
@@ -44,9 +44,13 @@ jobs:
- name: Download 95mb file to be uploaded with the safe client
shell: bash
run: wget https://sn-node.s3.eu-west-2.amazonaws.com/the-test-data.zip

- name: Build node and client
-run: cargo build --release --features local-discovery --bin safenode --bin safe --bin faucet
+run: cargo build --release --features local-discovery --bin safenode --bin safe
timeout-minutes: 30

+- name: Build faucet bin
+run: cargo build --release --bin faucet --features local-discovery --features="gifting-from-genesis"
+timeout-minutes: 30

- name: Start a local network
@@ -91,13 +95,13 @@ jobs:
- name: check files-benchmark.json
shell: bash
run: cat files-benchmark.json

# gh-pages branch is updated and pushed automatically with extracted benchmark data
- name: Store cli files benchmark result
uses: benchmark-action/github-action-benchmark@v1
with:
name: "`safe files` benchmarks"
-tool: 'customBiggerIsBetter'
+tool: "customBiggerIsBetter"
output-file-path: files-benchmark.json
github-token: ${{ secrets.GITHUB_TOKEN }}
auto-push: true
@@ -108,7 +112,7 @@ jobs:
run: cargo run --bin safe --release -- --log-output-dest=data-dir files upload the-test-data.zip --retry-strategy quick
env:
SN_LOG: "all"

#########################
### Stop Network ###
#########################
@@ -130,12 +134,12 @@ jobs:
shell: bash
run: |
peak_mem_usage=$(
rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
awk -F':' '/"memory_used_mb":/{print $2}' |
sort -n |
rg '"memory_used_mb":[^,]*' $NODE_DATA_PATH/*/logs/* -o --no-line-number --no-filename |
awk -F':' '/"memory_used_mb":/{print $2}' |
sort -n |
tail -n 1
)
# Write the node memory usage to a file
echo '[
{
@@ -152,8 +156,8 @@ jobs:
- name: Upload Node Memory Usage
uses: benchmark-action/github-action-benchmark@v1
with:
-name: 'Node memory'
-tool: 'customSmallerIsBetter'
+name: "Node memory"
+tool: "customSmallerIsBetter"
output-file-path: node_memory_usage.json
github-token: ${{ secrets.GITHUB_TOKEN }}
auto-push: true
@@ -163,13 +167,13 @@ jobs:
shell: bash
run: |
peak_mem_usage=$(
rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
awk -F':' '/"memory_used_mb":/{print $2}' |
sort -n |
rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
awk -F':' '/"memory_used_mb":/{print $2}' |
sort -n |
tail -n 1
)
total_mem=$(
rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
rg '"memory_used_mb":[^,]*' $CLIENT_DATA_PATH/logs --glob safe.* -o --no-line-number --no-filename |
awk -F':' '/"memory_used_mb":/ {sum += $2} END {printf "%.0f\n", sum}'
)
num_of_times=$(
@@ -204,8 +208,8 @@ jobs:
- name: Upload Client Memory Usage
uses: benchmark-action/github-action-benchmark@v1
with:
-name: 'Client memory'
-tool: 'customSmallerIsBetter'
+name: "Client memory"
+tool: "customSmallerIsBetter"
output-file-path: client_memory_usage.json
github-token: ${{ secrets.GITHUB_TOKEN }}
auto-push: true
Expand Down Expand Up @@ -252,7 +256,7 @@ jobs:
echo "Total swarm_driver long handling times is: $total_num_of_times"
echo "Total swarm_driver long handling duration is: $total_long_handling ms"
echo "Total average swarm_driver long handling duration is: $average_handling_ms ms"
# Write the node memory usage to a file
echo '[
{
@@ -279,8 +283,8 @@ jobs:
- name: Upload swarm_driver Long Handlings
uses: benchmark-action/github-action-benchmark@v1
with:
-name: 'swarm_driver long handlings'
-tool: 'customSmallerIsBetter'
+name: "swarm_driver long handlings"
+tool: "customSmallerIsBetter"
output-file-path: swarm_driver_long_handlings.json
github-token: ${{ secrets.GITHUB_TOKEN }}
auto-push: true
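The peak-memory figures in this workflow come from the rg/awk/sort/tail pipelines shown above; the same extraction can be tried standalone against a fabricated log line (the path and the log content below are made up for illustration):

# Fabricated log line of the shape the pipeline matches
mkdir -p /tmp/demo-logs
echo 'INFO {"memory_used_mb":123,"cpu":4}' > /tmp/demo-logs/safenode.log

# Same extraction as the workflow: grab every "memory_used_mb" field,
# keep the numeric part, sort numerically and take the largest value
peak_mem_usage=$(
    rg '"memory_used_mb":[^,]*' /tmp/demo-logs/* -o --no-line-number --no-filename |
    awk -F':' '/"memory_used_mb":/{print $2}' |
    sort -n |
    tail -n 1
)
echo "peak memory: ${peak_mem_usage} MB"   # prints: peak memory: 123 MB
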
6 changes: 5 additions & 1 deletion .github/workflows/memcheck.yml
@@ -35,7 +35,11 @@ jobs:
run: sudo apt-get install -y ripgrep

- name: Build binaries
-run: cargo build --release --bins
+run: cargo build --release --bin safe --bin safenode
timeout-minutes: 30

+- name: Build faucet binary with gifting
+run: cargo build --release --bin faucet --features="gifting-from-genesis"
+timeout-minutes: 30
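
If the feature name ever changes, the features a package declares can be inspected before building; a sketch that assumes the faucet package is named sn_faucet (adjust to the real package name in this workspace) and that jq is installed:

# List the features declared by the faucet package; "sn_faucet" is an
# assumed package name, not confirmed by this diff
cargo metadata --no-deps --format-version 1 \
    | jq '.packages[] | select(.name == "sn_faucet") | .features'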

- name: Build tests
