Merge pull request #7 from TrAyZeN/helpers
Add cpa and snr helper functions
kingofpayne committed Apr 8, 2024
2 parents 8e02cc2 + 6923007, commit e5f3e4c
Showing 14 changed files with 406 additions and 304 deletions.
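
The updated benchmarks below replace the hand-rolled Rayon chunking with the new helpers (cpa::cpa, cpa_normal::cpa and compute_snr). As rough orientation, here is a minimal sketch of the two main call shapes, inferred from the benchmark call sites in this diff; the guess range of 256, the target byte 0, the chunk size of 500, and the Hamming-weight S-box leakage model are assumptions carried over from those benchmarks rather than documented API:

// Sketch only: call shapes inferred from benches/cpa.rs and benches/snr.rs in this commit.
use muscat::cpa;
use muscat::leakage::{hw, sbox};
use muscat::processors::compute_snr;
use ndarray::Array2;

// Assumed leakage model: Hamming weight of the S-box output, as in the benchmarks.
fn leakage_model(value: usize, guess: usize) -> usize {
    hw(sbox((value ^ guess) as u8) as usize)
}

pub fn helpers_sketch(leakages: &Array2<f64>, plaintexts: &Array2<u8>, traces: &Array2<i64>) {
    // Chunked, parallel CPA: 256 guesses, target byte 0, chunks of 500 traces
    // (replaces the hand-rolled cpa_parallel removed from benches/cpa.rs).
    let _cpa = cpa::cpa(
        &leakages.map(|&x| x as usize),
        &plaintexts.map(|&x| x as usize),
        256,
        0,
        leakage_model,
        500,
    );

    // Chunked, parallel SNR over 256 classes keyed by the first plaintext byte
    // (replaces the hand-rolled snr_parallel removed from benches/snr.rs).
    let _snr = compute_snr(traces, 256, |i| plaintexts.row(i)[0].into(), 500);
}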
14 changes: 8 additions & 6 deletions Cargo.toml
@@ -10,18 +10,20 @@ readme = "README.md"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
serde_json = "1.0"
serde_json = "1.0.115"
hex = "0.4.3"
npyz = "0.7.4"
ndarray = "0.15.6"
rayon = "1.7.0"
indicatif = "0.17.3"
rayon = "1.10.0"
indicatif = "0.17.8"
ndarray-npy ="0.8.1"
itertools = "*"
itertools = "0.12.1"
thiserror = "1.0.58"

[dev-dependencies]
criterion = "0.5"
ndarray-rand = "0.14"
criterion = "0.5.1"
ndarray-rand = "0.14.0"
anyhow = "1.0.81"

[[bench]]
name = "cpa"
87 changes: 25 additions & 62 deletions benches/cpa.rs
@@ -1,12 +1,11 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use muscat::cpa::Cpa;
use muscat::cpa::{self, Cpa};
use muscat::cpa_normal;
use muscat::leakage::{hw, sbox};
use ndarray::{Array2, ArrayView1, Axis};
use ndarray_rand::rand::{rngs::StdRng, SeedableRng};
use ndarray_rand::rand_distr::Uniform;
use ndarray_rand::RandomExt;
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::iter::zip;

pub fn leakage_model(value: usize, guess: usize) -> usize {
@@ -28,36 +27,6 @@ fn cpa_sequential(leakages: &Array2<f64>, plaintexts: &Array2<u8>) -> Cpa {
cpa
}

fn cpa_parallel(leakages: &Array2<f64>, plaintexts: &Array2<u8>) -> Cpa {
let chunk_size = 500;

let mut cpa = zip(
leakages.axis_chunks_iter(Axis(0), chunk_size),
plaintexts.axis_chunks_iter(Axis(0), chunk_size),
)
.par_bridge()
.map(|(leakages_chunk, plaintexts_chunk)| {
let mut cpa = Cpa::new(leakages.shape()[1], 256, 0, leakage_model);

for i in 0..leakages_chunk.shape()[0] {
cpa.update(
leakages_chunk.row(i).map(|&x| x as usize),
plaintexts_chunk.row(i).map(|&y| y as usize),
);
}

cpa
})
.reduce(
|| Cpa::new(leakages.shape()[1], 256, 0, leakage_model),
|a: Cpa, b| a + b,
);

cpa.finalize();

cpa
}

pub fn leakage_model_normal(value: ArrayView1<usize>, guess: usize) -> usize {
hw(sbox((value[1] ^ guess) as u8) as usize)
}
@@ -82,33 +51,6 @@ fn cpa_normal_sequential(leakages: &Array2<f64>, plaintexts: &Array2<u8>) -> cpa
cpa
}

fn cpa_normal_parallel(leakages: &Array2<f64>, plaintexts: &Array2<u8>) -> cpa_normal::Cpa {
let chunk_size = 500;

let mut cpa = zip(
leakages.axis_chunks_iter(Axis(0), chunk_size),
plaintexts.axis_chunks_iter(Axis(0), chunk_size),
)
.par_bridge()
.map(|(leakages_chunk, plaintexts_chunk)| {
let mut cpa =
cpa_normal::Cpa::new(leakages.shape()[1], chunk_size, 256, leakage_model_normal);
cpa.update(
leakages_chunk.map(|&x| x as f32),
plaintexts_chunk.to_owned(),
);
cpa
})
.reduce(
|| cpa_normal::Cpa::new(leakages.shape()[1], chunk_size, 256, leakage_model_normal),
|x, y| x + y,
);

cpa.finalize();

cpa
}

fn bench_cpa(c: &mut Criterion) {
// Seed rng to get the same output each run
let mut rng = StdRng::seed_from_u64(0);
@@ -118,7 +60,7 @@ fn bench_cpa(c: &mut Criterion) {
group.measurement_time(std::time::Duration::from_secs(60));

for nb_traces in [5000, 10000, 25000].into_iter() {
let leakages = Array2::random_using((nb_traces, 5000), Uniform::new(-2f64, 2f64), &mut rng);
let leakages = Array2::random_using((nb_traces, 5000), Uniform::new(-2., 2.), &mut rng);
let plaintexts = Array2::random_using(
(nb_traces, 16),
Uniform::new_inclusive(0u8, 255u8),
@@ -133,7 +75,18 @@
group.bench_with_input(
BenchmarkId::new("cpa_parallel", nb_traces),
&(&leakages, &plaintexts),
|b, (leakages, plaintexts)| b.iter(|| cpa_parallel(leakages, plaintexts)),
|b, (leakages, plaintexts)| {
b.iter(|| {
cpa::cpa(
&leakages.map(|&x| x as usize),
&plaintexts.map(|&x| x as usize),
256,
0,
leakage_model,
500,
)
})
},
);
// For 25000 traces, 60s of measurement_time is too low
if nb_traces <= 10000 {
@@ -146,7 +99,17 @@
group.bench_with_input(
BenchmarkId::new("cpa_normal_parallel", nb_traces),
&(&leakages, &plaintexts),
|b, (leakages, plaintexts)| b.iter(|| cpa_normal_parallel(leakages, plaintexts)),
|b, (leakages, plaintexts)| {
b.iter(|| {
cpa_normal::cpa(
&leakages.map(|&x| x as f32),
plaintexts,
256,
leakage_model_normal,
500,
)
})
},
);
}

30 changes: 6 additions & 24 deletions benches/snr.rs
@@ -1,40 +1,22 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use muscat::processors::Snr;
use ndarray::{Array2, Axis};
use muscat::processors::{compute_snr, Snr};
use ndarray::{Array1, Array2};
use ndarray_rand::rand::{rngs::StdRng, SeedableRng};
use ndarray_rand::rand_distr::Uniform;
use ndarray_rand::RandomExt;
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::iter::zip;

fn snr_sequential(leakages: &Array2<i64>, plaintexts: &Array2<u8>) -> Snr {
fn snr_sequential(leakages: &Array2<i64>, plaintexts: &Array2<u8>) -> Array1<f64> {
let mut snr = Snr::new(leakages.shape()[1], 256);

for i in 0..leakages.shape()[0] {
snr.process(&leakages.row(i), plaintexts.row(i)[0] as usize);
}

snr
snr.snr()
}

fn snr_parallel(leakages: &Array2<i64>, plaintexts: &Array2<u8>) -> Snr {
let chunk_size = 500;

zip(
leakages.axis_chunks_iter(Axis(0), chunk_size),
plaintexts.axis_chunks_iter(Axis(0), chunk_size),
)
.par_bridge()
.map(|(leakages_chunk, plaintexts_chunk)| {
let mut snr = Snr::new(leakages.shape()[1], 256);

for i in 0..leakages_chunk.shape()[0] {
snr.process(&leakages_chunk.row(i), plaintexts_chunk.row(i)[0] as usize);
}

snr
})
.reduce(|| Snr::new(leakages.shape()[1], 256), |a, b| a + b)
fn snr_parallel(leakages: &Array2<i64>, plaintexts: &Array2<u8>) -> Array1<f64> {
compute_snr(leakages, 256, |i| plaintexts.row(i)[0].into(), 500)
}

fn bench_snr(c: &mut Criterion) {
72 changes: 42 additions & 30 deletions examples/cpa.rs
@@ -1,10 +1,11 @@
use anyhow::Result;
use indicatif::ProgressIterator;
use muscat::cpa_normal::*;
use muscat::leakage::{hw, sbox};
use muscat::util::{progress_bar, read_array_2_from_npy_file, save_array2};
use muscat::util::{progress_bar, read_array2_from_npy_file, save_array2};
use ndarray::*;
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::time::{self};
use std::time;

// leakage model
pub fn leakage_model(value: ArrayView1<usize>, guess: usize) -> usize {
@@ -16,25 +17,25 @@ type FormatTraces = f64;
type FormatMetadata = u8;

#[allow(dead_code)]
fn cpa() {
let start_sample: usize = 0;
let end_sample: usize = 5000;
let size: usize = end_sample - start_sample; // Number of samples
let batch: usize = 500;
fn cpa() -> Result<()> {
let start_sample = 0;
let end_sample = 5000;
let size = end_sample - start_sample; // Number of samples
let patch = 500;
let guess_range = 256; // 2**(key length)
let folder = String::from("../../data/cw");
let dir_l = format!("{folder}/leakages.npy");
let dir_p = format!("{folder}/plaintexts.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file::<FormatTraces>(&dir_l);
let plaintext: Array2<FormatMetadata> = read_array_2_from_npy_file::<FormatMetadata>(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l)?;
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p)?;
let len_traces = leakages.shape()[0];
let mut cpa_parallel = ((0..len_traces).step_by(batch))

let mut cpa_parallel = ((0..len_traces).step_by(patch))
.progress_with(progress_bar(len_traces))
.map(|row| row)
.par_bridge()
.map(|row_number| {
let mut cpa = Cpa::new(size, batch, guess_range, leakage_model);
let range_rows = row_number..row_number + batch;
let mut cpa = Cpa::new(size, patch, guess_range, leakage_model);
let range_rows = row_number..row_number + patch;
let range_samples = start_sample..end_sample;
let sample_traces = leakages
.slice(s![range_rows.clone(), range_samples])
@@ -45,36 +46,41 @@ fn cpa() {
cpa
})
.reduce(
|| Cpa::new(size, batch, guess_range, leakage_model),
|| Cpa::new(size, patch, guess_range, leakage_model),
|x, y| x + y,
);

cpa_parallel.finalize();
println!("Guessed key = {}", cpa_parallel.pass_guess());
save_array2("results/corr.npy", cpa_parallel.pass_corr_array().view());

save_array2("results/corr.npy", cpa_parallel.pass_corr_array().view())?;

Ok(())
}

#[allow(dead_code)]
fn success() {
let start_sample: usize = 0;
let end_sample: usize = 5000;
let size: usize = end_sample - start_sample; // Number of samples
let batch: usize = 500;
fn success() -> Result<()> {
let start_sample = 0;
let end_sample = 5000;
let size = end_sample - start_sample; // Number of samples
let patch = 500;
let guess_range = 256; // 2**(key length)
let folder = String::from("../data/log_584012"); // "../../../intenship/scripts/log_584012"
let nfiles = 13; // Number of files in the directory. TBD: Automating this value
let rank_traces: usize = 1000;
let mut cpa = Cpa::new(size, batch, guess_range, leakage_model);
let rank_traces = 1000;

let mut cpa = Cpa::new(size, patch, guess_range, leakage_model);

cpa.success_traces(rank_traces);
for i in (0..nfiles).progress() {
let dir_l = format!("{folder}/l/{i}.npy");
let dir_p = format!("{folder}/p/{i}.npy");
let leakages: Array2<FormatTraces> = read_array_2_from_npy_file::<FormatTraces>(&dir_l);
let plaintext: Array2<FormatMetadata> =
read_array_2_from_npy_file::<FormatMetadata>(&dir_p);
let leakages = read_array2_from_npy_file::<FormatTraces>(&dir_l)?;
let plaintext = read_array2_from_npy_file::<FormatMetadata>(&dir_p)?;
let len_leakages = leakages.shape()[0];
for row in (0..len_leakages).step_by(batch) {
for row in (0..len_leakages).step_by(patch) {
let range_samples = start_sample..end_sample;
let range_rows: std::ops::Range<usize> = row..row + batch;
let range_rows: std::ops::Range<usize> = row..row + patch;
let range_metadat = 0..plaintext.shape()[1];
let sample_traces = leakages
.slice(s![range_rows.clone(), range_samples])
@@ -84,14 +90,20 @@ fn success() {
cpa.update_success(sample_traces, sample_metadata);
}
}

cpa.finalize();
println!("Guessed key = {}", cpa.pass_guess());

// save corr key curves in npy
save_array2("results/success.npy", cpa.pass_rank().view());
save_array2("results/success.npy", cpa.pass_rank().view())?;

Ok(())
}

fn main() {
fn main() -> Result<()> {
let t = time::Instant::now();
cpa();
cpa()?;
println!("{:?}", t.elapsed());

Ok(())
}
