From 6052396f8f3946b70945640d23c01d3d4ebd853d Mon Sep 17 00:00:00 2001 From: Ben Greenier Date: Fri, 29 Oct 2021 15:17:55 -0700 Subject: [PATCH] feat(enum_pipeline): Add initial implementation --- .actrc | 1 + .github/dependabot.yml | 11 + .github/workflows/ci.yml | 33 +++ .github/workflows/release.yml | 53 +++++ .gitignore | 12 ++ Cargo.toml | 14 ++ Justfile | 28 +++ LICENSE | 21 ++ README.md | 85 ++++++++ derive/Cargo.toml | 15 ++ derive/src/impls.rs | 169 +++++++++++++++ derive/src/lib.rs | 27 +++ rust-toolchain.toml | 3 + src/lib.rs | 396 ++++++++++++++++++++++++++++++++++ 14 files changed, 868 insertions(+) create mode 100644 .actrc create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 Justfile create mode 100644 LICENSE create mode 100644 README.md create mode 100644 derive/Cargo.toml create mode 100644 derive/src/impls.rs create mode 100644 derive/src/lib.rs create mode 100644 rust-toolchain.toml create mode 100644 src/lib.rs diff --git a/.actrc b/.actrc new file mode 100644 index 0000000..0132374 --- /dev/null +++ b/.actrc @@ -0,0 +1 @@ +-P ubuntu-latest=ghcr.io/catthehacker/ubuntu:rust-18.04 diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b207453 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# Basic dependabot.yml file with rust (cargo) configuration. 
+ +version: 2 +updates: + # Enable version updates for cargo (crates.io) + - package-ecosystem: "cargo" + # Look for `Cargo.toml` and `lock` files in the `root` directory + directory: "/" + # Check the registry for updates every day (weekdays) + schedule: + interval: "daily" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..cd3f324 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,33 @@ +# This is a basic workflow to ensure code builds and passes tests. + +name: CI + +# Controls when the workflow will run +on: + # Triggers the workflow on pull_request and push but only when the target is the main branch + pull_request: + branches: [main] + push: + branches: [main] + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checkout our code + - uses: actions/checkout@v2 + # Install Just, so we can use our Justfile in the action + - run: cargo install just + # Install necessary tools + - run: just install-tools + # Check our code + - run: just check + # Run tests + - run: just test diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5a0552d --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,53 @@ +# This is a basic workflow to generate release artifacts for rust projects +# It requires a [Repository secret](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) +# For `cargo publish` that can be obtained [here](https://crates.io/me) +# CARGO_REGISTRY_TOKEN: + +name: Release + +# Controls when the workflow will run +on: + # Triggers 
the workflow on push but only for the main branch + push: + branches: [main] + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "release" + release: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Use `release-please` to track changes and generate release PRs + - uses: GoogleCloudPlatform/release-please-action@v2 + id: release + with: + release-type: rust + # The name of your crate + package-name: "enum_pipeline" + # The logic below handles the crates.io publication: + - uses: actions/checkout@v2 + # these if statements ensure that a publication only occurs when + # a new release is created: + if: ${{ steps.release.outputs.release_created }} + # Install Just, so we can use our Justfile in the action + - run: cargo install just + if: ${{ steps.release.outputs.release_created }} + # Install necessary tools + - run: just install-tools + if: ${{ steps.release.outputs.release_created }} + # Check our code + - run: just check + if: ${{ steps.release.outputs.release_created }} + # Run tests + - run: just test + if: ${{ steps.release.outputs.release_created }} + # Publish the crate (note: release-please will have updated the Cargo.toml for us, so it should already have the correct version) + - run: just publish + if: ${{ steps.release.outputs.release_created }} + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f91aba3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +# Generated by Cargo +# will have compiled files and executables +/target/ + + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here
https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + + +# These are backup files generated by rustfmt +**/*.rs.bk diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..e53f018 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "enum_pipeline" +description = "Provides a way to use enums to describe and execute ordered data pipelines." +license = "MIT" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +enum_pipeline_derive = {path = "derive", version = "0.1.0"} + +[workspace] +members = ["derive"] diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..7d63bf3 --- /dev/null +++ b/Justfile @@ -0,0 +1,28 @@ +# You'll need just to get started: `cargo install just` +# just manual: https://github.com/casey/just/#readme + +_default: + @just --list + + +# Installs tools needed for other `just` recipes +install-tools: + cargo install cargo-hack cargo-bump cargo-workspaces + + +# Checks (using `clippy`) all crates across all features +check: + cargo hack --feature-powerset --exclude-no-default-features clippy --locked -- -D warnings + +# Tests all crates across all features +test: + cargo hack --feature-powerset --exclude-no-default-features test --locked + +# Sets the version of the crate to `version` +set-version version: + cargo bump {{version}} + + +# Attempts to publish the crate using `cargo workspaces` +publish: + cargo workspaces publish --from-git --no-git-commit --no-git-push --no-git-tag -y diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..92ca7cc --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 bengreenier + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..53e594a --- /dev/null +++ b/README.md @@ -0,0 +1,85 @@ +# enum-pipeline + +Provides a way to use enums to describe and execute ordered data pipelines. 🦀🐾 + +[![CI](https://github.com/bengreenier/enum-pipeline/actions/workflows/ci.yml/badge.svg)](https://github.com/bengreenier/enum-pipeline/actions/workflows/ci.yml) +[![Crates.io](https://img.shields.io/crates/d/enum-pipeline)](https://crates.io/crates/enum-pipeline) +[![docs.rs](https://img.shields.io/docsrs/enum-pipeline)](https://docs.rs/enum-pipeline) +[![dependency status](https://deps.rs/repo/github/bengreenier/enum-pipeline/status.svg)](https://deps.rs/repo/github/bengreenier/enum-pipeline) + +I needed a succinct way to describe 2d pixel map operations for a game I'm working on. I wanted callers to be able to easily determine all possible operations (hence `enum`), with per-operation data (hence variants), and their operation-specific logic. This is what I came up with! + +## Quickstart + +Some quick examples to get you started. 
For more information see [docs.rs/enum_pipeline](https://docs.rs/enum_pipeline) and [docs.rs/enum_pipeline_derive](https://docs.rs/enum_pipeline_derive). + +### Derive + +``` +#[derive(Default)] +struct MacroMutRefData { + a_count: i32, + b_count: i32, +} + +#[derive(ExecuteWithMut)] +#[execute_with(MacroMutRefData)] +enum MacroMutRefPipeline { + #[handler(handle_a)] + A(i32), + #[handler(handle_b)] + B, +} + +impl MacroMutRefPipeline { + fn handle_a(i: i32, arg: &mut MacroMutRefData) { + arg.a_count += 1; + } + + fn handle_b(arg: &mut MacroMutRefData) { + arg.b_count += 1; + } +} +``` + +Then create and execute some pipelines: + +``` +let mut arg = MacroMutRefData::default(); +vec![MacroMutRefPipeline::A(23), MacroMutRefPipeline::B].execute_with_mut(&mut arg); +``` + +### Manual + +``` +#[derive(Default)] +struct MutRefData { + a_count: i32, + b_count: i32, +} + +enum MutRefPipeline { + A(i32), + B, +} + +impl ExecuteWithMut for MutRefPipeline { + fn execute_with_mut(self, arg: &mut MutRefData) { + match self { + MutRefPipeline::A(i) => arg.a_count += 1, + MutRefPipeline::B => arg.b_count += 1, + } + } +} +``` + +Then create and execute some pipelines: + +``` +let mut arg = MutRefData::default(); +vec![MutRefPipeline::A(23), MutRefPipeline::B].execute_with_mut(&mut arg); +``` + +## License + +MIT diff --git a/derive/Cargo.toml b/derive/Cargo.toml new file mode 100644 index 0000000..cf501e8 --- /dev/null +++ b/derive/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "enum_pipeline_derive" +description = "Provides derive macros for enum_pipeline." 
+license = "MIT" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.30" +quote = "1.0.10" +syn = {version = "1.0.80", features = ["full"]} diff --git a/derive/src/impls.rs b/derive/src/impls.rs new file mode 100644 index 0000000..c23aea5 --- /dev/null +++ b/derive/src/impls.rs @@ -0,0 +1,169 @@ +use std::fmt::Debug; + +use proc_macro2::{Ident, TokenStream}; +use quote::{quote, ToTokens}; +use syn::{Arm, Attribute, Data, DeriveInput}; + +const HANDLER_ATTR_NAME: &str = "handler"; +const ARG_ATTR_NAME: &str = "execute_with"; + +#[derive(Debug)] +enum Opts<'a> { + None, + RefParam(&'a str), + RefMutParam(&'a str), +} + +#[derive(Debug)] +struct IncrementalId { + from: i32, +} + +fn base_derive_macro(input: DeriveInput, opts: Opts) -> TokenStream { + let enum_ident = input.ident; + + let variants = match input.data { + Data::Enum(e) => e.variants, + _ => panic!("Only `enum` types are supported"), + }; + + let arms = variants + .into_iter() + .map(|variant| { + let handler_attrs: Vec = variant + .attrs + .into_iter() + .filter(|attr| matches!(attr.path.get_ident(), Some(ident) if ident == HANDLER_ATTR_NAME)) + .collect(); + + if handler_attrs.len() != 1 { + panic!( + "Variant `{}` is missing attribute #[handler(your_handler_function)]", + variant.ident + ); + } + + let handler_attr = &handler_attrs[0]; + let handler_token = handler_attr.tokens.to_string(); + let handler_name = match handler_token[1..handler_token.len() - 1].to_string() { + s if s.contains("::") => s, + u => format!("{}::{}", enum_ident.to_string(), u), + }; + + let field_placeholders: Vec = variant + .fields + .into_iter() + .enumerate() + .map(|(index, field)| match field.ident { + Some(ident) => ident.to_string(), + None => format!("__{}", index + 1), + }) + .collect(); + + // TODO(bengreenier): This could be cleaned up now that deeper inspection of
ident is no longer needed + let handler_pipeline_arg = match &opts { + Opts::None => "".to_string(), + Opts::RefParam(ident) => ident.to_string(), + Opts::RefMutParam(ident) => ident.to_string(), + }; + + let arm_text = match field_placeholders.len() { + 0 => format!( + "{}::{} => {}({})", + enum_ident, variant.ident, handler_name, handler_pipeline_arg + ), + _ => { + let pl = field_placeholders.join(","); + let mut pl_with_arg = field_placeholders; + pl_with_arg.extend_from_slice(&[handler_pipeline_arg]); + + format!( + "{}::{}({}) => {}({})", + enum_ident, + variant.ident, + pl, + handler_name, + pl_with_arg.join(",") + ) + } + }; + + syn::parse_str::(&arm_text).expect("Failed to generate a variant arm") + }) + .collect::>(); + + quote! { + match self { + #(#arms),* + } + } +} + +fn parse_argtype(attrs: &[Attribute], ident: &Ident) -> Ident { + let arg_type_attrs: Vec<&Attribute> = attrs + .iter() + .filter(|attr| matches!(attr.path.get_ident(), Some(ident) if ident == ARG_ATTR_NAME)) + .collect(); + + if arg_type_attrs.len() != 1 { + panic!( + "Enum `{}` is missing attribute #[execute_with(your_arg_type)]", + ident + ); + } + + let arg_type_attr = &arg_type_attrs[0]; + let arg_type_token = arg_type_attr.tokens.to_string(); + let arg_type_name = arg_type_token[1..arg_type_token.len() - 1].to_string(); + + syn::parse_str::(&arg_type_name) + .unwrap_or_else(|_| panic!("Failed to parse execute_with attribute on Enum `{}`", ident)) +} + +pub fn execute_derive_macro(input: DeriveInput) -> TokenStream { + let enum_ident = input.ident.clone(); + let matcher = base_derive_macro(input, Opts::None); + + quote!
{ + #[automatically_derived] + impl Execute for #enum_ident { + fn execute(self) { + #matcher + } + } + } +} + +pub fn execute_with_derive_macro(input: DeriveInput) -> TokenStream { + let enum_ident = input.ident.clone(); + let arg_type = parse_argtype(&input.attrs, &input.ident); + let matcher = base_derive_macro(input, Opts::RefParam("args")); + + let arg_type_ts = arg_type.into_token_stream(); + + quote! { + #[automatically_derived] + impl ExecuteWith<#arg_type_ts> for #enum_ident { + fn execute_with(self, args: &#arg_type_ts) { + #matcher + } + } + } +} + +pub fn execute_with_mut_derive_macro(input: DeriveInput) -> TokenStream { + let enum_ident = input.ident.clone(); + let arg_type = parse_argtype(&input.attrs, &input.ident); + let matcher = base_derive_macro(input, Opts::RefMutParam("args")); + + let arg_type_ts = arg_type.into_token_stream(); + + quote! { + #[automatically_derived] + impl ExecuteWithMut<#arg_type_ts> for #enum_ident { + fn execute_with_mut(self, args: &mut #arg_type_ts) { + #matcher + } + } + } +} diff --git a/derive/src/lib.rs b/derive/src/lib.rs new file mode 100644 index 0000000..7f3e00b --- /dev/null +++ b/derive/src/lib.rs @@ -0,0 +1,27 @@ +use proc_macro::TokenStream; +use syn::{parse_macro_input, DeriveInput}; + +use impls::*; + +mod impls; + +#[proc_macro_derive(Execute, attributes(handler))] +pub fn derive_execute(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + + execute_derive_macro(input).into() +} + +#[proc_macro_derive(ExecuteWith, attributes(handler, execute_with))] +pub fn derive_execute_with(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + + execute_with_derive_macro(input).into() +} + +#[proc_macro_derive(ExecuteWithMut, attributes(handler, execute_with))] +pub fn derive_execute_with_mut(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + + execute_with_mut_derive_macro(input).into() +} diff 
--git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..0dfa164 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,3 @@ +[toolchain] +channel = "1.56.0" +components = [ "rustfmt", "clippy" ] diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..25de7fc --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,396 @@ +/// Provides an execute handler for pipelines. +pub trait Execute { + /// Execute a pipeline call to this instance. + /// Responsible for invoking the relevant handler(s). + fn execute(self); +} + +/// Provides an execute handler for pipelines, with an argument of type `TArg`. +pub trait ExecuteWith { + /// Execute a pipeline call to this instance with an argument. + /// Responsible for invoking the relevant handler(s). + fn execute_with(self, arg: &TArg); +} + +/// Provides an Execute handler for pipelines, with a mutable argument of type `TArg`. +pub trait ExecuteWithMut { + /// Execute a pipeline call to this instance with a mutable argument. + /// Responsible for invoking the relevant handler(s).
+ fn execute_with_mut(self, arg: &mut TArg); +} + +/// Blanket implementation of the [`Execute`] trait for any type +/// that can be converted to an [`Iterator`] over some type that +/// also implements [`Execute`] +/// +/// ## Example +/// +/// ``` +/// use enum_pipeline::Execute; +/// use std::cell::RefCell; +/// +/// enum Operations<'a> { +/// AddOne(&'a RefCell), +/// AddTwo(&'a RefCell), +/// } +/// +/// impl Execute for Operations<'_> { +/// fn execute(self) { +/// match self { +/// Operations::AddOne(cell) => *cell.borrow_mut() += 1, +/// Operations::AddTwo(cell) => *cell.borrow_mut() += 2, +/// } +/// } +/// } +/// +/// let acc = RefCell::new(0u32); +/// let my_op_pipeline = vec![ +/// Operations::AddOne(&acc), +/// Operations::AddTwo(&acc), +/// Operations::AddTwo(&acc), +/// ]; +/// +/// my_op_pipeline.execute(); +/// assert_eq!(5, *acc.borrow()); +/// ``` +impl Execute for T +where + T: IntoIterator, + T::Item: Execute, +{ + fn execute(self) { + // This is morally equivalent to a for loop or + // a while let binding, but [`for_each`] has the opportunity + // to be quicker in some cases if `T` is an adapter + // like [`Chain`] + self.into_iter().for_each(move |item| item.execute()); + } +} + +/// Blanket implementation of the [`ExecuteWith`] trait for any type +/// that can be converted to an [`Iterator`] over some type that +/// also implements [`ExecuteWith`] +/// +/// ## Example +/// +/// ``` +/// use enum_pipeline::ExecuteWith; +/// use std::cell::RefCell; +/// +/// enum Operations { +/// Allocate(f32, f32), +/// Init, +/// Run(f32), +/// } +/// +/// impl ExecuteWith> for Operations { +/// fn execute_with(self, arg: &RefCell) { +/// match self { +/// Operations::Allocate(_, _) => arg.borrow_mut().push_str("[alloc]"), +/// Operations::Init => arg.borrow_mut().push_str("[init]"), +/// Operations::Run(_) => arg.borrow_mut().push_str("[run]"), +/// } +/// } +/// } +/// +/// let my_op_pipeline = vec![ +/// Operations::Init, +/// Operations::Allocate(1.0, 
1.0), +/// Operations::Run(1.0), +/// ]; +/// +/// let arg = RefCell::new(String::from("")); +/// my_op_pipeline.execute_with(&arg); +/// assert_eq!(*arg.borrow(), String::from("[init][alloc][run]")); +/// ``` +impl ExecuteWith for T +where + T: IntoIterator, + T::Item: ExecuteWith, +{ + fn execute_with(self, arg: &TArg) { + // This is morally equivalent to a for loop or + // a while let binding, but [`for_each`] has the opportunity + // to be quicker in some cases if `T` is an adapter + // like [`Chain`] + self.into_iter() + .for_each(move |item| item.execute_with(arg)); + } +} + +/// Blanket implementation of the [`ExecuteWithMut`] trait for any type +/// that can be converted to an [`Iterator`] over some type that +/// also implements [`ExecuteWithMut`] +/// +/// ## Example +/// +/// ``` +/// use enum_pipeline::ExecuteWithMut; +/// +/// enum Operations { +/// Allocate(f32, f32), +/// Init, +/// Run(f32), +/// } +/// +/// impl ExecuteWithMut for Operations where T: std::ops::AddAssign { +/// fn execute_with_mut(self, arg: &mut T) { +/// match self { +/// Operations::Allocate(_, _) => *arg += 2, +/// Operations::Init => *arg += 3, +/// Operations::Run(_) => *arg += 5, +/// } +/// } +/// } +/// +/// fn do_work_with_mut() { +/// let my_op_pipeline = vec![ +/// Operations::Init, +/// Operations::Allocate(1.0, 1.0), +/// Operations::Run(1.0), +/// ]; +/// +/// let mut acc = 0; +/// my_op_pipeline.execute_with_mut(&mut acc); +/// assert_eq!(acc, 10); +/// } +/// ``` +impl ExecuteWithMut for T +where + T: IntoIterator, + T::Item: ExecuteWithMut, +{ + fn execute_with_mut(self, arg: &mut TArg) { + // This is morally equivalent to a for loop or + // a while let binding, but [`for_each`] has the opportunity + // to be quicker in some cases if `T` is an adapter + // like [`Chain`] + self.into_iter() + .for_each(move |item| item.execute_with_mut(arg)); + } +} + +#[cfg(test)] +mod tests { + use crate::{Execute, ExecuteWith, ExecuteWithMut}; + use 
enum_pipeline_derive::{Execute, ExecuteWith, ExecuteWithMut}; + + #[derive(Execute)] + enum VoidDispatchPipeline { + #[handler(VoidDispatchPipeline::handle_one)] + One, + #[handler(handle_two)] + Two, + } + + static mut VOID_ONE_COUNT: i32 = 0; + static mut VOID_TWO_COUNT: i32 = 0; + + impl VoidDispatchPipeline { + fn handle_one() { + unsafe { + VOID_ONE_COUNT += 1; + } + } + + fn handle_two() { + unsafe { + VOID_TWO_COUNT += 1; + } + } + } + + #[test] + fn void_dispatch_works() { + let pipeline = vec![VoidDispatchPipeline::One, VoidDispatchPipeline::Two]; + + pipeline.execute(); + + unsafe { + assert_eq!(1, VOID_ONE_COUNT); + assert_eq!(1, VOID_TWO_COUNT); + } + } + + enum RefDataPipeline { + One(i32), + Two, + } + + static mut REF_ONE_VALUE: i32 = 0; + static mut REF_TWO_COUNT: i32 = 0; + + struct RefDataPipelineData { + mult: i32, + } + + impl RefDataPipeline { + fn handle_one(v: i32, arg: &RefDataPipelineData) { + unsafe { + REF_ONE_VALUE += v * arg.mult; + } + } + + fn handle_two(_arg: &RefDataPipelineData) { + unsafe { + REF_TWO_COUNT += 1; + } + } + } + + impl ExecuteWith for RefDataPipeline { + fn execute_with(self, arg: &RefDataPipelineData) { + match self { + RefDataPipeline::One(f) => RefDataPipeline::handle_one(f, arg), + RefDataPipeline::Two => RefDataPipeline::handle_two(arg), + } + } + } + + #[test] + fn ref_data_pipeline_works() { + let pipeline = vec![RefDataPipeline::One(24), RefDataPipeline::Two]; + + let data = RefDataPipelineData { mult: 2 }; + + pipeline.execute_with(&data); + + unsafe { + assert_eq!(48, REF_ONE_VALUE); + assert_eq!(1, REF_TWO_COUNT); + } + } + + enum MutDataPipeline { + One(i32), + Two, + } + + #[derive(Default)] + struct MutDataPipelineData { + one_value: i32, + two_count: i32, + } + + // no macro yet, srry + impl ExecuteWithMut for MutDataPipeline { + fn execute_with_mut(self, arg: &mut MutDataPipelineData) { + match self { + MutDataPipeline::One(i) => arg.one_value += i, + MutDataPipeline::Two => arg.two_count += 1, + } + 
} + } + + #[test] + fn mut_data_pipeline_works() { + let pipeline = vec![MutDataPipeline::One(12), MutDataPipeline::Two]; + + let mut data = MutDataPipelineData::default(); + pipeline.execute_with_mut(&mut data); + + assert_eq!(12, data.one_value); + assert_eq!(1, data.two_count); + } + + struct MacroRefPipelineData {} + + #[derive(ExecuteWith)] + #[execute_with(MacroRefPipelineData)] + enum MacroRefPipeline { + #[handler(handle_a)] + A, + #[handler(handle_b)] + B, + } + + static mut MACRO_REF_ONE_COUNT: i32 = 0; + static mut MACRO_REF_TWO_COUNT: i32 = 0; + + impl MacroRefPipeline { + fn handle_a(_data: &MacroRefPipelineData) { + unsafe { + MACRO_REF_ONE_COUNT += 1; + } + } + + fn handle_b(_data: &MacroRefPipelineData) { + unsafe { + MACRO_REF_TWO_COUNT += 1; + } + } + } + + #[test] + fn macro_ref_pipeline_works() { + vec![MacroRefPipeline::A, MacroRefPipeline::B].execute_with(&MacroRefPipelineData {}); + + unsafe { + assert_eq!(1, MACRO_REF_ONE_COUNT); + assert_eq!(1, MACRO_REF_TWO_COUNT); + } + } + + #[derive(Default)] + struct MacroMutRefData { + a_count: i32, + b_count: i32, + } + + #[derive(ExecuteWithMut)] + #[execute_with(MacroMutRefData)] + enum MacroMutRefPipeline { + #[handler(handle_a)] + A(i32), + #[handler(handle_b)] + B, + } + + impl MacroMutRefPipeline { + fn handle_a(_i: i32, arg: &mut MacroMutRefData) { + arg.a_count += 1; + } + + fn handle_b(arg: &mut MacroMutRefData) { + arg.b_count += 1; + } + } + + #[test] + fn macro_mut_pipeline_works() { + let mut arg = MacroMutRefData::default(); + vec![MacroMutRefPipeline::A(23), MacroMutRefPipeline::B].execute_with_mut(&mut arg); + + assert_eq!(1, arg.a_count); + assert_eq!(1, arg.b_count); + } + + #[derive(Default)] + struct MutRefData { + a_count: i32, + b_count: i32, + } + + enum MutRefPipeline { + A(i32), + B, + } + + impl ExecuteWithMut for MutRefPipeline { + fn execute_with_mut(self, arg: &mut MutRefData) { + match self { + MutRefPipeline::A(_i) => arg.a_count += 1, + MutRefPipeline::B => arg.b_count 
+= 1, + } + } + } + + #[test] + fn _mut_pipeline_works() { + let mut arg = MutRefData::default(); + vec![MutRefPipeline::A(23), MutRefPipeline::B].execute_with_mut(&mut arg); + + assert_eq!(1, arg.a_count); + assert_eq!(1, arg.b_count); + } +}