Skip to content
This repository has been archived by the owner on Feb 3, 2023. It is now read-only.

Commit

Permalink
Merge pull request #1923 from holochain/immutable-datastructures-state
Browse files Browse the repository at this point in the history
Use immutable data-structures to make State cloning much cheaper
  • Loading branch information
lucksus committed Nov 27, 2019
2 parents ea419a5 + cfac4d8 commit 19f4aaa
Show file tree
Hide file tree
Showing 8 changed files with 83 additions and 48 deletions.
1 change: 1 addition & 0 deletions CHANGELOG-UNRELEASED.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm

- Exchanged [vanilla thread-pool](https://docs.rs/threadpool/1.7.1/threadpool/) with the futures executor thread-pool [from the futures crate](https://docs.rs/futures/0.3.1/futures/executor/index.html). This enables M:N Future:Thread execution which is much less wasteful than having a thread per future. Number of threads in the pool is kept at the default (of that crate) of number of CPUs. [#1915](https://github.com/holochain/holochain-rust/pull/1915)
- Replace naive timeout implementation (for network queries / direct messages) that uses a thread per timeout with a scheduled job that polls the State and sends timeout actions when needed (reduces number of used threads and thus memory footprint) [#1916](https://github.com/holochain/holochain-rust/pull/1916).
- Use the [im crate](https://docs.rs/im/14.0.0/im/) for `HashMap`s and `HashSet`s used in the redux State. This makes cloning the state much cheaper and improves overall performance. [#1923](https://github.com/holochain/holochain-rust/pull/1923)

### Deprecated

Expand Down
44 changes: 44 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions crates/core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ env_logger = "=0.6.1"
url = { version = "=2.1.0", features = ["serde"] }
rand = "0.7.2"
threadpool = "=1.7.1"
im = { version = "=14.0.0", features = ["serde"] }

[dev-dependencies]
wabt = "=0.7.4"
Expand Down
5 changes: 3 additions & 2 deletions crates/core/src/agent/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,9 @@ use holochain_json_api::{
json::JsonString,
};
use holochain_wasm_utils::api_serialization::crypto::CryptoMethod;
use im::HashMap;
use serde_json;
use std::{collections::HashMap, convert::TryFrom, sync::Arc, time::SystemTime};
use std::{convert::TryFrom, sync::Arc, time::SystemTime};

/// The state-slice for the Agent.
/// Holds the agent's source chain and keys.
Expand Down Expand Up @@ -314,8 +315,8 @@ pub mod tests {
};
use holochain_json_api::json::JsonString;
use holochain_persistence_api::cas::content::AddressableContent;
use im::HashMap;
use serde_json;
use std::collections::HashMap;
use test_utils::mock_signing::mock_signer;

/// dummy agent state
Expand Down
57 changes: 26 additions & 31 deletions crates/core/src/dht/aspect_map.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
use crate::holochain_wasm_utils::holochain_persistence_api::cas::content::AddressableContent;
use holochain_core_types::network::entry_aspect::EntryAspect;
use im::{HashMap, HashSet};
use lib3h_protocol::types::{AspectHash, EntryHash};
use std::collections::{HashMap, HashSet};

use std::collections::HashMap as StdHashMap;
pub type AspectSet = HashSet<AspectHash>;

pub type AspectMapBare = HashMap<EntryHash, AspectSet>;
Expand All @@ -19,12 +19,7 @@ impl AspectMap {
pub fn diff(&self, other: &AspectMap) -> AspectMap {
let self_set = HashSet::<(EntryHash, AspectHash)>::from(self);
let other_set = HashSet::<(EntryHash, AspectHash)>::from(other);
AspectMap::from(
&self_set
.difference(&other_set)
.cloned()
.collect::<HashSet<(EntryHash, AspectHash)>>(),
)
AspectMap::from(&self_set.difference(other_set))
}

pub fn bare(&self) -> &AspectMapBare {
Expand Down Expand Up @@ -81,7 +76,7 @@ impl AspectMap {
.join("\n")
}

pub fn merge(map1: &AspectMap, map2: &AspectMap) -> AspectMap {
pub fn merge(map1: AspectMap, map2: AspectMap) -> AspectMap {
map1.0
.keys()
.chain(map2.0.keys())
Expand All @@ -90,9 +85,8 @@ impl AspectMap {
.0
.get(entry)
.unwrap_or(&HashSet::new())
.union(map2.0.get(entry).unwrap_or(&HashSet::new()))
.cloned()
.collect();
.clone()
.union(map2.0.get(entry).unwrap_or(&HashSet::new()).clone());
(entry.clone(), merged)
})
.collect::<AspectMapBare>()
Expand All @@ -118,10 +112,10 @@ impl From<&AspectMap> for HashSet<(EntryHash, AspectHash)> {
}
}

pub type AspectVecMap = HashMap<EntryHash, Vec<AspectHash>>;
pub type AspectVecMap = StdHashMap<EntryHash, Vec<AspectHash>>;
impl From<AspectMap> for AspectVecMap {
fn from(map: AspectMap) -> AspectVecMap {
let mut new_map = HashMap::new();
let mut new_map = StdHashMap::new();
map.0.into_iter().for_each(|(entry, set)| {
let vec = set.into_iter().collect();
new_map.insert(entry, vec);
Expand Down Expand Up @@ -151,6 +145,7 @@ impl From<&HashSet<(EntryHash, AspectHash)>> for AspectMap {
mod tests {

use super::*;
use im::hashset;
use sim1h::aspect::fixture::content_aspect_fresh;

#[test]
Expand All @@ -169,11 +164,11 @@ mod tests {
fn test_merge_address_maps_merges_entries() {
let mut map1: AspectMapBare = HashMap::new();
let mut map2: AspectMapBare = HashMap::new();
map1.insert("a".into(), vec!["x".into()].into_iter().collect());
map2.insert("b".into(), vec!["y".into()].into_iter().collect());
let (map1, map2) = (map1.into(), map2.into());
let merged = AspectMap::merge(&map1, &map2);
let merged2 = AspectMap::merge(&map2, &map1);
map1.insert("a".into(), hashset![AspectHash::from("x")]);
map2.insert("b".into(), hashset![AspectHash::from("y")]);
let (map1, map2): (AspectMap, AspectMap) = (map1.into(), map2.into());
let merged = AspectMap::merge(map1.clone(), map2.clone());
let merged2 = AspectMap::merge(map2.clone(), map1.clone());
assert_eq!(merged.0, merged2.0);
assert_eq!(merged.0.len(), 2);
assert_eq!(merged.0.get(&EntryHash::from("a")).unwrap().len(), 1);
Expand All @@ -184,14 +179,14 @@ mod tests {
fn test_merge_address_maps_merges_aspects_1() {
let mut map1: AspectMapBare = HashMap::new();
let mut map2: AspectMapBare = HashMap::new();
map1.insert("a".into(), vec!["x".into()].into_iter().collect());
map1.insert("a".into(), hashset!["x".into()]);
map2.insert(
"a".into(),
vec!["x".into(), "y".into()].into_iter().collect(),
hashset![AspectHash::from("x"), AspectHash::from("y")],
);
let (map1, map2) = (map1.into(), map2.into());
let merged = AspectMap::merge(&map1, &map2);
let merged2 = AspectMap::merge(&map1, &map2);
let (map1, map2): (AspectMap, AspectMap) = (map1.into(), map2.into());
let merged = AspectMap::merge(map1.clone(), map2.clone());
let merged2 = AspectMap::merge(map1, map2);
assert_eq!(merged.0, merged2.0);
assert_eq!(merged.0.len(), 1);
assert_eq!(merged.0.get(&EntryHash::from("a")).unwrap().len(), 2);
Expand All @@ -206,24 +201,24 @@ mod tests {
let mut map2: AspectMapBare = HashMap::new();
map1.insert(
"a".into(),
vec!["x".into(), "y".into()].into_iter().collect(),
hashset![AspectHash::from("x"), AspectHash::from("y")],
);
map1.insert(
"b".into(),
vec!["u".into(), "v".into()].into_iter().collect(),
hashset![AspectHash::from("u"), AspectHash::from("v")],
);

map2.insert(
"a".into(),
vec!["y".into(), "z".into()].into_iter().collect(),
hashset![AspectHash::from("y"), AspectHash::from("z")],
);
map2.insert(
"b".into(),
vec!["v".into(), "w".into()].into_iter().collect(),
hashset![AspectHash::from("v"), AspectHash::from("w")],
);
let (map1, map2) = (map1.into(), map2.into());
let merged = AspectMap::merge(&map1, &map2);
let merged2 = AspectMap::merge(&map2, &map1);
let (map1, map2): (AspectMap, AspectMap) = (map1.into(), map2.into());
let merged = AspectMap::merge(map1.clone(), map2.clone());
let merged2 = AspectMap::merge(map2, map1);
assert_eq!(merged.0, merged2.0);
assert_eq!(merged.0.len(), 2);
assert_eq!(merged.0.get(&EntryHash::from("a")).unwrap().len(), 3);
Expand Down
10 changes: 4 additions & 6 deletions crates/core/src/network/handler/lists.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,12 @@ use crate::{
};
use holochain_core_types::entry::Entry;
use holochain_persistence_api::cas::content::{Address, AddressableContent};
use im::HashSet;
use lib3h_protocol::{
data_types::{EntryListData, GetListData},
types::{AspectHash, EntryHash},
};
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use std::sync::Arc;

pub fn handle_get_authoring_list(get_list_data: GetListData, context: Arc<Context>) {
let c = context.clone();
Expand All @@ -37,7 +35,7 @@ pub fn handle_get_authoring_list(get_list_data: GetListData, context: Arc<Contex
}

fn create_authoring_map(context: Arc<Context>) -> AspectMap {
let mut address_map: AspectMapBare = HashMap::new();
let mut address_map: AspectMapBare = AspectMapBare::new();
for entry_address in get_all_public_chain_entries(context.clone()) {
// 1. For every public chain entry we definitely add the content aspect:
let content_aspect = get_content_aspect(&entry_address, context.clone())
Expand Down Expand Up @@ -145,7 +143,7 @@ pub fn handle_get_gossip_list(get_list_data: GetListData, context: Arc<Context>)
.expect("No state present when trying to respond with gossip list");
let authoring_map = create_authoring_map(context.clone());
let holding_map = state.dht().get_holding_map().clone();
let address_map = AspectMap::merge(&authoring_map, &holding_map);
let address_map = AspectMap::merge(authoring_map, holding_map);

let action = Action::RespondGossipList(EntryListData {
space_address: get_list_data.space_address,
Expand Down
6 changes: 2 additions & 4 deletions crates/core/src/network/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@ use boolinator::*;
use holochain_core_types::{error::HolochainError, validation::ValidationPackage};
use holochain_net::p2p_network::P2pNetwork;
use holochain_persistence_api::cas::content::Address;
use im::HashMap;
use snowflake;
use std::{
collections::HashMap,
time::{Duration, SystemTime},
};
use std::time::{Duration, SystemTime};

type Actions = HashMap<ActionWrapper, ActionResponse>;

Expand Down
7 changes: 2 additions & 5 deletions crates/core/src/nucleus/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,12 @@ use holochain_json_api::{
json::JsonString,
};
use holochain_persistence_api::cas::content::{Address, AddressableContent, Content};
use im::{HashMap, HashSet};
use serde::{
de::{Error, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{
collections::{HashMap, HashSet, VecDeque},
convert::TryFrom,
fmt,
};
use std::{collections::VecDeque, convert::TryFrom, fmt};

#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, DefaultJson)]
pub enum NucleusStatus {
Expand Down

0 comments on commit 19f4aaa

Please sign in to comment.