
Commit

Update geo, and copy in some popgetter code here temporarily, while the upstream repo is in flux.
dabreegster committed Nov 27, 2023
1 parent 26d68c7 commit 817e220
Showing 8 changed files with 249 additions and 67 deletions.
182 changes: 120 additions & 62 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions Cargo.toml
@@ -17,6 +17,7 @@ members = [
"map_model",
"piggyback",
"popdat",
"popgetter",
"raw_map",
"sim",
"synthpop",
@@ -37,7 +38,7 @@ opt-level = 3
# Specify the versions for common dependencies just once here, instead of
# repeating in a bunch of crates
[workspace.dependencies]
anyhow = "1.0.69"
anyhow = "1.0.75"
bincode = "1.3.1"
colorous = "1.0.9"
contour = "0.7.0"
@@ -46,7 +47,7 @@ flate2 = "1.0.26"
fs-err = "2.9.0"
futures = { version = "0.3.27"}
futures-channel = { version = "0.3.29"}
geo = "0.26.0"
geo = "0.27.0"
geojson = { version = "0.24.1", features = ["geo-types"] }
geom = { git = "https://github.com/a-b-street/geom" }
getrandom = "0.2.11"
2 changes: 1 addition & 1 deletion convert_osm/Cargo.toml
@@ -14,7 +14,7 @@ geom = { workspace = true }
kml = { path = "../kml" }
log = { workspace = true }
osm2streets = { git = "https://github.com/a-b-street/osm2streets" }
-popgetter = { git = "https://github.com/dabreegster/popgetter/" }
+popgetter = { path = "../popgetter" }
raw_map = { path = "../raw_map" }
serde = { workspace = true }
streets_reader = { git = "https://github.com/a-b-street/osm2streets" }
2 changes: 1 addition & 1 deletion map_model/Cargo.toml
@@ -17,7 +17,7 @@ log = { workspace = true }
lyon = "1.0.1"
md5 = "0.7.0"
petgraph = { version = "0.6.4", features=["serde-1"] }
-popgetter = { git = "https://github.com/dabreegster/popgetter/" }
+popgetter = { path = "../popgetter" }
rand = { workspace = true }
rand_xorshift = { workspace = true }
raw_map = { path = "../raw_map" }
13 changes: 13 additions & 0 deletions popgetter/Cargo.toml
@@ -0,0 +1,13 @@
[package]
name = "popgetter"
version = "0.1.0"
edition = "2021"

[dependencies]
anyhow = { workspace = true }
fs-err = { workspace = true }
geo = { workspace = true }
geo-types = "0.7.12"
geojson = { workspace = true }
serde = { workspace = true }
topojson = { git = "https://github.com/georust/topojson" }
1 change: 1 addition & 0 deletions popgetter/README.md
@@ -0,0 +1 @@
Temporarily, old code from <https://github.com/Urban-Analytics-Technology-Platform/popgetter> is copied here to upgrade dependencies. When the upstream repo is ready again, depend on it.
109 changes: 109 additions & 0 deletions popgetter/src/lib.rs
@@ -0,0 +1,109 @@
use std::time::Instant;

use anyhow::{bail, Result};
use geo::Intersects;
use geojson::Feature;
use serde::{Deserialize, Serialize};
use topojson::{to_geojson, TopoJson};

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CensusZone {
    // England: OA11CD
    pub id: String,

    // (England-only for now)
    // 0 cars or vans per household. See https://www.ons.gov.uk/datasets/TS045/editions/2021/versions/3 for details.
    pub cars_0: u16,
    pub cars_1: u16,
    pub cars_2: u16,
    // 3 or more cars or vans per household
    pub cars_3: u16,
}

impl CensusZone {
    // Assumes "3 or more" just means 3
    pub fn total_cars(&self) -> u16 {
        self.cars_1 + 2 * self.cars_2 + 3 * self.cars_3
    }
}

/// Clips existing TopoJSON files to the given boundary. All polygons are in WGS84.
pub fn clip_zones(
    topojson_path: &str,
    boundary: geo::Polygon<f64>,
) -> Result<Vec<(geo::Polygon<f64>, CensusZone)>> {
    let gj = load_all_zones_as_geojson(topojson_path)?;

    let start = Instant::now();
    let mut output = Vec::new();
    for gj_feature in gj {
        let geom: geo::Geometry<f64> = gj_feature.clone().try_into()?;
        if boundary.intersects(&geom) {
            let polygon = match geom {
                geo::Geometry::Polygon(p) => p,
                // TODO What're these, and what should we do with them?
                geo::Geometry::MultiPolygon(mut mp) => mp.0.remove(0),
                _ => bail!("Unexpected geometry type for {:?}", gj_feature.properties),
            };
            let census_zone = CensusZone {
                id: gj_feature
                    .property("ID")
                    .unwrap()
                    .as_str()
                    .unwrap()
                    .to_string(),
                cars_0: gj_feature
                    .property("cars_0")
                    .unwrap()
                    .as_u64()
                    .unwrap()
                    .try_into()?,
                cars_1: gj_feature
                    .property("cars_1")
                    .unwrap()
                    .as_u64()
                    .unwrap()
                    .try_into()?,
                cars_2: gj_feature
                    .property("cars_2")
                    .unwrap()
                    .as_u64()
                    .unwrap()
                    .try_into()?,
                cars_3: gj_feature
                    .property("cars_3")
                    .unwrap()
                    .as_u64()
                    .unwrap()
                    .try_into()?,
            };
            output.push((polygon, census_zone));
        }
    }
    println!(
        "Filtering took {:?}. {} results",
        start.elapsed(),
        output.len()
    );

    Ok(output)
}

fn load_all_zones_as_geojson(path: &str) -> Result<Vec<Feature>> {
    let mut start = Instant::now();
    let topojson_str = fs_err::read_to_string(path)?;
    println!("Reading file took {:?}", start.elapsed());

    start = Instant::now();
    let topo = topojson_str.parse::<TopoJson>()?;
    println!("Parsing topojson took {:?}", start.elapsed());

    start = Instant::now();
    let fc = match topo {
        TopoJson::Topology(t) => to_geojson(&t, "zones")?,
        _ => bail!("Unexpected topojson contents"),
    };
    println!("Converting to geojson took {:?}", start.elapsed());

    Ok(fc.features)
}
2 changes: 1 addition & 1 deletion raw_map/Cargo.toml
@@ -10,6 +10,6 @@ abstutil = { path = "../abstutil" }
geom = { workspace = true }
serde = { workspace = true }
osm2streets = { git = "https://github.com/a-b-street/osm2streets" }
-popgetter = { git = "https://github.com/dabreegster/popgetter/" }
+popgetter = { path = "../popgetter" }
strum = "0.24.1"
strum_macros = "0.24.3"
