Unrolled build for rust-lang#124475
Rollup merge of rust-lang#124475 - GKFX:more-dependency-pruning, r=oli-obk

Remove direct dependencies on lazy_static, once_cell and byteorder

The relevant functionality of all three crates is now available and stable in the standard library, i.e. `std::sync::OnceLock` and `{integer}::to_le_bytes`. I think waiting for `LazyLock` (rust-lang#109736) would give marginally more concise code, but not by much.
rust-timer committed Apr 29, 2024
2 parents e27af29 + 8aa3c59 commit 1eb7c0a
Showing 20 changed files with 71 additions and 96 deletions.
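
The same two migrations repeat across the files below, so a minimal standalone sketch may help when reading the diff. The names (`CONFIG`, `config`, `write_header`) and the test values are illustrative, not taken from the changed files; only the standard-library APIs (`OnceLock::get_or_init`, `{integer}::to_le_bytes`, `Write::write_all`) are the ones this rollup switches to.

```rust
use std::io::{self, Write};
use std::sync::OnceLock;

// Lazy global: `OnceLock::get_or_init` replaces `once_cell::sync::Lazy` /
// `lazy_static!`. The value is built on first access and reused afterwards.
static CONFIG: OnceLock<Vec<String>> = OnceLock::new();

fn config() -> &'static [String] {
    CONFIG.get_or_init(|| vec!["default".to_string()])
}

// Little-endian writes: `to_le_bytes` plus `Write::write_all` replaces
// byteorder's `WriteBytesExt::write_u32::<LE>` and friends.
fn write_header(mut out: impl Write, cookie: u32, len: u16) -> io::Result<()> {
    out.write_all(&cookie.to_le_bytes())?;
    out.write_all(&len.to_le_bytes())?;
    Ok(())
}

fn main() -> io::Result<()> {
    assert_eq!(config().len(), 1);
    let mut buf = Vec::new();
    write_header(&mut buf, 12346, 4)?;
    // 12346 == 0x303A, written little-endian, followed by 4u16.
    assert_eq!(buf, [0x3A, 0x30, 0x00, 0x00, 0x04, 0x00]);
    Ok(())
}
```

Unlike `Lazy`, an `OnceLock` needs an explicit initializer at each access, which is why several files below gain a small wrapper function of the form `fn name() { STATIC.get_or_init(init) }` instead of reading the static directly.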
7 changes: 0 additions & 7 deletions Cargo.lock
@@ -2113,7 +2113,6 @@ dependencies = [
"fs-err",
"getopts",
"jsonpath_lib",
"once_cell",
"regex",
"serde_json",
"shlex",
@@ -2232,7 +2231,6 @@ name = "linkchecker"
version = "0.1.0"
dependencies = [
"html5ever",
"once_cell",
"regex",
]

@@ -2491,7 +2489,6 @@ dependencies = [
"directories",
"getrandom",
"jemalloc-sys",
"lazy_static",
"libc",
"libffi",
"libloading",
@@ -4791,12 +4788,10 @@ dependencies = [
"arrayvec",
"askama",
"base64",
"byteorder",
"expect-test",
"indexmap",
"itertools 0.12.1",
"minifier",
"once_cell",
"regex",
"rustdoc-json-types",
"serde",
@@ -5351,7 +5346,6 @@ version = "0.1.0"
dependencies = [
"build_helper",
"glob",
"once_cell",
]

[[package]]
@@ -5596,7 +5590,6 @@ version = "0.1.0"
dependencies = [
"cargo_metadata 0.15.4",
"ignore",
"lazy_static",
"miropt-test-tools",
"regex",
"rustc-hash",
2 changes: 0 additions & 2 deletions src/librustdoc/Cargo.toml
@@ -10,11 +10,9 @@ path = "lib.rs"
arrayvec = { version = "0.7", default-features = false }
askama = { version = "0.12", default-features = false, features = ["config"] }
base64 = "0.21.7"
byteorder = "1.5"
itertools = "0.12"
indexmap = "2"
minifier = "0.3.0"
once_cell = "1.10.0"
regex = "1"
rustdoc-json-types = { path = "../rustdoc-json-types" }
serde_json = "1.0"
6 changes: 3 additions & 3 deletions src/librustdoc/html/markdown.rs
@@ -35,13 +35,13 @@ use rustc_resolve::rustdoc::may_be_doc_link;
use rustc_span::edition::Edition;
use rustc_span::{Span, Symbol};

use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::collections::VecDeque;
use std::fmt::Write;
use std::iter::Peekable;
use std::ops::{ControlFlow, Range};
use std::str::{self, CharIndices};
use std::sync::OnceLock;

use crate::clean::RenderedLink;
use crate::doctest;
@@ -1994,7 +1994,7 @@ pub struct IdMap {
}

// The map is pre-initialized and cloned each time to avoid reinitializing it repeatedly.
static DEFAULT_ID_MAP: Lazy<FxHashMap<Cow<'static, str>, usize>> = Lazy::new(|| init_id_map());
static DEFAULT_ID_MAP: OnceLock<FxHashMap<Cow<'static, str>, usize>> = OnceLock::new();

fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
let mut map = FxHashMap::default();
@@ -2051,7 +2051,7 @@ fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {

impl IdMap {
pub fn new() -> Self {
IdMap { map: DEFAULT_ID_MAP.clone() }
IdMap { map: DEFAULT_ID_MAP.get_or_init(init_id_map).clone() }
}

pub(crate) fn derive<S: AsRef<str> + ToString>(&mut self, candidate: S) -> String {
23 changes: 11 additions & 12 deletions src/librustdoc/html/render/search_index/encode.rs
@@ -166,43 +166,42 @@ pub(crate) fn write_bitmap_to_bytes(
containers.push(container);
}
// https://github.com/RoaringBitmap/RoaringFormatSpec
use byteorder::{WriteBytesExt, LE};
const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
const SERIAL_COOKIE: u32 = 12347;
const NO_OFFSET_THRESHOLD: u32 = 4;
let size: u32 = containers.len().try_into().unwrap();
let start_offset = if has_run {
out.write_u32::<LE>(SERIAL_COOKIE | ((size - 1) << 16))?;
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE | ((size - 1) << 16)))?;
for set in containers.chunks(8) {
let mut b = 0;
for (i, container) in set.iter().enumerate() {
if matches!(container, &Container::Run(..)) {
b |= 1 << i;
}
}
out.write_u8(b)?;
out.write_all(&[b])?;
}
if size < NO_OFFSET_THRESHOLD {
4 + 4 * size + ((size + 7) / 8)
} else {
4 + 8 * size + ((size + 7) / 8)
}
} else {
out.write_u32::<LE>(SERIAL_COOKIE_NO_RUNCONTAINER)?;
out.write_u32::<LE>(containers.len().try_into().unwrap())?;
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE_NO_RUNCONTAINER))?;
out.write_all(&u32::to_le_bytes(containers.len().try_into().unwrap()))?;
4 + 4 + 4 * size + 4 * size
};
for (&key, container) in keys.iter().zip(&containers) {
// descriptive header
let key: u32 = key.into();
let count: u32 = container.popcount() - 1;
out.write_u32::<LE>((count << 16) | key)?;
out.write_all(&u32::to_le_bytes((count << 16) | key))?;
}
if !has_run || size >= NO_OFFSET_THRESHOLD {
// offset header
let mut starting_offset = start_offset;
for container in &containers {
out.write_u32::<LE>(starting_offset)?;
out.write_all(&u32::to_le_bytes(starting_offset))?;
starting_offset += match container {
Container::Bits(_) => 8192u32,
Container::Array(array) => u32::try_from(array.len()).unwrap() * 2,
@@ -214,19 +213,19 @@ pub(crate) fn write_bitmap_to_bytes(
match container {
Container::Bits(bits) => {
for chunk in bits.iter() {
out.write_u64::<LE>(*chunk)?;
out.write_all(&u64::to_le_bytes(*chunk))?;
}
}
Container::Array(array) => {
for value in array.iter() {
out.write_u16::<LE>(*value)?;
out.write_all(&u16::to_le_bytes(*value))?;
}
}
Container::Run(runs) => {
out.write_u16::<LE>((runs.len()).try_into().unwrap())?;
out.write_all(&u16::to_le_bytes(runs.len().try_into().unwrap()))?;
for (start, lenm1) in runs.iter().copied() {
out.write_u16::<LE>(start)?;
out.write_u16::<LE>(lenm1)?;
out.write_all(&u16::to_le_bytes(start))?;
out.write_all(&u16::to_le_bytes(lenm1))?;
}
}
}
1 change: 0 additions & 1 deletion src/tools/jsondocck/Cargo.toml
@@ -10,4 +10,3 @@ regex = "1.4"
shlex = "1.0"
serde_json = "1.0"
fs-err = "2.5.0"
once_cell = "1.0"
9 changes: 5 additions & 4 deletions src/tools/jsondocck/src/main.rs
@@ -1,8 +1,8 @@
use jsonpath_lib::select;
use once_cell::sync::Lazy;
use regex::{Regex, RegexBuilder};
use serde_json::Value;
use std::borrow::Cow;
use std::sync::OnceLock;
use std::{env, fmt, fs};

mod cache;
@@ -95,7 +95,8 @@ impl fmt::Display for CommandKind {
}
}

static LINE_PATTERN: Lazy<Regex> = Lazy::new(|| {
static LINE_PATTERN: OnceLock<Regex> = OnceLock::new();
fn line_pattern() -> Regex {
RegexBuilder::new(
r#"
\s(?P<invalid>!?)@(?P<negated>!?)
@@ -107,7 +108,7 @@ static LINE_PATTERN: Lazy<Regex> = Lazy::new(|| {
.unicode(true)
.build()
.unwrap()
});
}

fn print_err(msg: &str, lineno: usize) {
eprintln!("Invalid command: {} on line {}", msg, lineno)
@@ -123,7 +124,7 @@ fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
for (lineno, line) in file.split('\n').enumerate() {
let lineno = lineno + 1;

let cap = match LINE_PATTERN.captures(line) {
let cap = match LINE_PATTERN.get_or_init(line_pattern).captures(line) {
Some(c) => c,
None => continue,
};
1 change: 0 additions & 1 deletion src/tools/linkchecker/Cargo.toml
@@ -9,5 +9,4 @@ path = "main.rs"

[dependencies]
regex = "1"
once_cell = "1"
html5ever = "0.26.0"
12 changes: 7 additions & 5 deletions src/tools/linkchecker/main.rs
@@ -18,8 +18,6 @@ use html5ever::tendril::ByteTendril;
use html5ever::tokenizer::{
BufferQueue, TagToken, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
};
use once_cell::sync::Lazy;
use regex::Regex;
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::env;
@@ -69,8 +67,12 @@ const INTRA_DOC_LINK_EXCEPTIONS: &[(&str, &[&str])] = &[

];

static BROKEN_INTRA_DOC_LINK: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"\[<code>(.*)</code>\]"#).unwrap());
macro_rules! static_regex {
($re:literal) => {{
static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
}};
}

macro_rules! t {
($e:expr) => {
@@ -373,7 +375,7 @@ impl Checker {
// Search for intra-doc links that rustdoc didn't warn about
// NOTE: only looks at one line at a time; in practice this should find most links
for (i, line) in source.lines().enumerate() {
for broken_link in BROKEN_INTRA_DOC_LINK.captures_iter(line) {
for broken_link in static_regex!(r#"\[<code>(.*)</code>\]"#).captures_iter(line) {
if is_intra_doc_exception(file, &broken_link[1]) {
report.intra_doc_exceptions += 1;
} else {
1 change: 0 additions & 1 deletion src/tools/miri/Cargo.toml
@@ -46,7 +46,6 @@ colored = "2"
ui_test = "0.21.1"
rustc_version = "0.4"
regex = "1.5.5"
lazy_static = "1.4.0"
tempfile = "3"

[package.metadata.rust-analyzer]
22 changes: 13 additions & 9 deletions src/tools/miri/tests/ui.rs
@@ -1,6 +1,7 @@
use std::ffi::OsString;
use std::num::NonZeroUsize;
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
use std::{env, process::Command};

use colored::*;
@@ -67,8 +68,8 @@ fn miri_config(target: &str, path: &str, mode: Mode, with_dependencies: bool) ->

let mut config = Config {
target: Some(target.to_owned()),
stderr_filters: STDERR.clone(),
stdout_filters: STDOUT.clone(),
stderr_filters: stderr_filters().into(),
stdout_filters: stdout_filters().into(),
mode,
program,
out_dir: PathBuf::from(std::env::var_os("CARGO_TARGET_DIR").unwrap()).join("ui"),
@@ -174,15 +175,18 @@ fn run_tests(
}

macro_rules! regexes {
($name:ident: $($regex:expr => $replacement:expr,)*) => {lazy_static::lazy_static! {
static ref $name: Vec<(Match, &'static [u8])> = vec![
$((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)*
];
}};
($name:ident: $($regex:expr => $replacement:expr,)*) => {
fn $name() -> &'static [(Match, &'static [u8])] {
static S: OnceLock<Vec<(Match, &'static [u8])>> = OnceLock::new();
S.get_or_init(|| vec![
$((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)*
])
}
};
}

regexes! {
STDOUT:
stdout_filters:
// Windows file paths
r"\\" => "/",
// erase borrow tags
@@ -191,7 +195,7 @@ regexes! {
}

regexes! {
STDERR:
stderr_filters:
// erase line and column info
r"\.rs:[0-9]+:[0-9]+(: [0-9]+:[0-9]+)?" => ".rs:LL:CC",
// erase alloc ids
1 change: 0 additions & 1 deletion src/tools/suggest-tests/Cargo.toml
@@ -6,4 +6,3 @@ edition = "2021"
[dependencies]
glob = "0.3.0"
build_helper = { version = "0.1.0", path = "../build_helper" }
once_cell = "1.17.1"
4 changes: 2 additions & 2 deletions src/tools/suggest-tests/src/lib.rs
@@ -5,7 +5,7 @@ use std::{

use dynamic_suggestions::DYNAMIC_SUGGESTIONS;
use glob::Pattern;
use static_suggestions::STATIC_SUGGESTIONS;
use static_suggestions::static_suggestions;

mod dynamic_suggestions;
mod static_suggestions;
@@ -33,7 +33,7 @@ pub fn get_suggestions<T: AsRef<str>>(modified_files: &[T]) -> Vec<Suggestion> {
let mut suggestions = Vec::new();

// static suggestions
for (globs, sugs) in STATIC_SUGGESTIONS.iter() {
for (globs, sugs) in static_suggestions().iter() {
let globs = globs
.iter()
.map(|glob| Pattern::new(glob).expect("Found invalid glob pattern!"))
8 changes: 6 additions & 2 deletions src/tools/suggest-tests/src/static_suggestions.rs
@@ -1,10 +1,14 @@
use crate::{sug, Suggestion};
use std::sync::OnceLock;

// FIXME: perhaps this could use `std::lazy` when it is stablizied
macro_rules! static_suggestions {
($( [ $( $glob:expr ),* $(,)? ] => [ $( $suggestion:expr ),* $(,)? ] ),* $(,)? ) => {
pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy<Vec<(Vec<&'static str>, Vec<Suggestion>)>>
= ::once_cell::unsync::Lazy::new(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*]);
pub(crate) fn static_suggestions() -> &'static [(Vec<&'static str>, Vec<Suggestion>)]
{
static S: OnceLock<Vec<(Vec<&'static str>, Vec<Suggestion>)>> = OnceLock::new();
S.get_or_init(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*])
}
}
}

1 change: 0 additions & 1 deletion src/tools/tidy/Cargo.toml
@@ -8,7 +8,6 @@ autobins = false
cargo_metadata = "0.15"
regex = "1"
miropt-test-tools = { path = "../miropt-test-tools" }
lazy_static = "1"
walkdir = "2"
ignore = "0.4.18"
semver = "1.0"
