Use `rustc-hash` for hash maps and sets (#6678)

This commit is contained in:
Laurenz 2025-07-31 10:45:03 +02:00 committed by GitHub
parent 59243dadbb
commit 88dfe4d276
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
46 changed files with 183 additions and 144 deletions

13
Cargo.lock generated
View File

@ -2906,6 +2906,7 @@ version = "0.13.1"
dependencies = [ dependencies = [
"comemo", "comemo",
"ecow", "ecow",
"rustc-hash",
"typst-eval", "typst-eval",
"typst-html", "typst-html",
"typst-layout", "typst-layout",
@ -2941,6 +2942,7 @@ dependencies = [
"parking_lot", "parking_lot",
"pathdiff", "pathdiff",
"rayon", "rayon",
"rustc-hash",
"same-file", "same-file",
"self-replace", "self-replace",
"semver", "semver",
@ -2981,6 +2983,7 @@ dependencies = [
"ecow", "ecow",
"heck", "heck",
"pulldown-cmark", "pulldown-cmark",
"rustc-hash",
"serde", "serde",
"serde_json", "serde_json",
"serde_yaml 0.9.34+deprecated", "serde_yaml 0.9.34+deprecated",
@ -3003,6 +3006,7 @@ dependencies = [
"comemo", "comemo",
"ecow", "ecow",
"indexmap 2.7.1", "indexmap 2.7.1",
"rustc-hash",
"stacker", "stacker",
"toml", "toml",
"typst-library", "typst-library",
@ -3033,6 +3037,7 @@ dependencies = [
"comemo", "comemo",
"ecow", "ecow",
"palette", "palette",
"rustc-hash",
"time", "time",
"typst-assets", "typst-assets",
"typst-library", "typst-library",
@ -3051,6 +3056,7 @@ dependencies = [
"ecow", "ecow",
"once_cell", "once_cell",
"pathdiff", "pathdiff",
"rustc-hash",
"serde", "serde",
"typst", "typst",
"typst-assets", "typst-assets",
@ -3100,6 +3106,7 @@ dependencies = [
"icu_segmenter", "icu_segmenter",
"kurbo", "kurbo",
"memchr", "memchr",
"rustc-hash",
"rustybuzz", "rustybuzz",
"smallvec", "smallvec",
"ttf-parser", "ttf-parser",
@ -3151,6 +3158,7 @@ dependencies = [
"regex-syntax", "regex-syntax",
"roxmltree", "roxmltree",
"rust_decimal", "rust_decimal",
"rustc-hash",
"rustybuzz", "rustybuzz",
"serde", "serde",
"serde_json", "serde_json",
@ -3200,6 +3208,7 @@ dependencies = [
"infer", "infer",
"krilla", "krilla",
"krilla-svg", "krilla-svg",
"rustc-hash",
"serde", "serde",
"typst-assets", "typst-assets",
"typst-library", "typst-library",
@ -3253,6 +3262,7 @@ dependencies = [
"flate2", "flate2",
"hayro", "hayro",
"image", "image",
"rustc-hash",
"ttf-parser", "ttf-parser",
"typst-assets", "typst-assets",
"typst-library", "typst-library",
@ -3268,6 +3278,7 @@ name = "typst-syntax"
version = "0.13.1" version = "0.13.1"
dependencies = [ dependencies = [
"ecow", "ecow",
"rustc-hash",
"serde", "serde",
"toml", "toml",
"typst-timing", "typst-timing",
@ -3290,6 +3301,7 @@ dependencies = [
"parking_lot", "parking_lot",
"rayon", "rayon",
"regex", "regex",
"rustc-hash",
"tiny-skia", "tiny-skia",
"typst", "typst",
"typst-assets", "typst-assets",
@ -3321,6 +3333,7 @@ dependencies = [
"once_cell", "once_cell",
"portable-atomic", "portable-atomic",
"rayon", "rayon",
"rustc-hash",
"siphasher", "siphasher",
"thin-vec", "thin-vec",
"unicode-math-class", "unicode-math-class",

View File

@ -59,6 +59,7 @@ fastrand = "2.3"
flate2 = "1" flate2 = "1"
fontdb = { version = "0.23", default-features = false } fontdb = { version = "0.23", default-features = false }
fs_extra = "1.3" fs_extra = "1.3"
rustc-hash = "2.1"
glidesort = "0.1.2" glidesort = "0.1.2"
hayagriva = "0.8.1" hayagriva = "0.8.1"
hayro-syntax = { git = "https://github.com/LaurenzV/hayro", rev = "e701f95" } hayro-syntax = { git = "https://github.com/LaurenzV/hayro", rev = "e701f95" }

View File

@ -41,6 +41,7 @@ open = { workspace = true }
parking_lot = { workspace = true } parking_lot = { workspace = true }
pathdiff = { workspace = true } pathdiff = { workspace = true }
rayon = { workspace = true } rayon = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true } same-file = { workspace = true }
self-replace = { workspace = true, optional = true } self-replace = { workspace = true, optional = true }
semver = { workspace = true } semver = { workspace = true }

View File

@ -1,4 +1,3 @@
use std::collections::{HashMap, HashSet};
use std::io::{self, Write}; use std::io::{self, Write};
use std::iter; use std::iter;
use std::path::PathBuf; use std::path::PathBuf;
@ -9,6 +8,7 @@ use codespan_reporting::term::termcolor::WriteColor;
use codespan_reporting::term::{self, termcolor}; use codespan_reporting::term::{self, termcolor};
use ecow::eco_format; use ecow::eco_format;
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher as _}; use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher as _};
use rustc_hash::{FxHashMap, FxHashSet};
use same_file::is_same_file; use same_file::is_same_file;
use typst::diag::{StrResult, bail, warning}; use typst::diag::{StrResult, bail, warning};
use typst::syntax::Span; use typst::syntax::Span;
@ -91,10 +91,10 @@ struct Watcher {
/// Keeps track of which paths are watched via `watcher`. The boolean is /// Keeps track of which paths are watched via `watcher`. The boolean is
/// used during updating for mark-and-sweep garbage collection of paths we /// used during updating for mark-and-sweep garbage collection of paths we
/// should unwatch. /// should unwatch.
watched: HashMap<PathBuf, bool>, watched: FxHashMap<PathBuf, bool>,
/// A set of files that should be watched, but don't exist. We manually poll /// A set of files that should be watched, but don't exist. We manually poll
/// for those. /// for those.
missing: HashSet<PathBuf>, missing: FxHashSet<PathBuf>,
} }
impl Watcher { impl Watcher {
@ -127,8 +127,8 @@ impl Watcher {
output, output,
rx, rx,
watcher, watcher,
watched: HashMap::new(), watched: FxHashMap::default(),
missing: HashSet::new(), missing: FxHashSet::default(),
}) })
} }

View File

@ -1,4 +1,3 @@
use std::collections::HashMap;
use std::io::Read; use std::io::Read;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::{LazyLock, OnceLock}; use std::sync::{LazyLock, OnceLock};
@ -7,6 +6,7 @@ use std::{fmt, fs, io, mem};
use chrono::{DateTime, Datelike, FixedOffset, Local, Utc}; use chrono::{DateTime, Datelike, FixedOffset, Local, Utc};
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use parking_lot::Mutex; use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use typst::diag::{FileError, FileResult}; use typst::diag::{FileError, FileResult};
use typst::foundations::{Bytes, Datetime, Dict, IntoValue}; use typst::foundations::{Bytes, Datetime, Dict, IntoValue};
use typst::syntax::{FileId, Lines, Source, VirtualPath}; use typst::syntax::{FileId, Lines, Source, VirtualPath};
@ -41,7 +41,7 @@ pub struct SystemWorld {
/// Locations of and storage for lazily loaded fonts. /// Locations of and storage for lazily loaded fonts.
fonts: Vec<FontSlot>, fonts: Vec<FontSlot>,
/// Maps file ids to source files and buffers. /// Maps file ids to source files and buffers.
slots: Mutex<HashMap<FileId, FileSlot>>, slots: Mutex<FxHashMap<FileId, FileSlot>>,
/// Holds information about where packages are stored. /// Holds information about where packages are stored.
package_storage: PackageStorage, package_storage: PackageStorage,
/// The current datetime if requested. This is stored here to ensure it is /// The current datetime if requested. This is stored here to ensure it is
@ -139,7 +139,7 @@ impl SystemWorld {
library: LazyHash::new(library), library: LazyHash::new(library),
book: LazyHash::new(fonts.book), book: LazyHash::new(fonts.book),
fonts: fonts.fonts, fonts: fonts.fonts,
slots: Mutex::new(HashMap::new()), slots: Mutex::new(FxHashMap::default()),
package_storage: package::storage(&world_args.package), package_storage: package::storage(&world_args.package),
now, now,
}) })

View File

@ -21,6 +21,7 @@ typst-utils = { workspace = true }
comemo = { workspace = true } comemo = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
indexmap = { workspace = true } indexmap = { workspace = true }
rustc-hash = { workspace = true }
toml = { workspace = true } toml = { workspace = true }
unicode-segmentation = { workspace = true } unicode-segmentation = { workspace = true }

View File

@ -1,6 +1,5 @@
use std::collections::HashSet;
use ecow::eco_format; use ecow::eco_format;
use rustc_hash::FxHashSet;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error}; use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{Array, Dict, Value}; use typst_library::foundations::{Array, Dict, Value};
use typst_syntax::ast::{self, AstNode}; use typst_syntax::ast::{self, AstNode};
@ -137,7 +136,7 @@ where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>, F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
{ {
let mut sink = None; let mut sink = None;
let mut used = HashSet::new(); let mut used = FxHashSet::default();
for p in destruct.items() { for p in destruct.items() {
match p { match p {

View File

@ -246,7 +246,7 @@ impl Eval for ast::Dict<'_> {
type Output = Dict; type Output = Dict;
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> { fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
let mut map = indexmap::IndexMap::new(); let mut map = indexmap::IndexMap::default();
let mut invalid_keys = eco_vec![]; let mut invalid_keys = eco_vec![];
for item in self.items() { for item in self.items() {

View File

@ -24,6 +24,7 @@ bumpalo = { workspace = true }
comemo = { workspace = true } comemo = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
palette = { workspace = true } palette = { workspace = true }
rustc-hash = { workspace = true }
time = { workspace = true } time = { workspace = true }
[lints] [lints]

View File

@ -1,7 +1,7 @@
use std::collections::HashSet;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use comemo::{Tracked, TrackedMut}; use comemo::{Tracked, TrackedMut};
use rustc_hash::FxHashSet;
use typst_library::World; use typst_library::World;
use typst_library::diag::{SourceResult, bail}; use typst_library::diag::{SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced}; use typst_library::engine::{Engine, Route, Sink, Traced};
@ -87,7 +87,7 @@ fn html_document_impl(
children.iter().copied(), children.iter().copied(),
)?; )?;
let mut link_targets = HashSet::new(); let mut link_targets = FxHashSet::default();
let mut introspector = introspect_html(&output, &mut link_targets); let mut introspector = introspect_html(&output, &mut link_targets);
let mut root = root_element(output, &info)?; let mut root = root_element(output, &info)?;
crate::link::identify_link_targets(&mut root, &mut introspector, link_targets); crate::link::identify_link_targets(&mut root, &mut introspector, link_targets);
@ -99,12 +99,12 @@ fn html_document_impl(
#[typst_macros::time(name = "introspect html")] #[typst_macros::time(name = "introspect html")]
fn introspect_html( fn introspect_html(
output: &[HtmlNode], output: &[HtmlNode],
link_targets: &mut HashSet<Location>, link_targets: &mut FxHashSet<Location>,
) -> Introspector { ) -> Introspector {
fn discover( fn discover(
builder: &mut IntrospectorBuilder, builder: &mut IntrospectorBuilder,
sink: &mut Vec<(Content, Position)>, sink: &mut Vec<(Content, Position)>,
link_targets: &mut HashSet<Location>, link_targets: &mut FxHashSet<Location>,
nodes: &[HtmlNode], nodes: &[HtmlNode],
) { ) {
for node in nodes { for node in nodes {

View File

@ -1,7 +1,8 @@
use std::collections::{HashMap, HashSet, VecDeque}; use std::collections::VecDeque;
use comemo::Track; use comemo::Track;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_library::foundations::{Label, NativeElement}; use typst_library::foundations::{Label, NativeElement};
use typst_library::introspection::{Introspector, Location, Tag}; use typst_library::introspection::{Introspector, Location, Tag};
use typst_library::layout::{Frame, FrameItem, Point}; use typst_library::layout::{Frame, FrameItem, Point};
@ -16,7 +17,7 @@ use crate::{HtmlElement, HtmlNode, attr, tag};
/// in favor of the query in `identify_link_targets`. For the time being, some /// in favor of the query in `identify_link_targets`. For the time being, some
/// links are created without existence of a `LinkElem`, so this is /// links are created without existence of a `LinkElem`, so this is
/// unfortunately necessary. /// unfortunately necessary.
pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) { pub fn introspect_frame_links(frame: &Frame, targets: &mut FxHashSet<Location>) {
for (_, item) in frame.items() { for (_, item) in frame.items() {
match item { match item {
FrameItem::Link(Destination::Location(loc), _) => { FrameItem::Link(Destination::Location(loc), _) => {
@ -35,7 +36,7 @@ pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) {
pub fn identify_link_targets( pub fn identify_link_targets(
root: &mut HtmlElement, root: &mut HtmlElement,
introspector: &mut Introspector, introspector: &mut Introspector,
mut targets: HashSet<Location>, mut targets: FxHashSet<Location>,
) { ) {
// Query for all links with an intra-doc (i.e. `Location`) destination to // Query for all links with an intra-doc (i.e. `Location`) destination to
// know what needs IDs. // know what needs IDs.
@ -72,7 +73,7 @@ pub fn identify_link_targets(
/// Traverses a list of nodes. /// Traverses a list of nodes.
fn traverse( fn traverse(
work: &mut Work, work: &mut Work,
targets: &HashSet<Location>, targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>, identificator: &mut Identificator<'_>,
nodes: &mut Vec<HtmlNode>, nodes: &mut Vec<HtmlNode>,
) { ) {
@ -144,7 +145,7 @@ fn traverse(
/// Traverses a frame embedded in HTML. /// Traverses a frame embedded in HTML.
fn traverse_frame( fn traverse_frame(
work: &mut Work, work: &mut Work,
targets: &HashSet<Location>, targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>, identificator: &mut Identificator<'_>,
frame: &Frame, frame: &Frame,
link_points: &mut Vec<(Point, EcoString)>, link_points: &mut Vec<(Point, EcoString)>,
@ -174,13 +175,13 @@ struct Work {
/// now. /// now.
queue: VecDeque<(Location, Option<Label>)>, queue: VecDeque<(Location, Option<Label>)>,
/// The resulting mapping from element location's to HTML IDs. /// The resulting mapping from element location's to HTML IDs.
ids: HashMap<Location, EcoString>, ids: FxHashMap<Location, EcoString>,
} }
impl Work { impl Work {
/// Sets up. /// Sets up.
fn new() -> Self { fn new() -> Self {
Self { queue: VecDeque::new(), ids: HashMap::new() } Self { queue: VecDeque::new(), ids: FxHashMap::default() }
} }
/// Marks the element with the given location and label as in need of an /// Marks the element with the given location and label as in need of an
@ -215,7 +216,7 @@ impl Work {
struct Identificator<'a> { struct Identificator<'a> {
introspector: &'a Introspector, introspector: &'a Introspector,
loc_counter: usize, loc_counter: usize,
label_counter: HashMap<Label, usize>, label_counter: FxHashMap<Label, usize>,
} }
impl<'a> Identificator<'a> { impl<'a> Identificator<'a> {
@ -224,7 +225,7 @@ impl<'a> Identificator<'a> {
Self { Self {
introspector, introspector,
loc_counter: 0, loc_counter: 0,
label_counter: HashMap::new(), label_counter: FxHashMap::default(),
} }
} }

View File

@ -18,6 +18,7 @@ typst-eval = { workspace = true }
comemo = { workspace = true } comemo = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
pathdiff = { workspace = true } pathdiff = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
unscanny = { workspace = true } unscanny = { workspace = true }

View File

@ -1,7 +1,6 @@
use std::collections::HashSet;
use comemo::Track; use comemo::Track;
use ecow::{EcoString, EcoVec, eco_vec}; use ecow::{EcoString, EcoVec, eco_vec};
use rustc_hash::FxHashSet;
use typst::foundations::{Label, Styles, Value}; use typst::foundations::{Label, Styles, Value};
use typst::layout::PagedDocument; use typst::layout::PagedDocument;
use typst::model::{BibliographyElem, FigureElem}; use typst::model::{BibliographyElem, FigureElem};
@ -76,7 +75,7 @@ pub fn analyze_labels(
document: &PagedDocument, document: &PagedDocument,
) -> (Vec<(Label, Option<EcoString>)>, usize) { ) -> (Vec<(Label, Option<EcoString>)>, usize) {
let mut output = vec![]; let mut output = vec![];
let mut seen_labels = HashSet::new(); let mut seen_labels = FxHashSet::default();
// Labels in the document. // Labels in the document.
for elem in document.introspector.all() { for elem in document.introspector.all() {

View File

@ -1,8 +1,9 @@
use std::cmp::Reverse; use std::cmp::Reverse;
use std::collections::{BTreeMap, HashSet}; use std::collections::BTreeMap;
use std::ffi::OsStr; use std::ffi::OsStr;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use typst::foundations::{ use typst::foundations::{
AutoValue, CastInfo, Func, Label, NoneValue, ParamInfo, Repr, StyleChain, Styles, AutoValue, CastInfo, Func, Label, NoneValue, ParamInfo, Repr, StyleChain, Styles,
@ -739,7 +740,7 @@ fn param_completions<'a>(
// Determine which arguments are already present. // Determine which arguments are already present.
let mut existing_positional = 0; let mut existing_positional = 0;
let mut existing_named = HashSet::new(); let mut existing_named = FxHashSet::default();
for arg in args.items() { for arg in args.items() {
match arg { match arg {
ast::Arg::Pos(_) => { ast::Arg::Pos(_) => {
@ -1116,7 +1117,7 @@ struct CompletionContext<'a> {
explicit: bool, explicit: bool,
from: usize, from: usize,
completions: Vec<Completion>, completions: Vec<Completion>,
seen_casts: HashSet<u128>, seen_casts: FxHashSet<u128>,
} }
impl<'a> CompletionContext<'a> { impl<'a> CompletionContext<'a> {
@ -1141,7 +1142,7 @@ impl<'a> CompletionContext<'a> {
explicit, explicit,
from: cursor, from: cursor,
completions: vec![], completions: vec![],
seen_casts: HashSet::new(), seen_casts: FxHashSet::default(),
}) })
} }

View File

@ -1,8 +1,8 @@
use std::borrow::Borrow; use std::borrow::Borrow;
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use ecow::EcoString; use ecow::EcoString;
use rustc_hash::FxHashMap;
use typst::diag::{FileError, FileResult}; use typst::diag::{FileError, FileResult};
use typst::foundations::{Bytes, Datetime, Smart}; use typst::foundations::{Bytes, Datetime, Smart};
use typst::layout::{Abs, Margin, PageElem}; use typst::layout::{Abs, Margin, PageElem};
@ -137,8 +137,8 @@ impl IdeWorld for TestWorld {
/// Test-specific files. /// Test-specific files.
#[derive(Default, Clone)] #[derive(Default, Clone)]
struct TestFiles { struct TestFiles {
assets: HashMap<FileId, Bytes>, assets: FxHashMap<FileId, Bytes>,
sources: HashMap<FileId, Source>, sources: FxHashMap<FileId, Source>,
} }
/// Shared foundation of all test worlds. /// Shared foundation of all test worlds.

View File

@ -32,6 +32,7 @@ icu_provider_blob = { workspace = true }
icu_segmenter = { workspace = true } icu_segmenter = { workspace = true }
kurbo = { workspace = true } kurbo = { workspace = true }
memchr = { workspace = true } memchr = { workspace = true }
rustc-hash = { workspace = true }
rustybuzz = { workspace = true } rustybuzz = { workspace = true }
smallvec = { workspace = true } smallvec = { workspace = true }
ttf-parser = { workspace = true } ttf-parser = { workspace = true }

View File

@ -7,13 +7,13 @@ mod distribute;
pub(crate) use self::block::unbreakable_pod; pub(crate) use self::block::unbreakable_pod;
use std::collections::HashSet;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::rc::Rc; use std::rc::Rc;
use bumpalo::Bump; use bumpalo::Bump;
use comemo::{Track, Tracked, TrackedMut}; use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec; use ecow::EcoVec;
use rustc_hash::FxHashSet;
use typst_library::World; use typst_library::World;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail}; use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced}; use typst_library::engine::{Engine, Route, Sink, Traced};
@ -303,7 +303,7 @@ struct Work<'a, 'b> {
/// Identifies floats and footnotes that can be skipped if visited because /// Identifies floats and footnotes that can be skipped if visited because
/// they were already handled and incorporated as column or page level /// they were already handled and incorporated as column or page level
/// insertions. /// insertions.
skips: Rc<HashSet<Location>>, skips: Rc<FxHashSet<Location>>,
} }
impl<'a, 'b> Work<'a, 'b> { impl<'a, 'b> Work<'a, 'b> {
@ -316,7 +316,7 @@ impl<'a, 'b> Work<'a, 'b> {
footnotes: EcoVec::new(), footnotes: EcoVec::new(),
footnote_spill: None, footnote_spill: None,
tags: EcoVec::new(), tags: EcoVec::new(),
skips: Rc::new(HashSet::new()), skips: Rc::new(FxHashSet::default()),
} }
} }

View File

@ -1,5 +1,4 @@
use std::collections::HashSet; use rustc_hash::FxHashSet;
use typst_library::foundations::StyleChain; use typst_library::foundations::StyleChain;
use typst_library::introspection::{Locator, SplitLocator, Tag, TagElem}; use typst_library::introspection::{Locator, SplitLocator, Tag, TagElem};
use typst_library::layout::{PagebreakElem, Parity}; use typst_library::layout::{PagebreakElem, Parity};
@ -134,7 +133,7 @@ fn migrate_unterminated_tags(children: &mut [Pair], mid: usize) -> usize {
// Determine the set of tag locations which we won't migrate (because they // Determine the set of tag locations which we won't migrate (because they
// are terminated). // are terminated).
let excluded: HashSet<_> = children[start..mid] let excluded: FxHashSet<_> = children[start..mid]
.iter() .iter()
.filter_map(|(c, _)| match c.to_packed::<TagElem>()?.tag { .filter_map(|(c, _)| match c.to_packed::<TagElem>()?.tag {
Tag::Start(_) => None, Tag::Start(_) => None,

View File

@ -50,6 +50,7 @@ regex = { workspace = true }
regex-syntax = { workspace = true } regex-syntax = { workspace = true }
roxmltree = { workspace = true } roxmltree = { workspace = true }
rust_decimal = { workspace = true } rust_decimal = { workspace = true }
rustc-hash = { workspace = true }
rustybuzz = { workspace = true } rustybuzz = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }

View File

@ -1,11 +1,11 @@
//! Definition of the central compilation context. //! Definition of the central compilation context.
use std::collections::HashSet;
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use comemo::{Track, Tracked, TrackedMut, Validate}; use comemo::{Track, Tracked, TrackedMut, Validate};
use ecow::EcoVec; use ecow::EcoVec;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use rustc_hash::FxHashSet;
use typst_syntax::{FileId, Span}; use typst_syntax::{FileId, Span};
use crate::World; use crate::World;
@ -135,7 +135,7 @@ pub struct Sink {
/// Warnings emitted during iteration. /// Warnings emitted during iteration.
warnings: EcoVec<SourceDiagnostic>, warnings: EcoVec<SourceDiagnostic>,
/// Hashes of all warning's spans and messages for warning deduplication. /// Hashes of all warning's spans and messages for warning deduplication.
warnings_set: HashSet<u128>, warnings_set: FxHashSet<u128>,
/// A sequence of traced values for a span. /// A sequence of traced values for a span.
values: EcoVec<(Value, Option<Styles>)>, values: EcoVec<(Value, Option<Styles>)>,
} }

View File

@ -5,6 +5,7 @@ use std::sync::Arc;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use indexmap::IndexMap; use indexmap::IndexMap;
use rustc_hash::FxBuildHasher;
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
use typst_syntax::is_ident; use typst_syntax::is_ident;
use typst_utils::ArcExt; use typst_utils::ArcExt;
@ -20,7 +21,7 @@ use crate::foundations::{
macro_rules! __dict { macro_rules! __dict {
($($key:expr => $value:expr),* $(,)?) => {{ ($($key:expr => $value:expr),* $(,)?) => {{
#[allow(unused_mut)] #[allow(unused_mut)]
let mut map = $crate::foundations::IndexMap::new(); let mut map = $crate::foundations::IndexMap::default();
$(map.insert($key.into(), $crate::foundations::IntoValue::into_value($value));)* $(map.insert($key.into(), $crate::foundations::IntoValue::into_value($value));)*
$crate::foundations::Dict::from(map) $crate::foundations::Dict::from(map)
}}; }};
@ -66,7 +67,7 @@ pub use crate::__dict as dict;
/// ``` /// ```
#[ty(scope, cast, name = "dictionary")] #[ty(scope, cast, name = "dictionary")]
#[derive(Default, Clone, PartialEq)] #[derive(Default, Clone, PartialEq)]
pub struct Dict(Arc<IndexMap<Str, Value>>); pub struct Dict(Arc<IndexMap<Str, Value, FxBuildHasher>>);
impl Dict { impl Dict {
/// Create a new, empty dictionary. /// Create a new, empty dictionary.
@ -343,7 +344,7 @@ impl<'de> Deserialize<'de> for Dict {
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
Ok(IndexMap::<Str, Value>::deserialize(deserializer)?.into()) Ok(IndexMap::<Str, Value, FxBuildHasher>::deserialize(deserializer)?.into())
} }
} }
@ -377,8 +378,8 @@ impl<'a> IntoIterator for &'a Dict {
} }
} }
impl From<IndexMap<Str, Value>> for Dict { impl From<IndexMap<Str, Value, FxBuildHasher>> for Dict {
fn from(map: IndexMap<Str, Value>) -> Self { fn from(map: IndexMap<Str, Value, FxBuildHasher>) -> Self {
Self(Arc::new(map)) Self(Arc::new(map))
} }
} }

View File

@ -4,6 +4,7 @@ use std::hash::{Hash, Hasher};
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use indexmap::IndexMap; use indexmap::IndexMap;
use indexmap::map::Entry; use indexmap::map::Entry;
use rustc_hash::FxBuildHasher;
use typst_syntax::Span; use typst_syntax::Span;
use crate::diag::{DeprecationSink, HintedStrResult, HintedString, StrResult, bail}; use crate::diag::{DeprecationSink, HintedStrResult, HintedString, StrResult, bail};
@ -102,7 +103,7 @@ impl<'a> Scopes<'a> {
/// A map from binding names to values. /// A map from binding names to values.
#[derive(Default, Clone)] #[derive(Default, Clone)]
pub struct Scope { pub struct Scope {
map: IndexMap<EcoString, Binding>, map: IndexMap<EcoString, Binding, FxBuildHasher>,
deduplicate: bool, deduplicate: bool,
category: Option<Category>, category: Option<Category>,
} }

View File

@ -1,11 +1,11 @@
use std::any::{Any, TypeId}; use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::{mem, ptr}; use std::{mem, ptr};
use comemo::Tracked; use comemo::Tracked;
use ecow::{EcoString, EcoVec, eco_vec}; use ecow::{EcoString, EcoVec, eco_vec};
use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use typst_syntax::Span; use typst_syntax::Span;
use typst_utils::LazyHash; use typst_utils::LazyHash;
@ -938,7 +938,7 @@ fn block_wrong_type(func: Element, id: u8, value: &Block) -> ! {
/// Holds native show rules. /// Holds native show rules.
pub struct NativeRuleMap { pub struct NativeRuleMap {
rules: HashMap<(Element, Target), NativeShowRule>, rules: FxHashMap<(Element, Target), NativeShowRule>,
} }
/// The signature of a native show rule. /// The signature of a native show rule.
@ -956,7 +956,7 @@ impl NativeRuleMap {
/// ///
/// Contains built-in rules for a few special elements. /// Contains built-in rules for a few special elements.
pub fn new() -> Self { pub fn new() -> Self {
let mut rules = Self { rules: HashMap::new() }; let mut rules = Self { rules: FxHashMap::default() };
// ContextElem is as special as SequenceElem and StyledElem and could, // ContextElem is as special as SequenceElem and StyledElem and could,
// in theory, also be special cased in realization. // in theory, also be special cased in realization.

View File

@ -1,9 +1,10 @@
use std::collections::{BTreeSet, HashMap}; use std::collections::BTreeSet;
use std::fmt::{self, Debug, Display, Formatter, Write}; use std::fmt::{self, Debug, Display, Formatter, Write};
use std::sync::Arc; use std::sync::Arc;
use codex::ModifierSet; use codex::ModifierSet;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::FxHashMap;
use serde::{Serialize, Serializer}; use serde::{Serialize, Serializer};
use typst_syntax::{Span, Spanned, is_ident}; use typst_syntax::{Span, Spanned, is_ident};
use typst_utils::hash128; use typst_utils::hash128;
@ -221,7 +222,7 @@ impl Symbol {
// Maps from canonicalized 128-bit hashes to indices of variants we've // Maps from canonicalized 128-bit hashes to indices of variants we've
// seen before. // seen before.
let mut seen = HashMap::<u128, usize>::new(); let mut seen = FxHashMap::<u128, usize>::default();
// A list of modifiers, cleared & reused in each iteration. // A list of modifiers, cleared & reused in each iteration.
let mut modifiers = Vec::new(); let mut modifiers = Vec::new();

View File

@ -1,10 +1,11 @@
use std::collections::{BTreeSet, HashMap, HashSet}; use std::collections::BTreeSet;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::Hash; use std::hash::Hash;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::sync::RwLock; use std::sync::RwLock;
use ecow::{EcoString, EcoVec}; use ecow::{EcoString, EcoVec};
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use typst_utils::NonZeroExt; use typst_utils::NonZeroExt;
@ -31,14 +32,14 @@ pub struct Introspector {
keys: MultiMap<u128, Location>, keys: MultiMap<u128, Location>,
/// Accelerates lookup of elements by location. /// Accelerates lookup of elements by location.
locations: HashMap<Location, usize>, locations: FxHashMap<Location, usize>,
/// Accelerates lookup of elements by label. /// Accelerates lookup of elements by label.
labels: MultiMap<Label, usize>, labels: MultiMap<Label, usize>,
/// Maps from element locations to assigned HTML IDs. This used to support /// Maps from element locations to assigned HTML IDs. This used to support
/// intra-doc links in HTML export. In paged export, is is simply left /// intra-doc links in HTML export. In paged export, is is simply left
/// empty and [`Self::html_id`] is not used. /// empty and [`Self::html_id`] is not used.
html_ids: HashMap<Location, EcoString>, html_ids: FxHashMap<Location, EcoString>,
/// Caches queries done on the introspector. This is important because /// Caches queries done on the introspector. This is important because
/// even if all top-level queries are distinct, they often have shared /// even if all top-level queries are distinct, they often have shared
@ -63,7 +64,7 @@ impl Introspector {
/// Enriches an existing introspector with HTML IDs, which were assigned /// Enriches an existing introspector with HTML IDs, which were assigned
/// to the DOM in a post-processing step. /// to the DOM in a post-processing step.
pub fn set_html_ids(&mut self, html_ids: HashMap<Location, EcoString>) { pub fn set_html_ids(&mut self, html_ids: FxHashMap<Location, EcoString>) {
self.html_ids = html_ids; self.html_ids = html_ids;
} }
@ -313,7 +314,7 @@ impl Debug for Introspector {
/// A map from one keys to multiple elements. /// A map from one keys to multiple elements.
#[derive(Clone)] #[derive(Clone)]
struct MultiMap<K, V>(HashMap<K, SmallVec<[V; 1]>>); struct MultiMap<K, V>(FxHashMap<K, SmallVec<[V; 1]>>);
impl<K, V> MultiMap<K, V> impl<K, V> MultiMap<K, V>
where where
@ -334,13 +335,13 @@ where
impl<K, V> Default for MultiMap<K, V> { impl<K, V> Default for MultiMap<K, V> {
fn default() -> Self { fn default() -> Self {
Self(HashMap::new()) Self(FxHashMap::default())
} }
} }
/// Caches queries. /// Caches queries.
#[derive(Default)] #[derive(Default)]
struct QueryCache(RwLock<HashMap<u128, EcoVec<Content>>>); struct QueryCache(RwLock<FxHashMap<u128, EcoVec<Content>>>);
impl QueryCache { impl QueryCache {
fn get(&self, hash: u128) -> Option<EcoVec<Content>> { fn get(&self, hash: u128) -> Option<EcoVec<Content>> {
@ -364,11 +365,11 @@ pub struct IntrospectorBuilder {
pub pages: usize, pub pages: usize,
pub page_numberings: Vec<Option<Numbering>>, pub page_numberings: Vec<Option<Numbering>>,
pub page_supplements: Vec<Content>, pub page_supplements: Vec<Content>,
pub html_ids: HashMap<Location, EcoString>, pub html_ids: FxHashMap<Location, EcoString>,
seen: HashSet<Location>, seen: FxHashSet<Location>,
insertions: MultiMap<Location, Vec<Pair>>, insertions: MultiMap<Location, Vec<Pair>>,
keys: MultiMap<u128, Location>, keys: MultiMap<u128, Location>,
locations: HashMap<Location, usize>, locations: FxHashMap<Location, usize>,
labels: MultiMap<Label, usize>, labels: MultiMap<Label, usize>,
} }

View File

@ -1,9 +1,9 @@
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::Hash; use std::hash::Hash;
use std::sync::OnceLock; use std::sync::OnceLock;
use comemo::{Tracked, Validate}; use comemo::{Tracked, Validate};
use rustc_hash::FxHashMap;
use crate::introspection::{Introspector, Location}; use crate::introspection::{Introspector, Location};
@ -188,7 +188,7 @@ impl<'a> Locator<'a> {
SplitLocator { SplitLocator {
local: self.local, local: self.local,
outer: self.outer, outer: self.outer,
disambiguators: HashMap::new(), disambiguators: FxHashMap::default(),
} }
} }
@ -244,7 +244,7 @@ pub struct SplitLocator<'a> {
/// for all the layers beyond the memoization boundary on-demand. /// for all the layers beyond the memoization boundary on-demand.
outer: Option<&'a LocatorLink<'a>>, outer: Option<&'a LocatorLink<'a>>,
/// Simply counts up the number of times we've seen each local hash. /// Simply counts up the number of times we've seen each local hash.
disambiguators: HashMap<u128, usize>, disambiguators: FxHashMap<u128, usize>,
} }
impl<'a> SplitLocator<'a> { impl<'a> SplitLocator<'a> {

View File

@ -1,5 +1,4 @@
use std::any::TypeId; use std::any::TypeId;
use std::collections::HashMap;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::path::Path; use std::path::Path;
@ -14,6 +13,7 @@ use hayagriva::{
SpecificLocator, citationberg, SpecificLocator, citationberg,
}; };
use indexmap::IndexMap; use indexmap::IndexMap;
use rustc_hash::{FxBuildHasher, FxHashMap};
use smallvec::{SmallVec, smallvec}; use smallvec::{SmallVec, smallvec};
use typst_syntax::{Span, Spanned, SyntaxMode}; use typst_syntax::{Span, Spanned, SyntaxMode};
use typst_utils::{ManuallyHash, PicoStr}; use typst_utils::{ManuallyHash, PicoStr};
@ -217,7 +217,9 @@ impl LocalName for Packed<BibliographyElem> {
/// A loaded bibliography. /// A loaded bibliography.
#[derive(Clone, PartialEq, Hash)] #[derive(Clone, PartialEq, Hash)]
pub struct Bibliography(Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry>>>); pub struct Bibliography(
Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry, FxBuildHasher>>>,
);
impl Bibliography { impl Bibliography {
/// Load a bibliography from data sources. /// Load a bibliography from data sources.
@ -234,7 +236,7 @@ impl Bibliography {
#[comemo::memoize] #[comemo::memoize]
#[typst_macros::time(name = "load bibliography")] #[typst_macros::time(name = "load bibliography")]
fn decode(data: &[Loaded]) -> SourceResult<Bibliography> { fn decode(data: &[Loaded]) -> SourceResult<Bibliography> {
let mut map = IndexMap::new(); let mut map = IndexMap::default();
let mut duplicates = Vec::<EcoString>::new(); let mut duplicates = Vec::<EcoString>::new();
// We might have multiple bib/yaml files // We might have multiple bib/yaml files
@ -486,7 +488,7 @@ impl IntoValue for CslSource {
/// citations to do it. /// citations to do it.
pub struct Works { pub struct Works {
/// Maps from the location of a citation group to its rendered content. /// Maps from the location of a citation group to its rendered content.
pub citations: HashMap<Location, SourceResult<Content>>, pub citations: FxHashMap<Location, SourceResult<Content>>,
/// Lists all references in the bibliography, with optional prefix, or /// Lists all references in the bibliography, with optional prefix, or
/// `None` if the citation style can't be used for bibliographies. /// `None` if the citation style can't be used for bibliographies.
pub references: Option<Vec<(Option<Content>, Content)>>, pub references: Option<Vec<(Option<Content>, Content)>>,
@ -528,7 +530,7 @@ struct Generator<'a> {
/// bibliography driver and needed when processing hayagriva's output. /// bibliography driver and needed when processing hayagriva's output.
infos: Vec<GroupInfo>, infos: Vec<GroupInfo>,
/// Citations with unresolved keys. /// Citations with unresolved keys.
failures: HashMap<Location, SourceResult<Content>>, failures: FxHashMap<Location, SourceResult<Content>>,
} }
/// Details about a group of merged citations. All citations are put into groups /// Details about a group of merged citations. All citations are put into groups
@ -571,7 +573,7 @@ impl<'a> Generator<'a> {
bibliography, bibliography,
groups, groups,
infos, infos,
failures: HashMap::new(), failures: FxHashMap::default(),
}) })
} }
@ -702,10 +704,10 @@ impl<'a> Generator<'a> {
fn display_citations( fn display_citations(
&mut self, &mut self,
rendered: &hayagriva::Rendered, rendered: &hayagriva::Rendered,
) -> StrResult<HashMap<Location, SourceResult<Content>>> { ) -> StrResult<FxHashMap<Location, SourceResult<Content>>> {
// Determine for each citation key where in the bibliography it is, // Determine for each citation key where in the bibliography it is,
// so that we can link there. // so that we can link there.
let mut links = HashMap::new(); let mut links = FxHashMap::default();
if let Some(bibliography) = &rendered.bibliography { if let Some(bibliography) = &rendered.bibliography {
let location = self.bibliography.location().unwrap(); let location = self.bibliography.location().unwrap();
for (k, item) in bibliography.items.iter().enumerate() { for (k, item) in bibliography.items.iter().enumerate() {
@ -760,7 +762,7 @@ impl<'a> Generator<'a> {
// Determine for each citation key where it first occurred, so that we // Determine for each citation key where it first occurred, so that we
// can link there. // can link there.
let mut first_occurrences = HashMap::new(); let mut first_occurrences = FxHashMap::default();
for info in &self.infos { for info in &self.infos {
for subinfo in &info.subinfos { for subinfo in &info.subinfos {
let key = subinfo.key.resolve(); let key = subinfo.key.resolve();

View File

@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::str::FromStr; use std::str::FromStr;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::FxHashMap;
use crate::diag::Hint; use crate::diag::Hint;
use crate::foundations::{StyleChain, cast}; use crate::foundations::{StyleChain, cast};
@ -278,13 +278,13 @@ pub fn localized_str(lang: Lang, region: Option<Region>, key: &str) -> &'static
fn parse_language_bundle( fn parse_language_bundle(
lang: Lang, lang: Lang,
region: Option<Region>, region: Option<Region>,
) -> Result<HashMap<&'static str, &'static str>, &'static str> { ) -> Result<FxHashMap<&'static str, &'static str>, &'static str> {
let language_tuple = TRANSLATIONS.iter().find(|it| it.0 == lang_str(lang, region)); let language_tuple = TRANSLATIONS.iter().find(|it| it.0 == lang_str(lang, region));
let Some((_lang_name, language_file)) = language_tuple else { let Some((_lang_name, language_file)) = language_tuple else {
return Ok(HashMap::new()); return Ok(FxHashMap::default());
}; };
let mut bundle = HashMap::new(); let mut bundle = FxHashMap::default();
let lines = language_file.trim().lines(); let lines = language_file.trim().lines();
for line in lines { for line in lines {
if line.trim().starts_with('#') { if line.trim().starts_with('#') {
@ -313,9 +313,9 @@ fn lang_str(lang: Lang, region: Option<Region>) -> EcoString {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::collections::HashSet;
use std::path::PathBuf; use std::path::PathBuf;
use rustc_hash::FxHashSet;
use typst_utils::option_eq; use typst_utils::option_eq;
use super::*; use super::*;
@ -337,7 +337,7 @@ mod tests {
#[test] #[test]
fn test_all_translations_included() { fn test_all_translations_included() {
let defined_keys = let defined_keys =
HashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang)); FxHashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang));
let mut checked = 0; let mut checked = 0;
for file in translation_files_iter() { for file in translation_files_iter() {
assert!( assert!(

View File

@ -1,8 +1,8 @@
use std::collections::HashMap;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use comemo::Tracked; use comemo::Tracked;
use rustc_hash::FxHashMap;
use siphasher::sip128::{Hasher128, SipHasher13}; use siphasher::sip128::{Hasher128, SipHasher13};
use crate::World; use crate::World;
@ -144,9 +144,9 @@ struct FontResolver<'a> {
/// The active list of font families at the location of the SVG. /// The active list of font families at the location of the SVG.
families: &'a [&'a str], families: &'a [&'a str],
/// A mapping from Typst font indices to fontdb IDs. /// A mapping from Typst font indices to fontdb IDs.
to_id: HashMap<usize, Option<fontdb::ID>>, to_id: FxHashMap<usize, Option<fontdb::ID>>,
/// The reverse mapping. /// The reverse mapping.
from_id: HashMap<fontdb::ID, Font>, from_id: FxHashMap<fontdb::ID, Font>,
/// Accumulates a hash of all used fonts. /// Accumulates a hash of all used fonts.
hasher: SipHasher13, hasher: SipHasher13,
} }
@ -162,8 +162,8 @@ impl<'a> FontResolver<'a> {
book, book,
world, world,
families, families,
to_id: HashMap::new(), to_id: FxHashMap::default(),
from_id: HashMap::new(), from_id: FxHashMap::default(),
hasher: SipHasher13::new(), hasher: SipHasher13::new(),
} }
} }

View File

@ -26,6 +26,7 @@ image = { workspace = true }
infer = { workspace = true } infer = { workspace = true }
krilla = { workspace = true } krilla = { workspace = true }
krilla-svg = { workspace = true } krilla-svg = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
[lints] [lints]

View File

@ -1,4 +1,4 @@
use std::collections::{BTreeMap, HashMap, HashSet}; use std::collections::BTreeMap;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use ecow::{EcoVec, eco_format}; use ecow::{EcoVec, eco_format};
@ -13,6 +13,7 @@ use krilla::pdf::PdfError;
use krilla::surface::Surface; use krilla::surface::Surface;
use krilla::{Document, SerializeSettings}; use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph; use krilla_svg::render_svg_glyph;
use rustc_hash::{FxHashMap, FxHashSet};
use typst_library::diag::{SourceDiagnostic, SourceResult, bail, error}; use typst_library::diag::{SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{NativeElement, Repr}; use typst_library::foundations::{NativeElement, Repr};
use typst_library::introspection::Location; use typst_library::introspection::Location;
@ -207,22 +208,22 @@ impl FrameContext {
/// Globally needed context for converting a typst document. /// Globally needed context for converting a typst document.
pub(crate) struct GlobalContext<'a> { pub(crate) struct GlobalContext<'a> {
/// Cache the conversion between krilla and Typst fonts (forward and backward). /// Cache the conversion between krilla and Typst fonts (forward and backward).
pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>, pub(crate) fonts_forward: FxHashMap<Font, krilla::text::Font>,
pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>, pub(crate) fonts_backward: FxHashMap<krilla::text::Font, Font>,
/// Mapping between images and their span. /// Mapping between images and their span.
// Note: In theory, the same image can have multiple spans // Note: In theory, the same image can have multiple spans
// if it appears in the document multiple times. We just store the // if it appears in the document multiple times. We just store the
// first appearance, though. // first appearance, though.
pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>, pub(crate) image_to_spans: FxHashMap<krilla::image::Image, Span>,
/// The spans of all images that appear in the document. We use this so /// The spans of all images that appear in the document. We use this so
/// we can give more accurate error messages. /// we can give more accurate error messages.
pub(crate) image_spans: HashSet<Span>, pub(crate) image_spans: FxHashSet<Span>,
/// The document to convert. /// The document to convert.
pub(crate) document: &'a PagedDocument, pub(crate) document: &'a PagedDocument,
/// Options for PDF export. /// Options for PDF export.
pub(crate) options: &'a PdfOptions<'a>, pub(crate) options: &'a PdfOptions<'a>,
/// Mapping between locations in the document and named destinations. /// Mapping between locations in the document and named destinations.
pub(crate) loc_to_names: HashMap<Location, NamedDestination>, pub(crate) loc_to_names: FxHashMap<Location, NamedDestination>,
/// The languages used throughout the document. /// The languages used throughout the document.
pub(crate) languages: BTreeMap<Lang, usize>, pub(crate) languages: BTreeMap<Lang, usize>,
pub(crate) page_index_converter: PageIndexConverter, pub(crate) page_index_converter: PageIndexConverter,
@ -232,17 +233,17 @@ impl<'a> GlobalContext<'a> {
pub(crate) fn new( pub(crate) fn new(
document: &'a PagedDocument, document: &'a PagedDocument,
options: &'a PdfOptions, options: &'a PdfOptions,
loc_to_names: HashMap<Location, NamedDestination>, loc_to_names: FxHashMap<Location, NamedDestination>,
page_index_converter: PageIndexConverter, page_index_converter: PageIndexConverter,
) -> GlobalContext<'a> { ) -> GlobalContext<'a> {
Self { Self {
fonts_forward: HashMap::new(), fonts_forward: FxHashMap::default(),
fonts_backward: HashMap::new(), fonts_backward: FxHashMap::default(),
document, document,
options, options,
loc_to_names, loc_to_names,
image_to_spans: HashMap::new(), image_to_spans: FxHashMap::default(),
image_spans: HashSet::new(), image_spans: FxHashSet::default(),
languages: BTreeMap::new(), languages: BTreeMap::new(),
page_index_converter, page_index_converter,
} }
@ -632,13 +633,13 @@ fn to_span(loc: Option<krilla::surface::Location>) -> Span {
fn collect_named_destinations( fn collect_named_destinations(
document: &PagedDocument, document: &PagedDocument,
pic: &PageIndexConverter, pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> { ) -> FxHashMap<Location, NamedDestination> {
let mut locs_to_names = HashMap::new(); let mut locs_to_names = FxHashMap::default();
// Find all headings that have a label and are the first among other // Find all headings that have a label and are the first among other
// headings with the same label. // headings with the same label.
let matches: Vec<_> = { let matches: Vec<_> = {
let mut seen = HashSet::new(); let mut seen = FxHashSet::default();
document document
.introspector .introspector
.query(&HeadingElem::ELEM.select()) .query(&HeadingElem::ELEM.select())
@ -673,13 +674,13 @@ fn collect_named_destinations(
} }
pub(crate) struct PageIndexConverter { pub(crate) struct PageIndexConverter {
page_indices: HashMap<usize, usize>, page_indices: FxHashMap<usize, usize>,
skipped_pages: usize, skipped_pages: usize,
} }
impl PageIndexConverter { impl PageIndexConverter {
pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self { pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
let mut page_indices = HashMap::new(); let mut page_indices = FxHashMap::default();
let mut skipped_pages = 0; let mut skipped_pages = 0;
for i in 0..document.pages.len() { for i in 0..document.pages.len() {

View File

@ -24,6 +24,7 @@ ecow = { workspace = true }
flate2 = { workspace = true } flate2 = { workspace = true }
hayro = { workspace = true } hayro = { workspace = true }
image = { workspace = true } image = { workspace = true }
rustc-hash = { workspace = true }
ttf-parser = { workspace = true } ttf-parser = { workspace = true }
xmlparser = { workspace = true } xmlparser = { workspace = true }
xmlwriter = { workspace = true } xmlwriter = { workspace = true }

View File

@ -6,10 +6,10 @@ mod shape;
mod text; mod text;
pub use image::{convert_image_scaling, convert_image_to_base64_url}; pub use image::{convert_image_scaling, convert_image_to_base64_url};
use rustc_hash::FxHashMap;
use typst_library::introspection::Introspector; use typst_library::introspection::Introspector;
use typst_library::model::Destination; use typst_library::model::Destination;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter, Write}; use std::fmt::{self, Display, Formatter, Write};
use ecow::EcoString; use ecow::EcoString;
@ -421,12 +421,16 @@ impl<'a> SVGRenderer<'a> {
struct Deduplicator<T> { struct Deduplicator<T> {
kind: char, kind: char,
vec: Vec<(u128, T)>, vec: Vec<(u128, T)>,
present: HashMap<u128, Id>, present: FxHashMap<u128, Id>,
} }
impl<T> Deduplicator<T> { impl<T> Deduplicator<T> {
fn new(kind: char) -> Self { fn new(kind: char) -> Self {
Self { kind, vec: Vec::new(), present: HashMap::new() } Self {
kind,
vec: Vec::new(),
present: FxHashMap::default(),
}
} }
/// Inserts a value into the vector. If the hash is already present, returns /// Inserts a value into the vector. If the hash is already present, returns

View File

@ -16,6 +16,7 @@ readme = { workspace = true }
typst-timing = { workspace = true } typst-timing = { workspace = true }
typst-utils = { workspace = true } typst-utils = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
toml = { workspace = true } toml = { workspace = true }
unicode-ident = { workspace = true } unicode-ident = { workspace = true }

View File

@ -1,21 +1,22 @@
//! File and package management. //! File and package management.
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU16; use std::num::NonZeroU16;
use std::sync::{LazyLock, RwLock}; use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
use crate::VirtualPath; use crate::VirtualPath;
use crate::package::PackageSpec; use crate::package::PackageSpec;
/// The global package-path interner. /// The global package-path interner.
static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| { static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
RwLock::new(Interner { to_id: HashMap::new(), from_id: Vec::new() }) RwLock::new(Interner { to_id: FxHashMap::default(), from_id: Vec::new() })
}); });
/// A package-path interner. /// A package-path interner.
struct Interner { struct Interner {
to_id: HashMap<Pair, FileId>, to_id: FxHashMap<Pair, FileId>,
from_id: Vec<Pair>, from_id: Vec<Pair>,
} }

View File

@ -1,8 +1,8 @@
use std::collections::{HashMap, HashSet};
use std::mem; use std::mem;
use std::ops::{Index, IndexMut, Range}; use std::ops::{Index, IndexMut, Range};
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_utils::default_math_class; use typst_utils::default_math_class;
use unicode_math_class::MathClass; use unicode_math_class::MathClass;
@ -481,7 +481,7 @@ fn math_args(p: &mut Parser) {
let mut has_arrays = false; let mut has_arrays = false;
let mut maybe_array_start = p.marker(); let mut maybe_array_start = p.marker();
let mut seen = HashSet::new(); let mut seen = FxHashSet::default();
while !p.at_set(syntax_set!(End, Dollar, RightParen)) { while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
positional = math_arg(p, &mut seen); positional = math_arg(p, &mut seen);
@ -522,7 +522,7 @@ fn math_args(p: &mut Parser) {
/// Parses a single argument in a math argument list. /// Parses a single argument in a math argument list.
/// ///
/// Returns whether the parsed argument was positional or not. /// Returns whether the parsed argument was positional or not.
fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) -> bool { fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) -> bool {
let m = p.marker(); let m = p.marker();
let start = p.current_start(); let start = p.current_start();
@ -831,7 +831,7 @@ fn let_binding(p: &mut Parser) {
closure = true; closure = true;
} }
} else { } else {
pattern(p, false, &mut HashSet::new(), None); pattern(p, false, &mut FxHashSet::default(), None);
other = true; other = true;
} }
@ -923,7 +923,7 @@ fn for_loop(p: &mut Parser) {
let m = p.marker(); let m = p.marker();
p.assert(SyntaxKind::For); p.assert(SyntaxKind::For);
let mut seen = HashSet::new(); let mut seen = FxHashSet::default();
pattern(p, false, &mut seen, None); pattern(p, false, &mut seen, None);
if p.at(SyntaxKind::Comma) { if p.at(SyntaxKind::Comma) {
@ -1084,7 +1084,7 @@ fn expr_with_paren(p: &mut Parser, atomic: bool) {
} else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized { } else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized {
p.restore(checkpoint); p.restore(checkpoint);
let m = p.marker(); let m = p.marker();
destructuring_or_parenthesized(p, true, &mut HashSet::new()); destructuring_or_parenthesized(p, true, &mut FxHashSet::default());
if !p.expect(SyntaxKind::Eq) { if !p.expect(SyntaxKind::Eq) {
return; return;
} }
@ -1107,7 +1107,7 @@ fn parenthesized_or_array_or_dict(p: &mut Parser) -> SyntaxKind {
count: 0, count: 0,
maybe_just_parens: true, maybe_just_parens: true,
kind: None, kind: None,
seen: HashSet::new(), seen: FxHashSet::default(),
}; };
// An edge case with parens is whether we can interpret a leading spread // An edge case with parens is whether we can interpret a leading spread
@ -1169,7 +1169,7 @@ struct GroupState {
/// The `SyntaxKind` to wrap as (if we've figured it out yet). /// The `SyntaxKind` to wrap as (if we've figured it out yet).
kind: Option<SyntaxKind>, kind: Option<SyntaxKind>,
/// Store named arguments so we can give an error if they're repeated. /// Store named arguments so we can give an error if they're repeated.
seen: HashSet<EcoString>, seen: FxHashSet<EcoString>,
} }
/// Parses a single item in an array or dictionary. /// Parses a single item in an array or dictionary.
@ -1238,7 +1238,7 @@ fn args(p: &mut Parser) {
p.with_nl_mode(AtNewline::Continue, |p| { p.with_nl_mode(AtNewline::Continue, |p| {
p.assert(SyntaxKind::LeftParen); p.assert(SyntaxKind::LeftParen);
let mut seen = HashSet::new(); let mut seen = FxHashSet::default();
while !p.current().is_terminator() { while !p.current().is_terminator() {
if !p.at_set(set::ARG) { if !p.at_set(set::ARG) {
p.unexpected(); p.unexpected();
@ -1264,7 +1264,7 @@ fn args(p: &mut Parser) {
} }
/// Parses a single argument in an argument list. /// Parses a single argument in an argument list.
fn arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) { fn arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) {
let m = p.marker(); let m = p.marker();
// Parses a spread argument: `..args`. // Parses a spread argument: `..args`.
@ -1301,7 +1301,7 @@ fn params(p: &mut Parser) {
p.with_nl_mode(AtNewline::Continue, |p| { p.with_nl_mode(AtNewline::Continue, |p| {
p.assert(SyntaxKind::LeftParen); p.assert(SyntaxKind::LeftParen);
let mut seen = HashSet::new(); let mut seen = FxHashSet::default();
let mut sink = false; let mut sink = false;
while !p.current().is_terminator() { while !p.current().is_terminator() {
@ -1323,7 +1323,7 @@ fn params(p: &mut Parser) {
} }
/// Parses a single parameter in a parameter list. /// Parses a single parameter in a parameter list.
fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) { fn param<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>, sink: &mut bool) {
let m = p.marker(); let m = p.marker();
// Parses argument sink: `..sink`. // Parses argument sink: `..sink`.
@ -1358,7 +1358,7 @@ fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
fn pattern<'s>( fn pattern<'s>(
p: &mut Parser<'s>, p: &mut Parser<'s>,
reassignment: bool, reassignment: bool,
seen: &mut HashSet<&'s str>, seen: &mut FxHashSet<&'s str>,
dupe: Option<&'s str>, dupe: Option<&'s str>,
) { ) {
match p.current() { match p.current() {
@ -1372,7 +1372,7 @@ fn pattern<'s>(
fn destructuring_or_parenthesized<'s>( fn destructuring_or_parenthesized<'s>(
p: &mut Parser<'s>, p: &mut Parser<'s>,
reassignment: bool, reassignment: bool,
seen: &mut HashSet<&'s str>, seen: &mut FxHashSet<&'s str>,
) { ) {
let mut sink = false; let mut sink = false;
let mut count = 0; let mut count = 0;
@ -1410,7 +1410,7 @@ fn destructuring_or_parenthesized<'s>(
fn destructuring_item<'s>( fn destructuring_item<'s>(
p: &mut Parser<'s>, p: &mut Parser<'s>,
reassignment: bool, reassignment: bool,
seen: &mut HashSet<&'s str>, seen: &mut FxHashSet<&'s str>,
maybe_just_parens: &mut bool, maybe_just_parens: &mut bool,
sink: &mut bool, sink: &mut bool,
) { ) {
@ -1457,7 +1457,7 @@ fn destructuring_item<'s>(
fn pattern_leaf<'s>( fn pattern_leaf<'s>(
p: &mut Parser<'s>, p: &mut Parser<'s>,
reassignment: bool, reassignment: bool,
seen: &mut HashSet<&'s str>, seen: &mut FxHashSet<&'s str>,
dupe: Option<&'s str>, dupe: Option<&'s str>,
) { ) {
if p.current().is_keyword() { if p.current().is_keyword() {
@ -1920,7 +1920,7 @@ struct MemoArena {
/// A map from the parser's current position to a range of previously parsed /// A map from the parser's current position to a range of previously parsed
/// nodes in the arena and a checkpoint of the parser's state. These allow /// nodes in the arena and a checkpoint of the parser's state. These allow
/// us to reset the parser to avoid parsing the same location again. /// us to reset the parser to avoid parsing the same location again.
memo_map: HashMap<MemoKey, (Range<usize>, PartialState)>, memo_map: FxHashMap<MemoKey, (Range<usize>, PartialState)>,
} }
/// A type alias for the memo key so it doesn't get confused with other usizes. /// A type alias for the memo key so it doesn't get confused with other usizes.

View File

@ -16,6 +16,7 @@ readme = { workspace = true }
once_cell = { workspace = true } once_cell = { workspace = true }
portable-atomic = { workspace = true } portable-atomic = { workspace = true }
rayon = { workspace = true } rayon = { workspace = true }
rustc-hash = { workspace = true }
siphasher = { workspace = true } siphasher = { workspace = true }
thin-vec = { workspace = true } thin-vec = { workspace = true }
unicode-math-class = { workspace = true } unicode-math-class = { workspace = true }

View File

@ -1,22 +1,24 @@
use std::borrow::Borrow; use std::borrow::Borrow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt::{self, Debug, Display, Formatter}; use std::fmt::{self, Debug, Display, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::ops::Deref; use std::ops::Deref;
use std::sync::{LazyLock, RwLock}; use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
/// Marks a number as a bitcode encoded `PicoStr`. /// Marks a number as a bitcode encoded `PicoStr`.
const MARKER: u64 = 1 << 63; const MARKER: u64 = 1 << 63;
/// The global runtime string interner. /// The global runtime string interner.
static INTERNER: LazyLock<RwLock<Interner>> = static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
LazyLock::new(|| RwLock::new(Interner { seen: HashMap::new(), strings: Vec::new() })); RwLock::new(Interner { seen: FxHashMap::default(), strings: Vec::new() })
});
/// A string interner. /// A string interner.
struct Interner { struct Interner {
seen: HashMap<&'static str, PicoStr>, seen: FxHashMap<&'static str, PicoStr>,
strings: Vec<&'static str>, strings: Vec<&'static str>,
} }

View File

@ -24,6 +24,7 @@ typst-timing = { workspace = true }
typst-utils = { workspace = true } typst-utils = { workspace = true }
comemo = { workspace = true } comemo = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
rustc-hash = { workspace = true }
[lints] [lints]
workspace = true workspace = true

View File

@ -38,11 +38,11 @@ pub use typst_syntax as syntax;
#[doc(inline)] #[doc(inline)]
pub use typst_utils as utils; pub use typst_utils as utils;
use std::collections::HashSet;
use std::sync::LazyLock; use std::sync::LazyLock;
use comemo::{Track, Tracked, Validate}; use comemo::{Track, Tracked, Validate};
use ecow::{EcoString, EcoVec, eco_format, eco_vec}; use ecow::{EcoString, EcoVec, eco_format, eco_vec};
use rustc_hash::FxHashSet;
use typst_html::HtmlDocument; use typst_html::HtmlDocument;
use typst_library::diag::{ use typst_library::diag::{
FileError, SourceDiagnostic, SourceResult, Warned, bail, warning, FileError, SourceDiagnostic, SourceResult, Warned, bail, warning,
@ -176,7 +176,7 @@ fn compile_impl<D: Document>(
/// Deduplicate diagnostics. /// Deduplicate diagnostics.
fn deduplicate(mut diags: EcoVec<SourceDiagnostic>) -> EcoVec<SourceDiagnostic> { fn deduplicate(mut diags: EcoVec<SourceDiagnostic>) -> EcoVec<SourceDiagnostic> {
let mut unique = HashSet::new(); let mut unique = FxHashSet::default();
diags.retain(|diag| { diags.retain(|diag| {
let hash = typst_utils::hash128(&(&diag.span, &diag.message)); let hash = typst_utils::hash128(&(&diag.span, &diag.message));
unique.insert(hash) unique.insert(hash)

View File

@ -26,6 +26,7 @@ codex = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
heck = { workspace = true } heck = { workspace = true }
pulldown-cmark = { workspace = true } pulldown-cmark = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true, optional = true } serde_json = { workspace = true, optional = true }
serde_yaml = { workspace = true } serde_yaml = { workspace = true }

View File

@ -1,7 +1,7 @@
use std::cmp::Reverse; use std::cmp::Reverse;
use std::collections::HashMap;
use std::fmt::Write; use std::fmt::Write;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{Html, Resolver}; use crate::{Html, Resolver};
@ -12,7 +12,7 @@ pub fn contributors(resolver: &dyn Resolver, from: &str, to: &str) -> Option<Htm
let bots = ["dependabot[bot]"]; let bots = ["dependabot[bot]"];
// Determine number of contributions per person. // Determine number of contributions per person.
let mut contributors = HashMap::<String, Contributor>::new(); let mut contributors = FxHashMap::<String, Contributor>::default();
for commit in resolver.commits(from, to) { for commit in resolver.commits(from, to) {
contributors contributors
.entry(commit.author.login.clone()) .entry(commit.author.login.clone())

View File

@ -9,10 +9,9 @@ pub use self::contribs::*;
pub use self::html::*; pub use self::html::*;
pub use self::model::*; pub use self::model::*;
use std::collections::HashSet;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use heck::ToTitleCase; use heck::ToTitleCase;
use rustc_hash::FxHashSet;
use serde::Deserialize; use serde::Deserialize;
use serde_yaml as yaml; use serde_yaml as yaml;
use std::sync::LazyLock; use std::sync::LazyLock;
@ -260,7 +259,7 @@ fn category_page(resolver: &dyn Resolver, category: Category) -> PageModel {
shorthands = Some(ShorthandsModel { markup, math }); shorthands = Some(ShorthandsModel { markup, math });
} }
let mut skip = HashSet::new(); let mut skip = FxHashSet::default();
if category == Category::Math { if category == Category::Math {
skip = GROUPS skip = GROUPS
.iter() .iter()

View File

@ -46,6 +46,7 @@ oxipng = { workspace = true }
parking_lot = { workspace = true } parking_lot = { workspace = true }
rayon = { workspace = true } rayon = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
rustc-hash = { workspace = true }
tiny-skia = { workspace = true } tiny-skia = { workspace = true }
unscanny = { workspace = true } unscanny = { workspace = true }
walkdir = { workspace = true } walkdir = { workspace = true }

View File

@ -1,4 +1,3 @@
use std::collections::{HashMap, HashSet};
use std::fmt::{self, Display, Formatter}; use std::fmt::{self, Display, Formatter};
use std::ops::Range; use std::ops::Range;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -6,6 +5,7 @@ use std::str::FromStr;
use std::sync::LazyLock; use std::sync::LazyLock;
use ecow::{EcoString, eco_format}; use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_syntax::package::PackageVersion; use typst_syntax::package::PackageVersion;
use typst_syntax::{ use typst_syntax::{
FileId, Lines, Source, VirtualPath, is_id_continue, is_ident, is_newline, FileId, Lines, Source, VirtualPath, is_id_continue, is_ident, is_newline,
@ -122,7 +122,7 @@ impl Display for NoteKind {
struct Collector { struct Collector {
tests: Vec<Test>, tests: Vec<Test>,
errors: Vec<TestParseError>, errors: Vec<TestParseError>,
seen: HashMap<EcoString, (FilePos, Vec<Attr>)>, seen: FxHashMap<EcoString, (FilePos, Vec<Attr>)>,
skipped: usize, skipped: usize,
} }
@ -132,7 +132,7 @@ impl Collector {
Self { Self {
tests: vec![], tests: vec![],
errors: vec![], errors: vec![],
seen: HashMap::new(), seen: FxHashMap::default(),
skipped: 0, skipped: 0,
} }
} }
@ -507,7 +507,7 @@ impl<'a> Parser<'a> {
/// Whether a test is within the selected set to run. /// Whether a test is within the selected set to run.
fn selected(name: &str, abs: PathBuf) -> bool { fn selected(name: &str, abs: PathBuf) -> bool {
static SKIPPED: LazyLock<HashSet<&'static str>> = LazyLock::new(|| { static SKIPPED: LazyLock<FxHashSet<&'static str>> = LazyLock::new(|| {
String::leak(std::fs::read_to_string(crate::SKIP_PATH).unwrap()) String::leak(std::fs::read_to_string(crate::SKIP_PATH).unwrap())
.lines() .lines()
.map(|line| line.trim()) .map(|line| line.trim())

View File

@ -1,5 +1,4 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap;
use std::fs; use std::fs;
use std::io::Write; use std::io::Write;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -8,6 +7,7 @@ use std::sync::OnceLock;
use comemo::Tracked; use comemo::Tracked;
use parking_lot::Mutex; use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use typst::diag::{At, FileError, FileResult, SourceResult, StrResult, bail}; use typst::diag::{At, FileError, FileResult, SourceResult, StrResult, bail};
use typst::engine::Engine; use typst::engine::Engine;
use typst::foundations::{ use typst::foundations::{
@ -108,7 +108,7 @@ struct TestBase {
library: LazyHash<Library>, library: LazyHash<Library>,
book: LazyHash<FontBook>, book: LazyHash<FontBook>,
fonts: Vec<Font>, fonts: Vec<Font>,
slots: Mutex<HashMap<FileId, FileSlot>>, slots: Mutex<FxHashMap<FileId, FileSlot>>,
} }
impl Default for TestBase { impl Default for TestBase {
@ -122,7 +122,7 @@ impl Default for TestBase {
library: LazyHash::new(library()), library: LazyHash::new(library()),
book: LazyHash::new(FontBook::from_fonts(&fonts)), book: LazyHash::new(FontBook::from_fonts(&fonts)),
fonts, fonts,
slots: Mutex::new(HashMap::new()), slots: Mutex::new(FxHashMap::default()),
} }
} }
} }