Skip to content

Commit

Permalink
feat: sort items to keep deterministic
Browse files Browse the repository at this point in the history
  • Loading branch information
JSerFeng committed Feb 14, 2025
1 parent ace605f commit 418d386
Show file tree
Hide file tree
Showing 6 changed files with 100 additions and 47 deletions.
78 changes: 45 additions & 33 deletions crates/rspack_plugin_split_chunks/src/plugin/max_size.rs
Original file line number Diff line number Diff line change
Expand Up @@ -116,8 +116,6 @@ fn hash_filename(filename: &str, options: &CompilerOptions) -> String {
hash_digest.rendered(8).to_string()
}

static REPLACE_MODULE_IDENTIFIER_REG: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^.*!|\?[^?!]*$").expect("regexp init failed"));
static REPLACE_RELATIVE_PREFIX_REG: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^(\.\.?\/)+").expect("regexp init failed"));
static REPLACE_ILLEGEL_LETTER_REG: LazyLock<Regex> =
Expand All @@ -144,7 +142,7 @@ fn get_too_small_types(
if let Some(min_ty_size) = min_size.get(ty)
&& ty_size < min_ty_size
{
types.insert(ty.clone());
types.insert(*ty);
}
});
types
Expand All @@ -156,7 +154,7 @@ fn remove_problematic_nodes(
min_size: &SplitChunkSizes,
result: &mut Vec<Group>,
) -> bool {
let problem_types = get_too_small_types(&considered_size, min_size);
let problem_types = get_too_small_types(considered_size, min_size);

if !problem_types.is_empty() {
// We hit an edge case where the working set is already smaller than minSize
Expand Down Expand Up @@ -198,17 +196,18 @@ fn remove_problematic_nodes(
.filter(|(ty, _)| problem_types.contains(ty))
.count();

if min_matched < group_matched {
return group;
} else if min_matched > group_matched {
return min;
};

if sum_for_types(&min.size, &problem_types) > sum_for_types(&group.size, &problem_types) {
return group;
match min_matched.cmp(&group_matched) {
std::cmp::Ordering::Less => group,
std::cmp::Ordering::Greater => min,
std::cmp::Ordering::Equal => {
if sum_for_types(&min.size, &problem_types) > sum_for_types(&group.size, &problem_types)
{
group
} else {
min
}
}
}

min
});

let best_group: &mut Group =
Expand All @@ -231,6 +230,32 @@ fn sum_for_types(size: &SplitChunkSizes, ty: &FxHashSet<SourceType>) -> f64 {
.sum()
}

fn get_key(module: &dyn Module, delemeter: &str, compilation: &Compilation) -> String {

Check warning on line 233 in crates/rspack_plugin_split_chunks/src/plugin/max_size.rs

View workflow job for this annotation

GitHub Actions / Spell check

"delemeter" should be "delimiter".
let ident = make_paths_relative(
compilation.options.context.as_str(),
module.identifier().as_str(),
);
let name = if let Some(name_for_condition) = module.name_for_condition() {
Cow::Owned(make_paths_relative(
compilation.options.context.as_str(),
&name_for_condition,
))
} else {
static RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^.*!|\?[^?!]*$").expect("should build regex"));
RE.replace_all(&ident, "")
};

let full_key = format!(
"{}{}{}",
name,
delemeter,

Check warning on line 252 in crates/rspack_plugin_split_chunks/src/plugin/max_size.rs

View workflow job for this annotation

GitHub Actions / Spell check

"delemeter" should be "delimiter".
hash_filename(&ident, &compilation.options)
);

request_to_id(&full_key)
}

fn deterministic_grouping_for_modules(
compilation: &Compilation,
chunk: &ChunkUkey,
Expand All @@ -244,32 +269,18 @@ fn deterministic_grouping_for_modules(
let items = compilation
.chunk_graph
.get_chunk_modules(chunk, &module_graph);
let context = compilation.options.context.as_ref();

let nodes = items.into_iter().map(|module| {
let module: &dyn Module = &**module;
let name: String = if let Some(name_for_condition) = module.name_for_condition() {
make_paths_relative(context, &name_for_condition)
} else {
let path = make_paths_relative(context, module.identifier().as_str());
REPLACE_MODULE_IDENTIFIER_REG
.replace_all(&path, "")
.to_string()
};
let key = format!(
"{}{}{}",
name,
delimiter,
hash_filename(&name, &compilation.options)
);

GroupItem {
module: module.identifier(),
size: get_size(module, compilation),
key: request_to_id(&key),
key: get_key(module, delimiter, compilation),
}
});

let initial_nodes = nodes
let mut initial_nodes = nodes
.into_iter()
.filter_map(|node| {
// The Module itself is already bigger than `allow_max_size`, we will create a chunk
Expand All @@ -291,6 +302,7 @@ fn deterministic_grouping_for_modules(
.collect::<Vec<_>>();

if !initial_nodes.is_empty() {
initial_nodes.sort_by(|a, b| a.key.cmp(&b.key));
let similarities = get_similarities(&initial_nodes);
let initial_group = Group::new(initial_nodes, None, similarities);

Expand Down Expand Up @@ -377,7 +389,7 @@ fn deterministic_grouping_for_modules(
&& right_size.bigger_than(min_size)
{
best_similarity = similarity;
best = pos as i32;
best = pos;
}
let size = &group.nodes[pos as usize].size;
left_size.add_by(size);
Expand Down Expand Up @@ -420,7 +432,7 @@ fn deterministic_grouping_for_modules(
/// Subtracts `size` from `total` in place, per source type.
///
/// Types present in `size` but absent from `total` are treated as having a
/// current total of `0.0`, so they end up negative in `total`.
fn subtract_size_from(total: &mut SplitChunkSizes, size: &SplitChunkSizes) {
  for (ty, ty_size) in size.iter() {
    let current = total.get(ty).copied().unwrap_or(0.0);
    total.insert(*ty, current - ty_size);
  }
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,34 +1,49 @@
const rspack = require("@rspack/core");

/** @type {import("@rspack/core").Configuration} */
module.exports = {
target: "node",
target: "web",
entry: "./src/index.js",
output: {
filename: "[name].js"
},
experiments: {
css: true
css: false
},
module: {
generator: {
"css/auto": {
exportsOnly: false
rules: [
{
test: /\.css$/,
use: [rspack.CssExtractRspackPlugin.loader, "css-loader"]
}
}
]
},
plugins: [new rspack.CssExtractRspackPlugin()],
performance: false,
optimization: {
chunkIds: "named",
usedExports: false,
sideEffects: false,
concatenateModules: false,
moduleIds: "named",
splitChunks: {
chunks: "all",
cacheGroups: {
default: false,
defaultVendors: false,
fragment: {
minChunks: 1,
maxSize: 200 * 1024,
// the user specifies that the css size in a chunk should also satisfy minSize,
// which is not met in this test, so the css module should be split
// and the js part re-checked
minSize: 100,

// there are 2 css, each one of them are only 120 bytes which is less than minSize
// so the total size of the css are 240 bytes which is greater than minSize
// so the nodes are
// [js js css js js js js js css]
// if scan from left to right, the minSize can only satisfy when scan to the last css
// if scan from right to left, the minSize can only satisfy when scan to the first css
// so split chunks should remove problematic nodes, in this case the 2 css
// and then recalculate the size of the rest of the nodes
minSize: 200,
priority: 10
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@ import './50k-1'
import './50k-2'
import './50k-3'
import './50k-4'
import './small.css'
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
body {
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
color: red
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
body {
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
/* small.css */
color: red
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,15 @@ module.exports = {
findBundle: function (i, options) {
// should split based on their file path
return [
"main.js",
"fragment-src_aaa_sync_recursive_.js",
"fragment-src_bbb_sync_recursive_.js",
"fragment-src_index_js.js"
// the total css size does not satisfy minSize, so the css modules
// are split out and we try again to see if the remaining size satisfies minSize;
// it does, so the js can be split
"fragment-src_aaa_index_js.js",
"fragment-src_aaa_small_css-src_small_css.css",
"fragment-src_bbb_index_js-src_aaa_small_css.js",
"fragment-src_ccc_50k-1_js-src_ccc_50k-2_js-src_ccc_50k-3_js-src_ccc_50k-4_js.js",
"fragment-src_index_js.js",
"main.js"
];
}
};

0 comments on commit 418d386

Please sign in to comment.