From 21ac78fda97eae260b8ffa9aaba6c016d63402a4 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Fri, 21 Jul 2023 18:22:59 +0200 Subject: [PATCH 01/23] first attempt --- converted.map | 1 + src/types.rs | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+) create mode 100644 converted.map diff --git a/converted.map b/converted.map new file mode 100644 index 00000000..0464e26d --- /dev/null +++ b/converted.map @@ -0,0 +1 @@ +{"version":3,"sources":["original.js"],"sourcesContent":["problems = 99"],"names":[],"mappings":"IAAA,MAAQ,C,iBAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/src/types.rs b/src/types.rs index d37d2fb5..637d108a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -825,6 +825,56 @@ impl SourceMap { Ok((sm, mapping)) } + + /// Transforms `self` by precomposing it with another sourcemap. + /// + /// This function assumes that `other` only maps between two files and + /// its target file is the source file of `self`. In other words, if `self` + /// maps from `minified.js` to `original_1.js`, …, `original_n.js`, then + /// `other` must map from `transformed.js` to `minitfied.js` for some file + /// `transformed.js`. The resulting sourcemap will then map from `transformed.js` + /// to to `original_1.js`, …, `original_n.js`. + /// + /// Mappings are composed in the obvious way: if `other` maps `(l₁, c₁)` to `(l₂, c₂)` and `self` + /// maps `(l₂', c₂')` to `(l₃, c₃)`, then `self.transform(&other)` maps `(l₁, c₁)` to `(l₃, c₃)`. + /// + /// The source root, sources, source contents, and names will be copied from `self`. The only information + /// that is used from `other` are the mappings. + pub fn transform(&self, other: &Self) -> Self { + let mut builder = SourceMapBuilder::new(self.file.as_deref()); + builder.set_source_root(self.get_source_root()); + + for &RawToken { + dst_line, + dst_col, + src_line, + src_col, + .. + } in &other.tokens + { + match self.lookup_token(src_line, src_col) { + Some(Token { raw, .. }) => { + let name = self.get_name(raw.name_id); + let source = self.get_source(raw.src_id); + + if let Some(source) = source { + let contents = self.get_source_contents(raw.src_id); + + let new_id = builder.add_source(source); + builder.set_source_contents(new_id, contents); + } + + builder.add(dst_line, dst_col, raw.src_line, raw.src_col, source, name); + } + + None => { + builder.add(dst_line, dst_col, u32::MAX, u32::MAX, None, None); + } + } + } + + builder.into_sourcemap() + } } impl SourceMapIndex { From 4a1e769d2c247cf0f5b58755730e0cb1b692649e Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Fri, 21 Jul 2023 22:16:18 +0200 Subject: [PATCH 02/23] Second try (clusterfuck) --- src/types.rs | 197 +++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 177 insertions(+), 20 deletions(-) diff --git a/src/types.rs b/src/types.rs index 637d108a..8d39182b 100644 --- a/src/types.rs +++ b/src/types.rs @@ -2,6 +2,7 @@ use std::borrow::Cow; use std::cmp::Ordering; use std::fmt; use std::io::{Read, Write}; +use std::ops::Bound; use std::path::Path; use crate::builder::SourceMapBuilder; @@ -844,31 +845,101 @@ impl SourceMap { let mut builder = SourceMapBuilder::new(self.file.as_deref()); builder.set_source_root(self.get_source_root()); - for &RawToken { - dst_line, - dst_col, - src_line, - src_col, - .. - } in &other.tokens - { - match self.lookup_token(src_line, src_col) { - Some(Token { raw, .. 
}) => { - let name = self.get_name(raw.name_id); - let source = self.get_source(raw.src_id); + let mut other_tokens = other.tokens.clone(); + other_tokens.sort_unstable_by_key(|token| (token.src_line, token.src_col)); - if let Some(source) = source { - let contents = self.get_source_contents(raw.src_id); + dbg!(&other_tokens); - let new_id = builder.add_source(source); - builder.set_source_contents(new_id, contents); - } + let mut other_tokens_iter = other_tokens.iter().peekable(); + + while let Some(¤t) = other_tokens_iter.next() { + if current.src_line == u32::MAX || current.src_col == u32::MAX { + builder.add( + current.dst_line, + current.dst_col, + u32::MAX, + u32::MAX, + None, + None, + ); + continue; + } - builder.add(dst_line, dst_col, raw.src_line, raw.src_col, source, name); + let first_idx = match dbg!(greatest_lower_bound( + dbg!(&self.index), + dbg!(&(¤t.src_line, ¤t.src_col)), + |(l, c, _)| (l, c), + )) { + None => Bound::Unbounded, + Some((_, _, idx)) => Bound::Included(*idx as usize), + }; + dbg!(first_idx); + // All tokens in `self` that are "covered" by the current token in `other`. + let self_tokens = match other_tokens_iter.peek() { + Some(&&next) => { + let last_idx = match greatest_lower_bound( + &self.index, + &(&next.src_line, &next.src_col), + |(l, c, _)| (l, c), + ) { + None => Bound::Unbounded, + Some((_, _, idx)) => Bound::Excluded(*idx as usize), + }; + + dbg!(last_idx); + + &self.tokens[(first_idx, last_idx)] } + None => &self.tokens[(first_idx, Bound::Unbounded)], + }; - None => { - builder.add(dst_line, dst_col, u32::MAX, u32::MAX, None, None); + dbg!(self_tokens); + + match self_tokens { + [] => { + builder.add( + current.dst_line, + current.dst_col, + u32::MAX, + u32::MAX, + None, + None, + ); + } + [_first, ..] => { + let (line_diff, col_diff) = ( + current.src_line as i32 - current.dst_line as i32, + current.src_col as i32 - current.dst_col as i32, + ); + + dbg!(line_diff, col_diff); + + for token in self_tokens { + let name = self.get_name(token.name_id); + let source = self.get_source(token.src_id); + + if let Some(source) = source { + let contents = self.get_source_contents(token.src_id); + + let new_id = builder.add_source(source); + builder.set_source_contents(new_id, contents); + } + + let (mut line, mut col) = (token.dst_line as i32, token.dst_col as i32); + if (line, col) >= (current.src_line as i32, current.src_col as i32) { + line -= line_diff; + col -= col_diff; + } + + builder.add( + dbg!(line as u32), + dbg!(col as u32), + token.src_line, + token.src_col, + source, + name, + ); + } } } } @@ -1160,4 +1231,90 @@ mod tests { assert_eq!(new_sm.debug_id, Some(DebugId::default())); } + + #[test] + fn test_transform() { + let first_sourcemap = br#"{ + "version":3, + "mappings":"IAAA,GAAG,uBAAQ,GAAG", + "names":[], + "sources":["original.js"], + "sourcesContent":["my problems = 99"] + }"#; + + let second_sourcemap = br#"{ + "version":3, + "mappings":"AAAA", + "names":[], + "sources":["edited.js"], + "sourcesContent":["var my answer/* ignore this */ = 42;"] + }"#; + + let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); + let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); + + let transformed = first_sourcemap.transform(&second_sourcemap); + + dbg!(transformed); + } + + #[test] + fn test_transform_2() { + let first_sourcemap = br#"{ + "version":3, + "mappings":"IAAA,GAAG,uBAAQ,GAAG", + "names":[], + "sources":["original.js"], + "sourcesContent":["my problems = 99"] + }"#; + + let second_sourcemap = br#"{ + 
"version":3, + "mappings":"AAAA,OAAa", + "names":[], + "sources":["edited.js"], + "sourcesContent":["var my answer/* ignore this */ = 42;"] + }"#; + + let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); + let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); + + let transformed = first_sourcemap.transform(&second_sourcemap); + + dbg!(&transformed); + + let mut f = std::fs::File::create("test_transform_2.map").unwrap(); + transformed.to_writer(&mut f).unwrap(); + } + + #[test] + fn test_transform_3() { + // Maps "var my answer/* ignore this */ = 42;\nthis is not nice" to "my problems = 99\nthis is nice". + let first_sourcemap = br#"{ + "version":3, + "mappings":"IAAA,GAAG,uBAAQ,GAAG,GAAE;AAChB,WAAO", + "names":[], + "sources":["original.js"], + "sourcesContent":["my problems = 99\nthis is nice"] + }"#; + + // Maps "var my /* ignore this */ = 42;\nthis is nice" to "var my answer/* ignore this */ = 42;\nthis is not nice". + let second_sourcemap = br#"{ + "version":3, + "mappings":"AAAA,OAAa;AACb,OAAW", + "names":[], + "sources":["edited.js"], + "sourcesContent":["var my answer/* ignore this */ = 42;\nthis is not nice"] + }"#; + + let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); + let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); + + let transformed = first_sourcemap.transform(&second_sourcemap); + + dbg!(&transformed); + + let mut f = std::fs::File::create("test_transform_3.map").unwrap(); + transformed.to_writer(&mut f).unwrap(); + } } From d220c5f3e2a80618d2c22cc8e501f1d16650d47c Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Sat, 22 Jul 2023 00:33:56 +0200 Subject: [PATCH 03/23] WIP best attempt yet --- src/types.rs | 198 ++++++++++++++++++++++++++++++--------------------- 1 file changed, 117 insertions(+), 81 deletions(-) diff --git a/src/types.rs b/src/types.rs index 8d39182b..2c57a449 100644 --- a/src/types.rs +++ b/src/types.rs @@ -842,104 +842,140 @@ impl SourceMap { /// The source root, sources, source contents, and names will be copied from `self`. The only information /// that is used from `other` are the mappings. pub fn transform(&self, other: &Self) -> Self { + #[derive(Debug, Clone, Copy)] + struct Range { + start: (u32, u32), + end: (u32, u32), + value: RawToken, + } let mut builder = SourceMapBuilder::new(self.file.as_deref()); builder.set_source_root(self.get_source_root()); - let mut other_tokens = other.tokens.clone(); - other_tokens.sort_unstable_by_key(|token| (token.src_line, token.src_col)); + // Turn `self.tokens` and `other.tokens` into vectors of ranges so we have easy access to + // both start and end. For `other`, the range is on `src_line/col`, for `self` it's on + // `dst_line/col`. 
+ let mut other_ranges = Vec::new(); + let mut other_token_iter = other.tokens.iter().peekable(); + + while let Some(&t) = other_token_iter.next() { + let (end_line, end_col) = other_token_iter + .peek() + .map_or((u32::MAX, u32::MAX), |&&t| (t.src_line, t.src_col)); + other_ranges.push(Range { + start: (t.src_line, t.src_col), + end: (end_line, end_col), + value: t, + }); + } + + other_ranges.sort_unstable_by_key(|r| r.start); + + dbg!(&other_ranges); + + let mut self_ranges = Vec::new(); + let mut self_token_iter = self.tokens.iter().peekable(); + + while let Some(&t) = self_token_iter.next() { + let (end_line, end_col) = self_token_iter + .peek() + .map_or((u32::MAX, u32::MAX), |&&t| (t.dst_line, t.dst_col)); + self_ranges.push(Range { + start: (t.dst_line, t.dst_col), + end: (end_line, end_col), + value: t, + }); + } + + self_ranges.sort_unstable_by_key(|r| r.start); + + dbg!(&self_ranges); + + let mut self_ranges_iter = self_ranges.iter_mut(); + + let Some(mut self_range) = self_ranges_iter.next() else { + return builder.into_sourcemap(); + }; + + // Iterate over `other.ranges` (sorted by `src_line/col`). For each such range, consider + // all `self.ranges` which overlap with it. + for &other_range in &other_ranges { + // The `other_range` offsets lines and columns by a certain amount. All `self_ranges` + // it covers will get the same offset. + let (line_diff, col_diff) = ( + other_range.value.src_line as i32 - other_range.value.dst_line as i32, + other_range.value.src_col as i32 - other_range.value.dst_col as i32, + ); + + dbg!(line_diff, col_diff); - dbg!(&other_tokens); + // Skip `self_ranges` that are entirely before the `other_range`. + while self_range.end <= other_range.start { + let Some(r) = self_ranges_iter.next() else { + return builder.into_sourcemap(); + }; - let mut other_tokens_iter = other_tokens.iter().peekable(); + self_range = r; + } - while let Some(¤t) = other_tokens_iter.next() { - if current.src_line == u32::MAX || current.src_col == u32::MAX { + // If the first `self_range` under this `other_range` starts after the `other_range`, + // there is a gap. Insert an empty mapping there. + if self_range.start > other_range.start { builder.add( - current.dst_line, - current.dst_col, + other_range.value.dst_line, + other_range.value.dst_col, u32::MAX, u32::MAX, None, None, ); - continue; } - let first_idx = match dbg!(greatest_lower_bound( - dbg!(&self.index), - dbg!(&(¤t.src_line, ¤t.src_col)), - |(l, c, _)| (l, c), - )) { - None => Bound::Unbounded, - Some((_, _, idx)) => Bound::Included(*idx as usize), - }; - dbg!(first_idx); - // All tokens in `self` that are "covered" by the current token in `other`. - let self_tokens = match other_tokens_iter.peek() { - Some(&&next) => { - let last_idx = match greatest_lower_bound( - &self.index, - &(&next.src_line, &next.src_col), - |(l, c, _)| (l, c), - ) { - None => Bound::Unbounded, - Some((_, _, idx)) => Bound::Excluded(*idx as usize), - }; + // Iterate over `self_ranges` that fall at least partially within the `other_range`. + while self_range.start < other_range.end { + dbg!(&self_range); + // If `self_range` started before `other_range`, cut it off. + self_range.start = std::cmp::max(self_range.start, other_range.start); + dbg!(&self_range); + let token = &mut self_range.value; + // Keep the `dst_line/col` in sync with the range start. + token.dst_line = self_range.start.0; + token.dst_col = self_range.start.1; + dbg!(&token); + + // Lookup the `self_range`'s source and name. 
+ let name = self.get_name(token.name_id); + let source = self.get_source(token.src_id); + + if let Some(source) = source { + let contents = self.get_source_contents(token.src_id); + + let new_id = builder.add_source(source); + builder.set_source_contents(new_id, contents); + } - dbg!(last_idx); + let dst_line = (token.dst_line as i32 - line_diff) as u32; + let dst_col = (token.dst_col as i32 - col_diff) as u32; - &self.tokens[(first_idx, last_idx)] - } - None => &self.tokens[(first_idx, Bound::Unbounded)], - }; + builder.add( + dst_line, + dst_col, + token.src_line, + token.src_col, + source, + name, + ); - dbg!(self_tokens); - - match self_tokens { - [] => { - builder.add( - current.dst_line, - current.dst_col, - u32::MAX, - u32::MAX, - None, - None, - ); - } - [_first, ..] => { - let (line_diff, col_diff) = ( - current.src_line as i32 - current.dst_line as i32, - current.src_col as i32 - current.dst_col as i32, - ); + if self_range.end < other_range.end { + // We're not yet past the end of the `other_range`. Advance the `self_range`. + let Some(r) = self_ranges_iter.next() else { + return builder.into_sourcemap(); + }; - dbg!(line_diff, col_diff); - - for token in self_tokens { - let name = self.get_name(token.name_id); - let source = self.get_source(token.src_id); - - if let Some(source) = source { - let contents = self.get_source_contents(token.src_id); - - let new_id = builder.add_source(source); - builder.set_source_contents(new_id, contents); - } - - let (mut line, mut col) = (token.dst_line as i32, token.dst_col as i32); - if (line, col) >= (current.src_line as i32, current.src_col as i32) { - line -= line_diff; - col -= col_diff; - } - - builder.add( - dbg!(line as u32), - dbg!(col as u32), - token.src_line, - token.src_col, - source, - name, - ); - } + self_range = r; + } else { + // The next `self_range` certainly doesn't fall under this `other_range` anymore + // (because the current one already satisfies `self_range.end` >= `other_range.end`) + break; } } } @@ -1233,7 +1269,7 @@ mod tests { } #[test] - fn test_transform() { + fn test_transform_1() { let first_sourcemap = br#"{ "version":3, "mappings":"IAAA,GAAG,uBAAQ,GAAG", From ede2af404fd0fbc21ec0cfe6b84f52bf4bf4acce Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 24 Jul 2023 11:35:56 +0200 Subject: [PATCH 04/23] WIP --- src/types.rs | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/types.rs b/src/types.rs index 2c57a449..5edd5253 100644 --- a/src/types.rs +++ b/src/types.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use std::cmp::Ordering; use std::fmt; use std::io::{Read, Write}; -use std::ops::Bound; use std::path::Path; use crate::builder::SourceMapBuilder; @@ -854,8 +853,10 @@ impl SourceMap { // Turn `self.tokens` and `other.tokens` into vectors of ranges so we have easy access to // both start and end. For `other`, the range is on `src_line/col`, for `self` it's on // `dst_line/col`. 
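The sort introduced here matters because tokens are stored in generated-file (dst) order, which need not agree with their source (src) order, and the ranges built below are keyed on src_line/col. A tiny illustration with invented numbers:

    // Two (dst, src) position pairs that are sorted by dst but not by src.
    let tokens = [((0u32, 0u32), (5u32, 0u32)), ((0, 10), (2, 0))];
    assert!(tokens.windows(2).all(|w| w[0].0 <= w[1].0)); // ordered by dst...
    assert!(!tokens.windows(2).all(|w| w[0].1 <= w[1].1)); // ...but not by src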
+ let mut other_tokens = other.tokens.clone(); + other_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); + let mut other_token_iter = other_tokens.iter().peekable(); let mut other_ranges = Vec::new(); - let mut other_token_iter = other.tokens.iter().peekable(); while let Some(&t) = other_token_iter.next() { let (end_line, end_col) = other_token_iter @@ -868,12 +869,12 @@ impl SourceMap { }); } - other_ranges.sort_unstable_by_key(|r| r.start); - dbg!(&other_ranges); + let mut self_tokens = self.tokens.clone(); + self_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); + let mut self_token_iter = self_tokens.iter().peekable(); let mut self_ranges = Vec::new(); - let mut self_token_iter = self.tokens.iter().peekable(); while let Some(&t) = self_token_iter.next() { let (end_line, end_col) = self_token_iter @@ -886,8 +887,6 @@ impl SourceMap { }); } - self_ranges.sort_unstable_by_key(|r| r.start); - dbg!(&self_ranges); let mut self_ranges_iter = self_ranges.iter_mut(); @@ -910,11 +909,10 @@ impl SourceMap { // Skip `self_ranges` that are entirely before the `other_range`. while self_range.end <= other_range.start { - let Some(r) = self_ranges_iter.next() else { - return builder.into_sourcemap(); - }; - - self_range = r; + match self_ranges_iter.next() { + Some(r) => self_range = r, + None => return builder.into_sourcemap(), + } } // If the first `self_range` under this `other_range` starts after the `other_range`, From a59607cbd149df089d7d1d2cf6e2021426ff4e87 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 24 Jul 2023 14:01:28 +0200 Subject: [PATCH 05/23] Refactoring --- src/types.rs | 215 ++++++++++++++++----------------------------------- 1 file changed, 66 insertions(+), 149 deletions(-) diff --git a/src/types.rs b/src/types.rs index 5edd5253..4104762a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -826,126 +826,102 @@ impl SourceMap { Ok((sm, mapping)) } - /// Transforms `self` by precomposing it with another sourcemap. + /// Composes two sourcemaps. /// - /// This function assumes that `other` only maps between two files and - /// its target file is the source file of `self`. In other words, if `self` + /// This function assumes that `first` only maps between two files and + /// its target file is the source file of `second`. In other words, if `first` + /// maps from `transformed.js` to `minitfied.js` and `second` /// maps from `minified.js` to `original_1.js`, …, `original_n.js`, then - /// `other` must map from `transformed.js` to `minitfied.js` for some file - /// `transformed.js`. The resulting sourcemap will then map from `transformed.js` + /// the resulting sourcemap maps from `transformed.js` /// to to `original_1.js`, …, `original_n.js`. /// - /// Mappings are composed in the obvious way: if `other` maps `(l₁, c₁)` to `(l₂, c₂)` and `self` - /// maps `(l₂', c₂')` to `(l₃, c₃)`, then `self.transform(&other)` maps `(l₁, c₁)` to `(l₃, c₃)`. - /// - /// The source root, sources, source contents, and names will be copied from `self`. The only information - /// that is used from `other` are the mappings. - pub fn transform(&self, other: &Self) -> Self { + /// The source root, sources, source contents, and names will be copied from `second`. The only information + /// that is used from `first` are the mappings. 
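Put differently, the call shape this doc comment describes would be roughly the following (a hypothetical sketch; the file names are invented, while from_slice and to_writer are the APIs already used by the tests below):

    // `transform_map` maps transformed.js -> minified.js (a single source),
    // `minify_map` maps minified.js -> original_1.js, ..., original_n.js.
    let transform_map = SourceMap::from_slice(include_bytes!("transformed.js.map")).unwrap();
    let minify_map = SourceMap::from_slice(include_bytes!("minified.js.map")).unwrap();

    // The result maps transformed.js directly to original_1.js, ..., original_n.js.
    let combined = SourceMap::compose(&transform_map, &minify_map);
    let mut out = std::fs::File::create("combined.js.map").unwrap();
    combined.to_writer(&mut out).unwrap();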
+ pub fn compose(first: &Self, second: &Self) -> Self { #[derive(Debug, Clone, Copy)] struct Range { start: (u32, u32), end: (u32, u32), value: RawToken, } - let mut builder = SourceMapBuilder::new(self.file.as_deref()); - builder.set_source_root(self.get_source_root()); - - // Turn `self.tokens` and `other.tokens` into vectors of ranges so we have easy access to - // both start and end. For `other`, the range is on `src_line/col`, for `self` it's on - // `dst_line/col`. - let mut other_tokens = other.tokens.clone(); - other_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); - let mut other_token_iter = other_tokens.iter().peekable(); - let mut other_ranges = Vec::new(); - - while let Some(&t) = other_token_iter.next() { - let (end_line, end_col) = other_token_iter + let mut builder = SourceMapBuilder::new(second.file.as_deref()); + builder.set_source_root(second.get_source_root()); + + // Turn `first.tokens` and `second.tokens` into vectors of ranges so we have easy access to + // both start and end. + let mut first_tokens = first.tokens.clone(); + first_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); + let mut first_token_iter = first_tokens.iter().peekable(); + let mut first_ranges = Vec::new(); + + while let Some(&t) = first_token_iter.next() { + let (end_line, end_col) = first_token_iter .peek() .map_or((u32::MAX, u32::MAX), |&&t| (t.src_line, t.src_col)); - other_ranges.push(Range { + first_ranges.push(Range { start: (t.src_line, t.src_col), end: (end_line, end_col), value: t, }); } - dbg!(&other_ranges); - - let mut self_tokens = self.tokens.clone(); - self_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); - let mut self_token_iter = self_tokens.iter().peekable(); - let mut self_ranges = Vec::new(); + let mut second_tokens = second.tokens.clone(); + second_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); + let mut second_token_iter = second_tokens.iter().peekable(); + let mut second_ranges = Vec::new(); - while let Some(&t) = self_token_iter.next() { - let (end_line, end_col) = self_token_iter + while let Some(&t) = second_token_iter.next() { + let (end_line, end_col) = second_token_iter .peek() .map_or((u32::MAX, u32::MAX), |&&t| (t.dst_line, t.dst_col)); - self_ranges.push(Range { + second_ranges.push(Range { start: (t.dst_line, t.dst_col), end: (end_line, end_col), value: t, }); } - dbg!(&self_ranges); + let mut second_ranges_iter = second_ranges.iter_mut(); - let mut self_ranges_iter = self_ranges.iter_mut(); - - let Some(mut self_range) = self_ranges_iter.next() else { + let Some(mut second_range) = second_ranges_iter.next() else { return builder.into_sourcemap(); }; - // Iterate over `other.ranges` (sorted by `src_line/col`). For each such range, consider - // all `self.ranges` which overlap with it. - for &other_range in &other_ranges { - // The `other_range` offsets lines and columns by a certain amount. All `self_ranges` + // Iterate over `first.ranges` (sorted by `src_line/col`). For each such range, consider + // all `second.ranges` which overlap with it. + for &first_range in &first_ranges { + // The `first_range` offsets lines and columns by a certain amount. All `second_ranges` // it covers will get the same offset. 
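A quick sanity check of the offset arithmetic with invented coordinates: if the covering `first` token maps (17, 23) in the transformed file to (8, 30) in the minified file, then a `second` token starting at (8, 40) should end up at (17, 33). With the sign convention used at this point (src minus dst, subtracted again further down):

    let (first_dst, first_src) = ((17_i32, 23_i32), (8_i32, 30_i32));
    let (line_diff, col_diff) = (first_src.0 - first_dst.0, first_src.1 - first_dst.1); // (-9, 7)
    let second_start = (8_i32, 40_i32);
    assert_eq!(
        (second_start.0 - line_diff, second_start.1 - col_diff),
        (17, 33)
    );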
let (line_diff, col_diff) = ( - other_range.value.src_line as i32 - other_range.value.dst_line as i32, - other_range.value.src_col as i32 - other_range.value.dst_col as i32, + first_range.value.src_line as i32 - first_range.value.dst_line as i32, + first_range.value.src_col as i32 - first_range.value.dst_col as i32, ); - dbg!(line_diff, col_diff); - - // Skip `self_ranges` that are entirely before the `other_range`. - while self_range.end <= other_range.start { - match self_ranges_iter.next() { - Some(r) => self_range = r, + // Skip `second_ranges` that are entirely before the `first_range`. + while second_range.end <= first_range.start { + match second_ranges_iter.next() { + Some(r) => second_range = r, None => return builder.into_sourcemap(), } } - // If the first `self_range` under this `other_range` starts after the `other_range`, - // there is a gap. Insert an empty mapping there. - if self_range.start > other_range.start { - builder.add( - other_range.value.dst_line, - other_range.value.dst_col, - u32::MAX, - u32::MAX, - None, - None, - ); - } + // At this point `second_range.end` > `first_range.start` - // Iterate over `self_ranges` that fall at least partially within the `other_range`. - while self_range.start < other_range.end { - dbg!(&self_range); - // If `self_range` started before `other_range`, cut it off. - self_range.start = std::cmp::max(self_range.start, other_range.start); - dbg!(&self_range); - let token = &mut self_range.value; + // Iterate over `second_ranges` that fall at least partially within the `first_range`. + while second_range.start < first_range.end { + // If `second_range` started before `first_range`, cut it off. + second_range.start = std::cmp::max(second_range.start, first_range.start); + let token = &mut second_range.value; // Keep the `dst_line/col` in sync with the range start. - token.dst_line = self_range.start.0; - token.dst_col = self_range.start.1; - dbg!(&token); + token.dst_line = second_range.start.0; + token.dst_col = second_range.start.1; - // Lookup the `self_range`'s source and name. - let name = self.get_name(token.name_id); - let source = self.get_source(token.src_id); + // Lookup the `second_range`'s source and name. + let name = second.get_name(token.name_id); + let source = second.get_source(token.src_id); if let Some(source) = source { - let contents = self.get_source_contents(token.src_id); + let contents = second.get_source_contents(token.src_id); let new_id = builder.add_source(source); builder.set_source_contents(new_id, contents); @@ -963,16 +939,13 @@ impl SourceMap { name, ); - if self_range.end < other_range.end { - // We're not yet past the end of the `other_range`. Advance the `self_range`. - let Some(r) = self_ranges_iter.next() else { - return builder.into_sourcemap(); - }; - - self_range = r; + if second_range.end < first_range.end { + // Advance the `second_range`. + match second_ranges_iter.next() { + Some(r) => second_range = r, + None => return builder.into_sourcemap(), + } } else { - // The next `self_range` certainly doesn't fall under this `other_range` anymore - // (because the current one already satisfies `self_range.end` >= `other_range.end`) break; } } @@ -1239,6 +1212,8 @@ impl SourceMapSection { #[cfg(test)] mod tests { + use std::fs::File; + use super::{RewriteOptions, SourceMap}; use debugid::DebugId; @@ -1267,16 +1242,9 @@ mod tests { } #[test] - fn test_transform_1() { + fn test_compose_identity() { + // Identity mapping on "var my answer/* ignore this */ = 42;". 
let first_sourcemap = br#"{ - "version":3, - "mappings":"IAAA,GAAG,uBAAQ,GAAG", - "names":[], - "sources":["original.js"], - "sourcesContent":["my problems = 99"] - }"#; - - let second_sourcemap = br#"{ "version":3, "mappings":"AAAA", "names":[], @@ -1284,17 +1252,8 @@ mod tests { "sourcesContent":["var my answer/* ignore this */ = 42;"] }"#; - let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); - let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); - - let transformed = first_sourcemap.transform(&second_sourcemap); - - dbg!(transformed); - } - - #[test] - fn test_transform_2() { - let first_sourcemap = br#"{ + // Maps "var my answer/* ignore this */ = 42;" to "my problems = 99". + let second_sourcemap = br#"{ "version":3, "mappings":"IAAA,GAAG,uBAAQ,GAAG", "names":[], @@ -1302,53 +1261,11 @@ mod tests { "sourcesContent":["my problems = 99"] }"#; - let second_sourcemap = br#"{ - "version":3, - "mappings":"AAAA,OAAa", - "names":[], - "sources":["edited.js"], - "sourcesContent":["var my answer/* ignore this */ = 42;"] - }"#; - let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); - let transformed = first_sourcemap.transform(&second_sourcemap); - - dbg!(&transformed); - - let mut f = std::fs::File::create("test_transform_2.map").unwrap(); - transformed.to_writer(&mut f).unwrap(); - } - - #[test] - fn test_transform_3() { - // Maps "var my answer/* ignore this */ = 42;\nthis is not nice" to "my problems = 99\nthis is nice". - let first_sourcemap = br#"{ - "version":3, - "mappings":"IAAA,GAAG,uBAAQ,GAAG,GAAE;AAChB,WAAO", - "names":[], - "sources":["original.js"], - "sourcesContent":["my problems = 99\nthis is nice"] - }"#; - - // Maps "var my /* ignore this */ = 42;\nthis is nice" to "var my answer/* ignore this */ = 42;\nthis is not nice". - let second_sourcemap = br#"{ - "version":3, - "mappings":"AAAA,OAAa;AACb,OAAW", - "names":[], - "sources":["edited.js"], - "sourcesContent":["var my answer/* ignore this */ = 42;\nthis is not nice"] - }"#; - - let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); - let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); - - let transformed = first_sourcemap.transform(&second_sourcemap); - - dbg!(&transformed); + let composed = SourceMap::compose(&first_sourcemap, &second_sourcemap); - let mut f = std::fs::File::create("test_transform_3.map").unwrap(); - transformed.to_writer(&mut f).unwrap(); + assert_eq!(composed.tokens, second_sourcemap.tokens); } } From 2f7826b23f69fecb620d4c3df05b1ce5ff51b107 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 24 Jul 2023 14:04:53 +0200 Subject: [PATCH 06/23] Lint --- src/types.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/types.rs b/src/types.rs index 4104762a..e5000483 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1212,7 +1212,6 @@ impl SourceMapSection { #[cfg(test)] mod tests { - use std::fs::File; use super::{RewriteOptions, SourceMap}; use debugid::DebugId; From c25ed563ab4ad7da53836b4972731f116025cb06 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 25 Jul 2023 14:27:26 +0200 Subject: [PATCH 07/23] More refactoring --- src/types.rs | 137 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 84 insertions(+), 53 deletions(-) diff --git a/src/types.rs b/src/types.rs index e5000483..71451fe2 100644 --- a/src/types.rs +++ b/src/types.rs @@ -828,100 +828,116 @@ impl SourceMap { /// Composes two sourcemaps. 
/// - /// This function assumes that `first` only maps between two files and - /// its target file is the source file of `second`. In other words, if `first` - /// maps from `transformed.js` to `minitfied.js` and `second` + /// This function assumes that `left` only maps between two files and + /// its target file is the source file of `right`. In other words, if `left` + /// maps from `transformed.js` to `minitfied.js` and `right` /// maps from `minified.js` to `original_1.js`, …, `original_n.js`, then /// the resulting sourcemap maps from `transformed.js` /// to to `original_1.js`, …, `original_n.js`. /// - /// The source root, sources, source contents, and names will be copied from `second`. The only information + /// The source root, sources, source contents, and names will be copied from `right`. The only information /// that is used from `first` are the mappings. - pub fn compose(first: &Self, second: &Self) -> Self { + pub fn compose(left: &Self, right: &Self) -> Self { + // Helper struct that makes it easier to compare tokens by the start and end + // of the range they cover. #[derive(Debug, Clone, Copy)] struct Range { start: (u32, u32), end: (u32, u32), value: RawToken, } - let mut builder = SourceMapBuilder::new(second.file.as_deref()); - builder.set_source_root(second.get_source_root()); + let mut builder = SourceMapBuilder::new(right.file.as_deref()); + builder.set_source_root(right.get_source_root()); - // Turn `first.tokens` and `second.tokens` into vectors of ranges so we have easy access to + // Turn `left.tokens` and `right.tokens` into vectors of ranges so we have easy access to // both start and end. - let mut first_tokens = first.tokens.clone(); - first_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); - let mut first_token_iter = first_tokens.iter().peekable(); - let mut first_ranges = Vec::new(); + let mut left_tokens = left.tokens.clone(); + left_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); + let mut left_token_iter = left_tokens.iter().peekable(); + let mut left_ranges = Vec::new(); - while let Some(&t) = first_token_iter.next() { - let (end_line, end_col) = first_token_iter + while let Some(&t) = left_token_iter.next() { + let (end_line, end_col) = left_token_iter .peek() .map_or((u32::MAX, u32::MAX), |&&t| (t.src_line, t.src_col)); - first_ranges.push(Range { + left_ranges.push(Range { start: (t.src_line, t.src_col), end: (end_line, end_col), value: t, }); } - let mut second_tokens = second.tokens.clone(); - second_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); - let mut second_token_iter = second_tokens.iter().peekable(); - let mut second_ranges = Vec::new(); + let mut right_tokens = right.tokens.clone(); + right_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); + let mut right_token_iter = right_tokens.iter().peekable(); + let mut right_ranges = Vec::new(); - while let Some(&t) = second_token_iter.next() { - let (end_line, end_col) = second_token_iter + while let Some(&t) = right_token_iter.next() { + let (end_line, end_col) = right_token_iter .peek() .map_or((u32::MAX, u32::MAX), |&&t| (t.dst_line, t.dst_col)); - second_ranges.push(Range { + right_ranges.push(Range { start: (t.dst_line, t.dst_col), end: (end_line, end_col), value: t, }); } - let mut second_ranges_iter = second_ranges.iter_mut(); + let mut right_ranges_iter = right_ranges.iter_mut(); - let Some(mut second_range) = second_ranges_iter.next() else { + let Some(mut right_range) = right_ranges_iter.next() else { return builder.into_sourcemap(); }; - // Iterate 
over `first.ranges` (sorted by `src_line/col`). For each such range, consider - // all `second.ranges` which overlap with it. - for &first_range in &first_ranges { - // The `first_range` offsets lines and columns by a certain amount. All `second_ranges` + // Iterate over `left.ranges` (sorted by `src_line/col`). For each such range, consider + // all `right.ranges` which overlap with it. + for &left_range in &left_ranges { + // The `left_range` offsets lines and columns by a certain amount. All `right_ranges` // it covers will get the same offset. let (line_diff, col_diff) = ( - first_range.value.src_line as i32 - first_range.value.dst_line as i32, - first_range.value.src_col as i32 - first_range.value.dst_col as i32, + left_range.value.src_line as i32 - left_range.value.dst_line as i32, + left_range.value.src_col as i32 - left_range.value.dst_col as i32, ); - // Skip `second_ranges` that are entirely before the `first_range`. - while second_range.end <= first_range.start { - match second_ranges_iter.next() { - Some(r) => second_range = r, + // Skip `right_ranges` that are entirely before the `left_range`. + while right_range.end <= left_range.start { + match right_ranges_iter.next() { + Some(r) => right_range = r, None => return builder.into_sourcemap(), } } - // At this point `second_range.end` > `first_range.start` + // At this point `right_range.end` > `left_range.start` - // Iterate over `second_ranges` that fall at least partially within the `first_range`. - while second_range.start < first_range.end { - // If `second_range` started before `first_range`, cut it off. - second_range.start = std::cmp::max(second_range.start, first_range.start); - let token = &mut second_range.value; + // If the first `right_range` starts after the `left_range`, + // there's a gap between the `left_range` and the `right_range`. + // Add an "empty" mapping for that gap. + if right_range.start > left_range.start { + builder.add( + left_range.value.dst_line, + left_range.value.dst_col, + u32::MAX, + u32::MAX, + None, + None, + ); + } + + // Iterate over `right_ranges` that fall at least partially within the `left_range`. + while right_range.start < left_range.end { + // If `right_range` started before `left_range`, cut it off. + right_range.start = std::cmp::max(right_range.start, left_range.start); + let token = &mut right_range.value; // Keep the `dst_line/col` in sync with the range start. - token.dst_line = second_range.start.0; - token.dst_col = second_range.start.1; + token.dst_line = right_range.start.0; + token.dst_col = right_range.start.1; - // Lookup the `second_range`'s source and name. - let name = second.get_name(token.name_id); - let source = second.get_source(token.src_id); + // Lookup the `right_range`'s source and name. + let name = right.get_name(token.name_id); + let source = right.get_source(token.src_id); if let Some(source) = source { - let contents = second.get_source_contents(token.src_id); + let contents = right.get_source_contents(token.src_id); let new_id = builder.add_source(source); builder.set_source_contents(new_id, contents); @@ -939,14 +955,16 @@ impl SourceMap { name, ); - if second_range.end < first_range.end { - // Advance the `second_range`. - match second_ranges_iter.next() { - Some(r) => second_range = r, + if right_range.end >= left_range.end { + // There are surely no more `right_ranges` for this `left_range`. + // Break the loop without advancing the `right_range`. + break; + } else { + // Advance the `right_range`. 
+ match right_ranges_iter.next() { + Some(r) => right_range = r, None => return builder.into_sourcemap(), } - } else { - break; } } } @@ -1212,6 +1230,7 @@ impl SourceMapSection { #[cfg(test)] mod tests { + use crate::RawToken; use super::{RewriteOptions, SourceMap}; use debugid::DebugId; @@ -1265,6 +1284,18 @@ mod tests { let composed = SourceMap::compose(&first_sourcemap, &second_sourcemap); - assert_eq!(composed.tokens, second_sourcemap.tokens); + // The composition added an explicit non-mapping at the beginning. + assert_eq!( + &composed.tokens[0], + &RawToken { + dst_line: 0, + dst_col: 0, + src_line: u32::MAX, + src_col: u32::MAX, + src_id: u32::MAX, + name_id: u32::MAX, + } + ); + assert_eq!(&composed.tokens[1..], second_sourcemap.tokens); } } From d517ed9ff6a659c50e5e50383aea6b6127b3d133 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 25 Jul 2023 14:54:17 +0200 Subject: [PATCH 08/23] Documentation --- src/types.rs | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/src/types.rs b/src/types.rs index 71451fe2..3737872a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -838,6 +838,20 @@ impl SourceMap { /// The source root, sources, source contents, and names will be copied from `right`. The only information /// that is used from `first` are the mappings. pub fn compose(left: &Self, right: &Self) -> Self { + // The composition works by going through the tokens in `right` in order and adjusting + // them depending on the token in `left` they're "covered" by. + // For example: + // Let `l` be a token in `left` mapping `(17, 23)` to `(8, 30)` and let + // `r₁ : (8, 32) -> (102, 35)`, `r₂ : (8, 40) -> (102, 50)`, and + // `r₃ : (9, 10) -> (103, 12)` be the tokens in `right` that fall in the range of `l`. + // `l` offsets these tokens by `(+9, -7)`, so `r₁, … , r₃` must be offset by the same + // amount. Thus, the composed sourcemap will contain the tokens + // `c₁ : (17, 25) -> (102, 35)`, `c₂ : (17, 33) -> (102, 50)`, and + // `c3 : (18, 3) -> (103, 12)`. + // Moreover, there is a small gap between the start of the range of `l` (`(8, 30)`) and the start + // of the domain of `r₁` (`(8, 32)`). Therefore, we also add a token `c₀ : (8, 30) -> !` that maps + // the small range `(8, 30)..(8, 32)` to nothing. + // Helper struct that makes it easier to compare tokens by the start and end // of the range they cover. #[derive(Debug, Clone, Copy)] @@ -851,6 +865,9 @@ impl SourceMap { // Turn `left.tokens` and `right.tokens` into vectors of ranges so we have easy access to // both start and end. + // We want to compare `left` tokens and `right` by line/column numbers in the "middle" file. + // These line/column numbers are the `src_line/col` for `left` tokens and `dst_line/col` for + // the right tokens. let mut left_tokens = left.tokens.clone(); left_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); let mut left_token_iter = left_tokens.iter().peekable(); @@ -895,8 +912,8 @@ impl SourceMap { // The `left_range` offsets lines and columns by a certain amount. All `right_ranges` // it covers will get the same offset. let (line_diff, col_diff) = ( - left_range.value.src_line as i32 - left_range.value.dst_line as i32, - left_range.value.src_col as i32 - left_range.value.dst_col as i32, + left_range.value.dst_line as i32 - left_range.value.src_line as i32, + left_range.value.dst_col as i32 - left_range.value.src_col as i32, ); // Skip `right_ranges` that are entirely before the `left_range`. 
@@ -943,8 +960,8 @@ impl SourceMap { builder.set_source_contents(new_id, contents); } - let dst_line = (token.dst_line as i32 - line_diff) as u32; - let dst_col = (token.dst_col as i32 - col_diff) as u32; + let dst_line = (token.dst_line as i32 + line_diff) as u32; + let dst_col = (token.dst_col as i32 + col_diff) as u32; builder.add( dst_line, From 64d26307af418dc27770591df7af1c4d8a2aee08 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 25 Jul 2023 14:58:27 +0200 Subject: [PATCH 09/23] Committed file by mistake --- converted.map | 1 - 1 file changed, 1 deletion(-) delete mode 100644 converted.map diff --git a/converted.map b/converted.map deleted file mode 100644 index 0464e26d..00000000 --- a/converted.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["original.js"],"sourcesContent":["problems = 99"],"names":[],"mappings":"IAAA,MAAQ,C,iBAAC,CAAC,CAAC"} \ No newline at end of file From 0edf07fcecad039e1a8f05000b17c160253973fd Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Wed, 26 Jul 2023 11:20:34 +0200 Subject: [PATCH 10/23] break -> continue --- src/types.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/types.rs b/src/types.rs index 3737872a..83ba39c4 100644 --- a/src/types.rs +++ b/src/types.rs @@ -974,8 +974,8 @@ impl SourceMap { if right_range.end >= left_range.end { // There are surely no more `right_ranges` for this `left_range`. - // Break the loop without advancing the `right_range`. - break; + // Continue to the next `left_range` without advancing the `right_range`. + continue; } else { // Advance the `right_range`. match right_ranges_iter.next() { From 1ac49ad97507dcb3350f2a2b9df20f1910742dbf Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Wed, 26 Jul 2023 11:50:39 +0200 Subject: [PATCH 11/23] Revert "break -> continue" This reverts commit 0edf07fcecad039e1a8f05000b17c160253973fd. --- src/types.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/types.rs b/src/types.rs index 83ba39c4..3737872a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -974,8 +974,8 @@ impl SourceMap { if right_range.end >= left_range.end { // There are surely no more `right_ranges` for this `left_range`. - // Continue to the next `left_range` without advancing the `right_range`. - continue; + // Break the loop without advancing the `right_range`. + break; } else { // Advance the `right_range`. match right_ranges_iter.next() { From 4ba0e1072b5e98a78b2f834130db5f3df9372653 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 15:40:10 +0200 Subject: [PATCH 12/23] Remove empty mappings --- src/types.rs | 28 +--------------------------- 1 file changed, 1 insertion(+), 27 deletions(-) diff --git a/src/types.rs b/src/types.rs index 3737872a..d16d138a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -926,20 +926,6 @@ impl SourceMap { // At this point `right_range.end` > `left_range.start` - // If the first `right_range` starts after the `left_range`, - // there's a gap between the `left_range` and the `right_range`. - // Add an "empty" mapping for that gap. - if right_range.start > left_range.start { - builder.add( - left_range.value.dst_line, - left_range.value.dst_col, - u32::MAX, - u32::MAX, - None, - None, - ); - } - // Iterate over `right_ranges` that fall at least partially within the `left_range`. while right_range.start < left_range.end { // If `right_range` started before `left_range`, cut it off. 
@@ -1301,18 +1287,6 @@ mod tests { let composed = SourceMap::compose(&first_sourcemap, &second_sourcemap); - // The composition added an explicit non-mapping at the beginning. - assert_eq!( - &composed.tokens[0], - &RawToken { - dst_line: 0, - dst_col: 0, - src_line: u32::MAX, - src_col: u32::MAX, - src_id: u32::MAX, - name_id: u32::MAX, - } - ); - assert_eq!(&composed.tokens[1..], second_sourcemap.tokens); + assert_eq!(composed.tokens, second_sourcemap.tokens); } } From d16a1462102f6e2959e036d0ea845f04b3b9dd30 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 16:08:56 +0200 Subject: [PATCH 13/23] Factor out duplicated code --- src/types.rs | 55 +++++++++++++++++++++------------------------------- 1 file changed, 22 insertions(+), 33 deletions(-) diff --git a/src/types.rs b/src/types.rs index d16d138a..2e688e5e 100644 --- a/src/types.rs +++ b/src/types.rs @@ -863,42 +863,33 @@ impl SourceMap { let mut builder = SourceMapBuilder::new(right.file.as_deref()); builder.set_source_root(right.get_source_root()); + /// Turns a list of tokens into a list of ranges, using the provided function to determine the start of a token. + fn create_ranges(tokens: &[RawToken], key: fn(&RawToken) -> (u32, u32)) -> Vec { + let mut tokens = tokens.to_vec(); + tokens.sort_unstable_by_key(key); + + let mut token_iter = tokens.into_iter().peekable(); + let mut ranges = Vec::new(); + + while let Some(t) = token_iter.next() { + let (end_line, end_col) = token_iter.peek().map_or((u32::MAX, u32::MAX), key); + ranges.push(Range { + start: key(&t), + end: (end_line, end_col), + value: t, + }); + } + + ranges + } + // Turn `left.tokens` and `right.tokens` into vectors of ranges so we have easy access to // both start and end. // We want to compare `left` tokens and `right` by line/column numbers in the "middle" file. // These line/column numbers are the `src_line/col` for `left` tokens and `dst_line/col` for // the right tokens. 
- let mut left_tokens = left.tokens.clone(); - left_tokens.sort_unstable_by_key(|t| (t.src_line, t.src_col)); - let mut left_token_iter = left_tokens.iter().peekable(); - let mut left_ranges = Vec::new(); - - while let Some(&t) = left_token_iter.next() { - let (end_line, end_col) = left_token_iter - .peek() - .map_or((u32::MAX, u32::MAX), |&&t| (t.src_line, t.src_col)); - left_ranges.push(Range { - start: (t.src_line, t.src_col), - end: (end_line, end_col), - value: t, - }); - } - - let mut right_tokens = right.tokens.clone(); - right_tokens.sort_unstable_by_key(|t| (t.dst_line, t.dst_col)); - let mut right_token_iter = right_tokens.iter().peekable(); - let mut right_ranges = Vec::new(); - - while let Some(&t) = right_token_iter.next() { - let (end_line, end_col) = right_token_iter - .peek() - .map_or((u32::MAX, u32::MAX), |&&t| (t.dst_line, t.dst_col)); - right_ranges.push(Range { - start: (t.dst_line, t.dst_col), - end: (end_line, end_col), - value: t, - }); - } + let left_ranges = create_ranges(&left.tokens, |t| (t.src_line, t.src_col)); + let mut right_ranges = create_ranges(&right.tokens, |t| (t.dst_line, t.dst_col)); let mut right_ranges_iter = right_ranges.iter_mut(); @@ -1233,8 +1224,6 @@ impl SourceMapSection { #[cfg(test)] mod tests { - use crate::RawToken; - use super::{RewriteOptions, SourceMap}; use debugid::DebugId; From 6a73ac387b98256c68c67279b58723a91c7a899b Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 16:25:56 +0200 Subject: [PATCH 14/23] Take out some mutation --- src/types.rs | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/types.rs b/src/types.rs index 2e688e5e..c7cdf9ee 100644 --- a/src/types.rs +++ b/src/types.rs @@ -889,12 +889,13 @@ impl SourceMap { // These line/column numbers are the `src_line/col` for `left` tokens and `dst_line/col` for // the right tokens. let left_ranges = create_ranges(&left.tokens, |t| (t.src_line, t.src_col)); - let mut right_ranges = create_ranges(&right.tokens, |t| (t.dst_line, t.dst_col)); + let right_ranges = create_ranges(&right.tokens, |t| (t.dst_line, t.dst_col)); - let mut right_ranges_iter = right_ranges.iter_mut(); + let mut right_ranges_iter = right_ranges.iter(); - let Some(mut right_range) = right_ranges_iter.next() else { - return builder.into_sourcemap(); + let mut right_range = match right_ranges_iter.next() { + Some(r) => r, + None => return builder.into_sourcemap(), }; // Iterate over `left.ranges` (sorted by `src_line/col`). For each such range, consider @@ -919,12 +920,13 @@ impl SourceMap { // Iterate over `right_ranges` that fall at least partially within the `left_range`. while right_range.start < left_range.end { - // If `right_range` started before `left_range`, cut it off. - right_range.start = std::cmp::max(right_range.start, left_range.start); - let token = &mut right_range.value; - // Keep the `dst_line/col` in sync with the range start. - token.dst_line = right_range.start.0; - token.dst_col = right_range.start.1; + // If `right_range` started before `left_range`, cut off the token's start. + let (dst_line, dst_col) = std::cmp::max(right_range.start, left_range.start); + let token = RawToken { + dst_line, + dst_col, + ..right_range.value + }; // Lookup the `right_range`'s source and name. 
let name = right.get_name(token.name_id); From 0164177449b6f5653ba0eabe0eaa8833679befde Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 17:53:44 +0200 Subject: [PATCH 15/23] Add diagram --- src/types.rs | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/types.rs b/src/types.rs index c7cdf9ee..fc8d2ca2 100644 --- a/src/types.rs +++ b/src/types.rs @@ -842,15 +842,27 @@ impl SourceMap { // them depending on the token in `left` they're "covered" by. // For example: // Let `l` be a token in `left` mapping `(17, 23)` to `(8, 30)` and let - // `r₁ : (8, 32) -> (102, 35)`, `r₂ : (8, 40) -> (102, 50)`, and + // `r₁ : (8, 28) -> (102, 35)`, `r₂ : (8, 40) -> (102, 50)`, and // `r₃ : (9, 10) -> (103, 12)` be the tokens in `right` that fall in the range of `l`. // `l` offsets these tokens by `(+9, -7)`, so `r₁, … , r₃` must be offset by the same // amount. Thus, the composed sourcemap will contain the tokens - // `c₁ : (17, 25) -> (102, 35)`, `c₂ : (17, 33) -> (102, 50)`, and + // `c₁ : (17, 23) -> (102, 35)`, `c₂ : (17, 33) -> (102, 50)`, and // `c3 : (18, 3) -> (103, 12)`. - // Moreover, there is a small gap between the start of the range of `l` (`(8, 30)`) and the start - // of the domain of `r₁` (`(8, 32)`). Therefore, we also add a token `c₀ : (8, 30) -> !` that maps - // the small range `(8, 30)..(8, 32)` to nothing. + // + // Or, in diagram form: + // + // (17, 23) (position in the left file) + // ↓ l + // (8, 30) + // (8, 28) (8, 40) (9, 10) (positions in the middle file) + // ↓ r₁ ↓ r₂ ↓ r₃ + // (102, 35) (102, 50) (103, 12) (positions in the right file) + // + // becomes + // + // (17, 23) (17, 33) (18, 3) (positions in the left file) + // ↓ c₁ ↓ c₂ ↓ c₃ + // (102, 35) (102, 50) (103, 12) (positions in the right file) // Helper struct that makes it easier to compare tokens by the start and end // of the range they cover. From 284dab5a8dd8b405e49a61fa8d37f56f82f90dd0 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 17:55:47 +0200 Subject: [PATCH 16/23] Add test for composition with identity on the right --- src/types.rs | 47 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/src/types.rs b/src/types.rs index fc8d2ca2..a17225c8 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1266,9 +1266,9 @@ mod tests { } #[test] - fn test_compose_identity() { + fn test_compose_identity_left() { // Identity mapping on "var my answer/* ignore this */ = 42;". - let first_sourcemap = br#"{ + let left_sourcemap = br#"{ "version":3, "mappings":"AAAA", "names":[], @@ -1277,7 +1277,7 @@ mod tests { }"#; // Maps "var my answer/* ignore this */ = 42;" to "my problems = 99". 
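For anyone checking these fixtures by hand: "IAAA,GAAG,uBAAQ,GAAG" is a single generated line whose four Base64-VLQ segments work out to the tokens (0, 4) -> (0, 0), (0, 7) -> (0, 3), (0, 30) -> (0, 11) and (0, 33) -> (0, 14), i.e. roughly `my`, `answer`/`problems`, the `=`, and the literal `42`/`99`. A small self-contained decoder for such segments (illustrative only, not part of the crate):

    fn decode_vlq(segment: &str) -> Vec<i64> {
        const ALPHABET: &[u8] =
            b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        let (mut out, mut value, mut shift) = (Vec::new(), 0i64, 0u32);
        for &c in segment.as_bytes() {
            let digit = ALPHABET.iter().position(|&a| a == c).unwrap() as i64;
            value |= (digit & 31) << shift; // the low 5 bits carry data
            shift += 5;
            if digit & 32 == 0 {
                // continuation bit unset: the value is complete, its LSB is the sign
                out.push(if value & 1 == 1 { -(value >> 1) } else { value >> 1 });
                value = 0;
                shift = 0;
            }
        }
        out
    }

    // Fields per segment: [generated col delta, source index delta, original line delta, original col delta]
    // decode_vlq("IAAA") == [4, 0, 0, 0]
    // decode_vlq("uBAAQ") == [23, 0, 0, 8]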
- let second_sourcemap = br#"{ + let right_sourcemap = br#"{ "version":3, "mappings":"IAAA,GAAG,uBAAQ,GAAG", "names":[], @@ -1285,11 +1285,44 @@ mod tests { "sourcesContent":["my problems = 99"] }"#; - let first_sourcemap = SourceMap::from_slice(first_sourcemap).unwrap(); - let second_sourcemap = SourceMap::from_slice(second_sourcemap).unwrap(); + let left_sourcemap = SourceMap::from_slice(left_sourcemap).unwrap(); + let right_sourcemap = SourceMap::from_slice(right_sourcemap).unwrap(); - let composed = SourceMap::compose(&first_sourcemap, &second_sourcemap); + let composed = SourceMap::compose(&left_sourcemap, &right_sourcemap); - assert_eq!(composed.tokens, second_sourcemap.tokens); + assert_eq!(composed.tokens, right_sourcemap.tokens); + } + + #[test] + fn test_compose_identity_right() { + // Hires map from "var my problems = 99;" to "my problems = 99" + let left_sourcemap = br#"{ + "version":3, + "mappings":"GAAA,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC", + "names":[], + "sources":["edited.js"], + "sourcesContent":["my problems = 99"] + }"#; + + // Identity map on "my problems = 99" + let right_sourcemap = br#"{ + "version":3, + "mappings":"AAAA", + "names":[], + "sources":["original.js"], + "sourcesContent":["my problems = 99"] + }"#; + + let left_sourcemap = SourceMap::from_slice(left_sourcemap).unwrap(); + let right_sourcemap = SourceMap::from_slice(right_sourcemap).unwrap(); + + let composed = SourceMap::compose(&left_sourcemap, &right_sourcemap); + + // After composition, all mappings point to (0, 0). + // Morally, the composition is the same as the map with the single mapping `(0,3) -> (0, 0)`. + for t in &composed.tokens { + assert_eq!(t.src_line, 0); + assert_eq!(t.src_col, 0); + } } } From 40030a52cf09877c1db0e32303c7314239aac983 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Thu, 27 Jul 2023 18:58:54 +0200 Subject: [PATCH 17/23] Reframe function and documentation --- src/types.rs | 140 +++++++++++++++++++++++++++------------------------ 1 file changed, 73 insertions(+), 67 deletions(-) diff --git a/src/types.rs b/src/types.rs index a17225c8..2cd07803 100644 --- a/src/types.rs +++ b/src/types.rs @@ -826,43 +826,48 @@ impl SourceMap { Ok((sm, mapping)) } - /// Composes two sourcemaps. + /// Adjusts the mappings in `original` using the mappings in `adjustment`. /// - /// This function assumes that `left` only maps between two files and - /// its target file is the source file of `right`. In other words, if `left` - /// maps from `transformed.js` to `minitfied.js` and `right` - /// maps from `minified.js` to `original_1.js`, …, `original_n.js`, then - /// the resulting sourcemap maps from `transformed.js` - /// to to `original_1.js`, …, `original_n.js`. + /// Here is the intended use case for this function: + /// * You have a source file (for example, minified JS) `foo.js` and a + /// corresponding sourcemap `foo.js.map`. + /// * You modify `foo.js` in some way and generate a sourcemap `transform.js.map` + /// representing this modification. This can be done using `magic-string`, for example. + /// * You want a sourcemap that is "like" `foo.js.map`, but takes the changes you made to `foo.js` into account. + /// Then `SourceMap::adjust_mappings(foo.js.map, transform.js.map)` is the desired sourcemap. /// - /// The source root, sources, source contents, and names will be copied from `right`. The only information - /// that is used from `first` are the mappings. 
- pub fn compose(left: &Self, right: &Self) -> Self { - // The composition works by going through the tokens in `right` in order and adjusting - // them depending on the token in `left` they're "covered" by. + /// This function assumes that `adjustment` contains no relevant information except for mappings. + /// All information about sources and names is copied from `original`. + /// + /// Note that the resulting sourcemap will be at most as fine-grained as `original.` For example, + /// if `original` maps every line/column to `0/0`, then `SourceMap::adjust_mappings(original, adjustment)` + /// will not map to anything other than`0/0`, irrespective of how detailed the mappings in `adjustment` are. + pub fn adjust_mappings(original: &Self, adjustment: &Self) -> Self { + // The algorithm works by going through the tokens in `original` in order and adjusting + // them depending on the token in `adjustment` they're "covered" by. // For example: - // Let `l` be a token in `left` mapping `(17, 23)` to `(8, 30)` and let + // Let `l` be a token in `original` mapping `(17, 23)` to `(8, 30)` and let // `r₁ : (8, 28) -> (102, 35)`, `r₂ : (8, 40) -> (102, 50)`, and - // `r₃ : (9, 10) -> (103, 12)` be the tokens in `right` that fall in the range of `l`. + // `r₃ : (9, 10) -> (103, 12)` be the tokens in `original` that fall in the range of `l`. // `l` offsets these tokens by `(+9, -7)`, so `r₁, … , r₃` must be offset by the same - // amount. Thus, the composed sourcemap will contain the tokens + // amount. Thus, the adjusted sourcemap will contain the tokens // `c₁ : (17, 23) -> (102, 35)`, `c₂ : (17, 33) -> (102, 50)`, and // `c3 : (18, 3) -> (103, 12)`. // // Or, in diagram form: // - // (17, 23) (position in the left file) + // (17, 23) (position in the edited source file) // ↓ l // (8, 30) - // (8, 28) (8, 40) (9, 10) (positions in the middle file) + // (8, 28) (8, 40) (9, 10) (positions in the original source file) // ↓ r₁ ↓ r₂ ↓ r₃ - // (102, 35) (102, 50) (103, 12) (positions in the right file) + // (102, 35) (102, 50) (103, 12) (positions in the target file) // // becomes // - // (17, 23) (17, 33) (18, 3) (positions in the left file) + // (17, 23) (17, 33) (18, 3) (positions in the edited source file) // ↓ c₁ ↓ c₂ ↓ c₃ - // (102, 35) (102, 50) (103, 12) (positions in the right file) + // (102, 35) (102, 50) (103, 12) (positions in the target file) // Helper struct that makes it easier to compare tokens by the start and end // of the range they cover. @@ -872,8 +877,8 @@ impl SourceMap { end: (u32, u32), value: RawToken, } - let mut builder = SourceMapBuilder::new(right.file.as_deref()); - builder.set_source_root(right.get_source_root()); + let mut builder = SourceMapBuilder::new(original.file.as_deref()); + builder.set_source_root(original.get_source_root()); /// Turns a list of tokens into a list of ranges, using the provided function to determine the start of a token. fn create_ranges(tokens: &[RawToken], key: fn(&RawToken) -> (u32, u32)) -> Vec { @@ -895,57 +900,58 @@ impl SourceMap { ranges } - // Turn `left.tokens` and `right.tokens` into vectors of ranges so we have easy access to + // Turn `original.tokens` and `adjustment.tokens` into vectors of ranges so we have easy access to // both start and end. - // We want to compare `left` tokens and `right` by line/column numbers in the "middle" file. - // These line/column numbers are the `src_line/col` for `left` tokens and `dst_line/col` for - // the right tokens. 
- let left_ranges = create_ranges(&left.tokens, |t| (t.src_line, t.src_col)); - let right_ranges = create_ranges(&right.tokens, |t| (t.dst_line, t.dst_col)); + // We want to compare `original` and `adjustment` tokens by line/column numbers in the "original source" file. + // These line/column numbers are the `dst_line/col` for + // the `original` tokens and `src_line/col` for the `adjustment` tokens. + let original_ranges = create_ranges(&original.tokens, |t| (t.dst_line, t.dst_col)); + let adjustment_ranges = create_ranges(&adjustment.tokens, |t| (t.src_line, t.src_col)); - let mut right_ranges_iter = right_ranges.iter(); + let mut original_ranges_iter = original_ranges.iter(); - let mut right_range = match right_ranges_iter.next() { + let mut original_range = match original_ranges_iter.next() { Some(r) => r, None => return builder.into_sourcemap(), }; - // Iterate over `left.ranges` (sorted by `src_line/col`). For each such range, consider - // all `right.ranges` which overlap with it. - for &left_range in &left_ranges { - // The `left_range` offsets lines and columns by a certain amount. All `right_ranges` + // Iterate over `adjustment_ranges` (sorted by `src_line/col`). For each such range, consider + // all `original_ranges` which overlap with it. + for &adjustment_range in &adjustment_ranges { + // The `adjustment_range` offsets lines and columns by a certain amount. All `original_ranges` // it covers will get the same offset. let (line_diff, col_diff) = ( - left_range.value.dst_line as i32 - left_range.value.src_line as i32, - left_range.value.dst_col as i32 - left_range.value.src_col as i32, + adjustment_range.value.dst_line as i32 - adjustment_range.value.src_line as i32, + adjustment_range.value.dst_col as i32 - adjustment_range.value.src_col as i32, ); - // Skip `right_ranges` that are entirely before the `left_range`. - while right_range.end <= left_range.start { - match right_ranges_iter.next() { - Some(r) => right_range = r, + // Skip `original_ranges` that are entirely before the `adjustment_range`. + while original_range.end <= adjustment_range.start { + match original_ranges_iter.next() { + Some(r) => original_range = r, None => return builder.into_sourcemap(), } } - // At this point `right_range.end` > `left_range.start` + // At this point `original_range.end` > `adjustment_range.start` - // Iterate over `right_ranges` that fall at least partially within the `left_range`. - while right_range.start < left_range.end { - // If `right_range` started before `left_range`, cut off the token's start. - let (dst_line, dst_col) = std::cmp::max(right_range.start, left_range.start); + // Iterate over `original_ranges` that fall at least partially within the `adjustment_range`. + while original_range.start < adjustment_range.end { + // If `original_range` started before `adjustment_range`, cut off the token's start. + let (dst_line, dst_col) = + std::cmp::max(original_range.start, adjustment_range.start); let token = RawToken { dst_line, dst_col, - ..right_range.value + ..original_range.value }; - // Lookup the `right_range`'s source and name. - let name = right.get_name(token.name_id); - let source = right.get_source(token.src_id); + // Look up the `original_range`'s source and name. 
+ let name = original.get_name(token.name_id); + let source = original.get_source(token.src_id); if let Some(source) = source { - let contents = right.get_source_contents(token.src_id); + let contents = original.get_source_contents(token.src_id); let new_id = builder.add_source(source); builder.set_source_contents(new_id, contents); @@ -963,14 +969,14 @@ impl SourceMap { name, ); - if right_range.end >= left_range.end { - // There are surely no more `right_ranges` for this `left_range`. - // Break the loop without advancing the `right_range`. + if original_range.end >= adjustment_range.end { + // There are surely no more `original_ranges` for this `adjustment_range`. + // Break the loop without advancing the `original_range`. break; } else { - // Advance the `right_range`. - match right_ranges_iter.next() { - Some(r) => right_range = r, + // Advance the `original_range`. + match original_ranges_iter.next() { + Some(r) => original_range = r, None => return builder.into_sourcemap(), } } @@ -1268,7 +1274,7 @@ mod tests { #[test] fn test_compose_identity_left() { // Identity mapping on "var my answer/* ignore this */ = 42;". - let left_sourcemap = br#"{ + let adjustment = br#"{ "version":3, "mappings":"AAAA", "names":[], @@ -1277,7 +1283,7 @@ mod tests { }"#; // Maps "var my answer/* ignore this */ = 42;" to "my problems = 99". - let right_sourcemap = br#"{ + let original = br#"{ "version":3, "mappings":"IAAA,GAAG,uBAAQ,GAAG", "names":[], @@ -1285,18 +1291,18 @@ mod tests { "sourcesContent":["my problems = 99"] }"#; - let left_sourcemap = SourceMap::from_slice(left_sourcemap).unwrap(); - let right_sourcemap = SourceMap::from_slice(right_sourcemap).unwrap(); + let adjustment = SourceMap::from_slice(adjustment).unwrap(); + let original = SourceMap::from_slice(original).unwrap(); - let composed = SourceMap::compose(&left_sourcemap, &right_sourcemap); + let composed = SourceMap::adjust_mappings(&original, &adjustment); - assert_eq!(composed.tokens, right_sourcemap.tokens); + assert_eq!(composed.tokens, original.tokens); } #[test] fn test_compose_identity_right() { // Hires map from "var my problems = 99;" to "my problems = 99" - let left_sourcemap = br#"{ + let adjustment = br#"{ "version":3, "mappings":"GAAA,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC", "names":[], @@ -1305,7 +1311,7 @@ mod tests { }"#; // Identity map on "my problems = 99" - let right_sourcemap = br#"{ + let original = br#"{ "version":3, "mappings":"AAAA", "names":[], @@ -1313,10 +1319,10 @@ mod tests { "sourcesContent":["my problems = 99"] }"#; - let left_sourcemap = SourceMap::from_slice(left_sourcemap).unwrap(); - let right_sourcemap = SourceMap::from_slice(right_sourcemap).unwrap(); + let adjustment = SourceMap::from_slice(adjustment).unwrap(); + let original = SourceMap::from_slice(original).unwrap(); - let composed = SourceMap::compose(&left_sourcemap, &right_sourcemap); + let composed = SourceMap::adjust_mappings(&original, &adjustment); // After composition, all mappings point to (0, 0). // Morally, the composition is the same as the map with the single mapping `(0,3) -> (0, 0)`. 
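The offset arithmetic that the reworked doc comment above walks through can be checked in isolation. The following is a minimal, self-contained sketch of that worked example only — the `shift` helper and its tuple arguments are invented for illustration and are not part of the crate: an original token's start is clamped to the start of the covering adjustment token and then shifted by that token's line/column offset.

    fn shift(original_start: (u32, u32), adj_src: (u32, u32), adj_dst: (u32, u32)) -> (u32, u32) {
        // Clamp to the start of the covering adjustment token...
        let start = std::cmp::max(original_start, adj_src);
        // ...then apply that token's offset (dst - src) to line and column.
        let line = (start.0 as i64 + adj_dst.0 as i64 - adj_src.0 as i64) as u32;
        let col = (start.1 as i64 + adj_dst.1 as i64 - adj_src.1 as i64) as u32;
        (line, col)
    }

    fn main() {
        // The adjustment token `l : (17, 23) -> (8, 30)` from the doc comment
        // offsets the tokens it covers by (+9, -7).
        let (adj_dst, adj_src) = ((17, 23), (8, 30));
        // r₁, r₂, r₃ start at these positions and become c₁, c₂, c₃.
        assert_eq!(shift((8, 28), adj_src, adj_dst), (17, 23));
        assert_eq!(shift((8, 40), adj_src, adj_dst), (17, 33));
        assert_eq!(shift((9, 10), adj_src, adj_dst), (18, 3));
    }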
From d778ea3f4b7c75319cbeab710da5dd7fcf1ef58b Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Fri, 28 Jul 2023 15:01:56 +0200 Subject: [PATCH 18/23] Fix typo --- src/types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/types.rs b/src/types.rs index 2cd07803..547aa55d 100644 --- a/src/types.rs +++ b/src/types.rs @@ -846,7 +846,7 @@ impl SourceMap { // The algorithm works by going through the tokens in `original` in order and adjusting // them depending on the token in `adjustment` they're "covered" by. // For example: - // Let `l` be a token in `original` mapping `(17, 23)` to `(8, 30)` and let + // Let `l` be a token in `adjustment` mapping `(17, 23)` to `(8, 30)` and let // `r₁ : (8, 28) -> (102, 35)`, `r₂ : (8, 40) -> (102, 50)`, and // `r₃ : (9, 10) -> (103, 12)` be the tokens in `original` that fall in the range of `l`. // `l` offsets these tokens by `(+9, -7)`, so `r₁, … , r₃` must be offset by the same From 9d9b0c916c3d24374c7b521e351b6d18c4d26831 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 31 Jul 2023 17:34:24 +0200 Subject: [PATCH 19/23] Tokens stop at line ends --- src/types.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/types.rs b/src/types.rs index 547aa55d..ae2304b4 100644 --- a/src/types.rs +++ b/src/types.rs @@ -889,10 +889,13 @@ impl SourceMap { let mut ranges = Vec::new(); while let Some(t) = token_iter.next() { - let (end_line, end_col) = token_iter.peek().map_or((u32::MAX, u32::MAX), key); + let start = key(&t); + let next_start = token_iter.peek().map_or((u32::MAX, u32::MAX), key); + // A token extends either to the start of the next token or the end of the line, whichever comes sooner + let end = std::cmp::min(next_start, (start.0, u32::MAX)); ranges.push(Range { - start: key(&t), - end: (end_line, end_col), + start, + end, value: t, }); } From f8d8be09a519a93eb6bc14aa8740b921416e8363 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Mon, 31 Jul 2023 18:28:11 +0200 Subject: [PATCH 20/23] Add injection test --- src/types.rs | 40 +++++++++++++++++++ .../esbuild-composed.bundle.js.map | 1 + .../esbuild-injected.bundle.js | 6 +++ .../esbuild-injected.bundle.js.map | 1 + .../adjust_mappings/esbuild.bundle.js | 2 + .../adjust_mappings/esbuild.bundle.js.map | 7 ++++ .../rollup-composed.bundle.js.map | 1 + .../adjust_mappings/rollup-injected.bundle.js | 6 +++ .../rollup-injected.bundle.js.map | 1 + .../fixtures/adjust_mappings/rollup.bundle.js | 2 + .../adjust_mappings/rollup.bundle.js.map | 1 + .../vite-composed.bundle.js.map | 1 + .../adjust_mappings/vite-injected.bundle.js | 6 +++ .../vite-injected.bundle.js.map | 1 + tests/fixtures/adjust_mappings/vite.bundle.js | 2 + .../adjust_mappings/vite.bundle.js.map | 1 + .../webpack-composed.bundle.js.map | 1 + .../webpack-injected.bundle.js | 5 +++ .../webpack-injected.bundle.js.map | 1 + .../adjust_mappings/webpack.bundle.js | 2 + .../adjust_mappings/webpack.bundle.js.map | 1 + 21 files changed, 89 insertions(+) create mode 100644 tests/fixtures/adjust_mappings/esbuild-composed.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/esbuild-injected.bundle.js create mode 100644 tests/fixtures/adjust_mappings/esbuild-injected.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/esbuild.bundle.js create mode 100644 tests/fixtures/adjust_mappings/esbuild.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/rollup-composed.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/rollup-injected.bundle.js create 
mode 100644 tests/fixtures/adjust_mappings/rollup-injected.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/rollup.bundle.js create mode 100644 tests/fixtures/adjust_mappings/rollup.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/vite-composed.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/vite-injected.bundle.js create mode 100644 tests/fixtures/adjust_mappings/vite-injected.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/vite.bundle.js create mode 100644 tests/fixtures/adjust_mappings/vite.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/webpack-composed.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/webpack-injected.bundle.js create mode 100644 tests/fixtures/adjust_mappings/webpack-injected.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/webpack.bundle.js create mode 100644 tests/fixtures/adjust_mappings/webpack.bundle.js.map diff --git a/src/types.rs b/src/types.rs index ae2304b4..d98e9031 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1334,4 +1334,44 @@ mod tests { assert_eq!(t.src_col, 0); } } + + #[test] + fn test_adjust_mappings_injection() { + // A test that `adjust_mappings` does what it's supposed to for debug id injection. + // + // For each bundler: + // * `bundle.js` and `bundle.js.map` are taken from https://github.com/kamilogorek/sourcemaps-playground/. + // * `injected.bundle.js` and `injected.bundle.js.map` were created using the function`fixup_js_file` in `sentry-cli`. + // `injected.bundle.js.map` maps from `injected.bundle.js` to `bundle.js`. + // * `composed.bundle.js.map` is the result of calling `adjust_mappings` on `bundle.js.map` and `injected.bundle.js.map`. + // + // If everything is working as intended, `composed.bundle.js.map` is a (good) sourcemap from `injected.bundle.js` to + // the original sources. To verify that this is indeed the case, you can compare `bundle.js` / `bundle.js.map` with + // `injected.bundle.js` / `composed.bundle.js.map` using https://sokra.github.io/source-map-visualization/#custom. 
+ for bundler in ["esbuild", "rollup", "vite", "webpack"] { + let original_map_file = std::fs::File::open(format!( + "tests/fixtures/adjust_mappings/{bundler}.bundle.js.map" + )) + .unwrap(); + + let injected_map_file = std::fs::File::open(format!( + "tests/fixtures/adjust_mappings/{bundler}-injected.bundle.js.map" + )) + .unwrap(); + + let composed_map_file = std::fs::File::open(format!( + "tests/fixtures/adjust_mappings/{bundler}-composed.bundle.js.map" + )) + .unwrap(); + + let original_map = SourceMap::from_reader(original_map_file).unwrap(); + let injected_map = SourceMap::from_reader(injected_map_file).unwrap(); + let composed_map = SourceMap::from_reader(composed_map_file).unwrap(); + + assert_eq!( + SourceMap::adjust_mappings(&original_map, &injected_map).tokens, + composed_map.tokens + ); + } + } } diff --git a/tests/fixtures/adjust_mappings/esbuild-composed.bundle.js.map b/tests/fixtures/adjust_mappings/esbuild-composed.bundle.js.map new file mode 100644 index 00000000..212aac21 --- /dev/null +++ b/tests/fixtures/adjust_mappings/esbuild-composed.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../src/bar.js","../src/foo.js","../src/index.js"],"sourcesContent":["export function bar(fn, msg) {\n fn(msg);\n}\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["bar","fn","msg","foo","wat"],"mappings":";;MAAO,SAASA,EAAIC,EAAIC,EAAK,CAC3BD,EAAGC,CAAG,CACR,CCAO,SAASC,EAAIF,EAAIC,EAAK,CAC3BF,EAAIC,EAAIC,CAAG,CACb,CCFA,SAASE,EAAIH,EAAIC,EAAK,CACpBC,EAAIF,EAAIC,CAAG,CACb,CAEAE,EAAI,SAAeF,EAAK,CACtB,MAAM,IAAI,MAAMA,CAAG,CACrB,EAAG,MAAM"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js b/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js new file mode 100644 index 00000000..76db3c93 --- /dev/null +++ b/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js @@ -0,0 +1,6 @@ + +!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},n=(new Error).stack;n&&(e._sentryDebugIds=e._sentryDebugIds||{},e._sentryDebugIds[n]="00000000-0000-0000-0000-000000000000")}catch(e){}}(); +(()=>{function t(r,o){r(o)}function n(r,o){t(r,o)}function f(r,o){n(r,o)}f(function(o){throw new Error(o)},"boop");})(); +//# sourceMappingURL=esbuild.bundle.js.map + +//# debugId=00000000-0000-0000-0000-000000000000 diff --git a/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js.map b/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js.map new file mode 100644 index 00000000..e519e307 --- /dev/null +++ b/tests/fixtures/adjust_mappings/esbuild-injected.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"mappings":";;AAAA;AACA;;","names":[],"sources":["pre_injection.js"],"sourcesContent":["(()=>{function t(r,o){r(o)}function n(r,o){t(r,o)}function f(r,o){n(r,o)}f(function(o){throw new Error(o)},\"boop\");})();\n//# sourceMappingURL=esbuild.bundle.js.map\n"],"file":null} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/esbuild.bundle.js b/tests/fixtures/adjust_mappings/esbuild.bundle.js new file mode 100644 index 00000000..8873083c --- /dev/null +++ b/tests/fixtures/adjust_mappings/esbuild.bundle.js @@ -0,0 +1,2 @@ +(()=>{function t(r,o){r(o)}function n(r,o){t(r,o)}function f(r,o){n(r,o)}f(function(o){throw new Error(o)},"boop");})(); +//# sourceMappingURL=esbuild.bundle.js.map 
diff --git a/tests/fixtures/adjust_mappings/esbuild.bundle.js.map b/tests/fixtures/adjust_mappings/esbuild.bundle.js.map new file mode 100644 index 00000000..1abf784a --- /dev/null +++ b/tests/fixtures/adjust_mappings/esbuild.bundle.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../src/bar.js", "../src/foo.js", "../src/index.js"], + "sourcesContent": ["export function bar(fn, msg) {\n fn(msg);\n}\n", "import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n", "import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"], + "mappings": "MAAO,SAASA,EAAIC,EAAIC,EAAK,CAC3BD,EAAGC,CAAG,CACR,CCAO,SAASC,EAAIC,EAAIC,EAAK,CAC3BC,EAAIF,EAAIC,CAAG,CACb,CCFA,SAASE,EAAIC,EAAIC,EAAK,CACpBC,EAAIF,EAAIC,CAAG,CACb,CAEAF,EAAI,SAAeE,EAAK,CACtB,MAAM,IAAI,MAAMA,CAAG,CACrB,EAAG,MAAM", + "names": ["bar", "fn", "msg", "foo", "fn", "msg", "bar", "wat", "fn", "msg", "foo"] +} diff --git a/tests/fixtures/adjust_mappings/rollup-composed.bundle.js.map b/tests/fixtures/adjust_mappings/rollup-composed.bundle.js.map new file mode 100644 index 00000000..88c621be --- /dev/null +++ b/tests/fixtures/adjust_mappings/rollup-composed.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rollup.bundle.js","sources":["../src/index.js","../src/foo.js","../src/bar.js"],"sourcesContent":["import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","export function bar(fn, msg) {\n fn(msg);\n}\n"],"names":["fn","msg","Error","bar","foo"],"mappings":";;yBAEA,IAAaA,IAIT,SAAeC,GACjB,MAAM,IAAIC,MAAMD,EAClB,ECNO,SAAaD,EAAIC,ICFjB,SAAaD,EAAIC,GACtBD,EAAGC,EACL,CDCEE,CAAIH,EAAIC,EACV,CDDEG,CAAIJ,EAKH"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/rollup-injected.bundle.js b/tests/fixtures/adjust_mappings/rollup-injected.bundle.js new file mode 100644 index 00000000..63969fbf --- /dev/null +++ b/tests/fixtures/adjust_mappings/rollup-injected.bundle.js @@ -0,0 +1,6 @@ + +!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},n=(new Error).stack;n&&(e._sentryDebugIds=e._sentryDebugIds||{},e._sentryDebugIds[n]="00000000-0000-0000-0000-000000000000")}catch(e){}}(); +!function(){"use strict";var n;n=function(n){throw new Error(n)},function(n,o){!function(n,o){n(o)}(n,o)}(n,"boop")}(); +//# sourceMappingURL=rollup.bundle.js.map + +//# debugId=00000000-0000-0000-0000-000000000000 diff --git a/tests/fixtures/adjust_mappings/rollup-injected.bundle.js.map b/tests/fixtures/adjust_mappings/rollup-injected.bundle.js.map new file mode 100644 index 00000000..ed7c2448 --- /dev/null +++ b/tests/fixtures/adjust_mappings/rollup-injected.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"mappings":";;AAAA;AACA;;","names":[],"sources":["pre_injection.js"],"sourcesContent":["!function(){\"use strict\";var n;n=function(n){throw new Error(n)},function(n,o){!function(n,o){n(o)}(n,o)}(n,\"boop\")}();\n//# sourceMappingURL=rollup.bundle.js.map\n"],"file":null} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/rollup.bundle.js b/tests/fixtures/adjust_mappings/rollup.bundle.js new file mode 100644 index 00000000..21d7dd9e --- /dev/null +++ b/tests/fixtures/adjust_mappings/rollup.bundle.js @@ -0,0 +1,2 @@ +!function(){"use strict";var n;n=function(n){throw new 
Error(n)},function(n,o){!function(n,o){n(o)}(n,o)}(n,"boop")}(); +//# sourceMappingURL=rollup.bundle.js.map diff --git a/tests/fixtures/adjust_mappings/rollup.bundle.js.map b/tests/fixtures/adjust_mappings/rollup.bundle.js.map new file mode 100644 index 00000000..24a810b6 --- /dev/null +++ b/tests/fixtures/adjust_mappings/rollup.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rollup.bundle.js","sources":["../src/index.js","../src/foo.js","../src/bar.js"],"sourcesContent":["import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","export function bar(fn, msg) {\n fn(msg);\n}\n"],"names":["fn","msg","Error","bar","foo"],"mappings":"yBAEA,IAAaA,IAIT,SAAeC,GACjB,MAAM,IAAIC,MAAMD,EAClB,ECNO,SAAaD,EAAIC,ICFjB,SAAaD,EAAIC,GACtBD,EAAGC,EACL,CDCEE,CAAIH,EAAIC,EACV,CDDEG,CAAIJ,EAKH"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/vite-composed.bundle.js.map b/tests/fixtures/adjust_mappings/vite-composed.bundle.js.map new file mode 100644 index 00000000..0bc2a5bf --- /dev/null +++ b/tests/fixtures/adjust_mappings/vite-composed.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"vite.bundle.js","sources":["../src/bar.js","../src/foo.js","../src/index.js"],"sourcesContent":["export function bar(fn, msg) {\n fn(msg);\n}\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["bar","fn","msg","foo","wat"],"mappings":";;yBAAO,SAASA,EAAIC,EAAIC,EAAK,CAC3BD,EAAGC,CAAG,CACR,CCAO,SAASC,EAAIF,EAAIC,EAAK,CAC3BF,EAAIC,EAAIC,CAAG,CACb,CCFA,SAASE,EAAIH,EAAIC,EAAK,CACpBC,EAAIF,EAAIC,CAAG,CACb,CAEAE,EAAI,SAAeF,EAAK,CACtB,MAAM,IAAI,MAAMA,CAAG,CACrB,EAAG,MAAM"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/vite-injected.bundle.js b/tests/fixtures/adjust_mappings/vite-injected.bundle.js new file mode 100644 index 00000000..73e1ab42 --- /dev/null +++ b/tests/fixtures/adjust_mappings/vite-injected.bundle.js @@ -0,0 +1,6 @@ + +!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},n=(new Error).stack;n&&(e._sentryDebugIds=e._sentryDebugIds||{},e._sentryDebugIds[n]="00000000-0000-0000-0000-000000000000")}catch(e){}}(); +(function(){"use strict";function t(n,o){n(o)}function c(n,o){t(n,o)}function f(n,o){c(n,o)}f(function(o){throw new Error(o)},"boop")})(); +//# sourceMappingURL=vite.bundle.js.map + +//# debugId=00000000-0000-0000-0000-000000000000 diff --git a/tests/fixtures/adjust_mappings/vite-injected.bundle.js.map b/tests/fixtures/adjust_mappings/vite-injected.bundle.js.map new file mode 100644 index 00000000..626fa3d7 --- /dev/null +++ b/tests/fixtures/adjust_mappings/vite-injected.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"mappings":";;AAAA;AACA;;","names":[],"sources":["pre_injection.js"],"sourcesContent":["(function(){\"use strict\";function t(n,o){n(o)}function c(n,o){t(n,o)}function f(n,o){c(n,o)}f(function(o){throw new Error(o)},\"boop\")})();\n//# sourceMappingURL=vite.bundle.js.map\n"],"file":null} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/vite.bundle.js b/tests/fixtures/adjust_mappings/vite.bundle.js new file mode 100644 index 00000000..bfd29747 --- /dev/null +++ 
b/tests/fixtures/adjust_mappings/vite.bundle.js @@ -0,0 +1,2 @@ +(function(){"use strict";function t(n,o){n(o)}function c(n,o){t(n,o)}function f(n,o){c(n,o)}f(function(o){throw new Error(o)},"boop")})(); +//# sourceMappingURL=vite.bundle.js.map diff --git a/tests/fixtures/adjust_mappings/vite.bundle.js.map b/tests/fixtures/adjust_mappings/vite.bundle.js.map new file mode 100644 index 00000000..3ebd65a7 --- /dev/null +++ b/tests/fixtures/adjust_mappings/vite.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"vite.bundle.js","sources":["../src/bar.js","../src/foo.js","../src/index.js"],"sourcesContent":["export function bar(fn, msg) {\n fn(msg);\n}\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["bar","fn","msg","foo","wat"],"mappings":"yBAAO,SAASA,EAAIC,EAAIC,EAAK,CAC3BD,EAAGC,CAAG,CACR,CCAO,SAASC,EAAIF,EAAIC,EAAK,CAC3BF,EAAIC,EAAIC,CAAG,CACb,CCFA,SAASE,EAAIH,EAAIC,EAAK,CACpBC,EAAIF,EAAIC,CAAG,CACb,CAEAE,EAAI,SAAeF,EAAK,CACtB,MAAM,IAAI,MAAMA,CAAG,CACrB,EAAG,MAAM"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/webpack-composed.bundle.js.map b/tests/fixtures/adjust_mappings/webpack-composed.bundle.js.map new file mode 100644 index 00000000..7e31ab38 --- /dev/null +++ b/tests/fixtures/adjust_mappings/webpack-composed.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webpack.bundle.js","sources":["webpack://sourcemaps-playground/./src/foo.js","webpack://sourcemaps-playground/./src/bar.js","webpack://sourcemaps-playground/./src/index.js"],"sourceRoot":"","sourcesContent":["import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","export function bar(fn, msg) {\n fn(msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["fn","msg","bar","foo","Error"],"mappings":";;oBAEO,SAAaA,EAAIC,ICFjB,SAAaD,EAAIC,GACtBD,EAAGC,EACL,CDCEC,CAAIF,EEKH,OFJH,EEDEG,EAGE,SAAeF,GACjB,MAAM,IAAIG,MAAMH,EAClB,G"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/webpack-injected.bundle.js b/tests/fixtures/adjust_mappings/webpack-injected.bundle.js new file mode 100644 index 00000000..010e4f3a --- /dev/null +++ b/tests/fixtures/adjust_mappings/webpack-injected.bundle.js @@ -0,0 +1,5 @@ + +!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},n=(new Error).stack;n&&(e._sentryDebugIds=e._sentryDebugIds||{},e._sentryDebugIds[n]="00000000-0000-0000-0000-000000000000")}catch(e){}}(); +(()=>{"use strict";(function(n,o){!function(n,o){n(o)}(n,"boop")})((function(n){throw new Error(n)}))})(); +//# sourceMappingURL=webpack.bundle.js.map +//# debugId=00000000-0000-0000-0000-000000000000 diff --git a/tests/fixtures/adjust_mappings/webpack-injected.bundle.js.map b/tests/fixtures/adjust_mappings/webpack-injected.bundle.js.map new file mode 100644 index 00000000..aad4bdaf --- /dev/null +++ b/tests/fixtures/adjust_mappings/webpack-injected.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"mappings":";;AAAA;AACA;;","names":[],"sources":["pre_injection.js"],"sourcesContent":["(()=>{\"use strict\";(function(n,o){!function(n,o){n(o)}(n,\"boop\")})((function(n){throw new Error(n)}))})();\n//# sourceMappingURL=webpack.bundle.js.map"],"file":null} \ No newline at end of file diff --git 
a/tests/fixtures/adjust_mappings/webpack.bundle.js b/tests/fixtures/adjust_mappings/webpack.bundle.js new file mode 100644 index 00000000..4462e0a8 --- /dev/null +++ b/tests/fixtures/adjust_mappings/webpack.bundle.js @@ -0,0 +1,2 @@ +(()=>{"use strict";(function(n,o){!function(n,o){n(o)}(n,"boop")})((function(n){throw new Error(n)}))})(); +//# sourceMappingURL=webpack.bundle.js.map \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/webpack.bundle.js.map b/tests/fixtures/adjust_mappings/webpack.bundle.js.map new file mode 100644 index 00000000..2c478f2c --- /dev/null +++ b/tests/fixtures/adjust_mappings/webpack.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webpack.bundle.js","mappings":"oBAEO,SAAaA,EAAIC,ICFjB,SAAaD,EAAIC,GACtBD,EAAGC,EACL,CDCEC,CAAIF,EEKH,OFJH,EEDEG,EAGE,SAAeF,GACjB,MAAM,IAAIG,MAAMH,EAClB,G","sources":["webpack://sourcemaps-playground/./src/foo.js","webpack://sourcemaps-playground/./src/bar.js","webpack://sourcemaps-playground/./src/index.js"],"sourcesContent":["import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","export function bar(fn, msg) {\n fn(msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["fn","msg","bar","foo","Error"],"sourceRoot":""} \ No newline at end of file From 013727054de5ecb0dd579f487e6e52a40947d49e Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 1 Aug 2023 09:47:56 +0200 Subject: [PATCH 21/23] Clean up tests and add rspack test case --- src/types.rs | 70 ++----------------- .../rspack-composed.bundle.js.map | 1 + .../adjust_mappings/rspack-injected.bundle.js | 5 ++ .../rspack-injected.bundle.js.map | 1 + .../fixtures/adjust_mappings/rspack.bundle.js | 2 + .../adjust_mappings/rspack.bundle.js.map | 1 + 6 files changed, 15 insertions(+), 65 deletions(-) create mode 100644 tests/fixtures/adjust_mappings/rspack-composed.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/rspack-injected.bundle.js create mode 100644 tests/fixtures/adjust_mappings/rspack-injected.bundle.js.map create mode 100644 tests/fixtures/adjust_mappings/rspack.bundle.js create mode 100644 tests/fixtures/adjust_mappings/rspack.bundle.js.map diff --git a/src/types.rs b/src/types.rs index d98e9031..e6d9ee29 100644 --- a/src/types.rs +++ b/src/types.rs @@ -839,9 +839,7 @@ impl SourceMap { /// This function assumes that `adjustment` contains no relevant information except for mappings. /// All information about sources and names is copied from `original`. /// - /// Note that the resulting sourcemap will be at most as fine-grained as `original.` For example, - /// if `original` maps every line/column to `0/0`, then `SourceMap::adjust_mappings(original, adjustment)` - /// will not map to anything other than`0/0`, irrespective of how detailed the mappings in `adjustment` are. + /// Note that the resulting sourcemap will be at most as fine-grained as `original.`. pub fn adjust_mappings(original: &Self, adjustment: &Self) -> Self { // The algorithm works by going through the tokens in `original` in order and adjusting // them depending on the token in `adjustment` they're "covered" by. @@ -1274,67 +1272,6 @@ mod tests { assert_eq!(new_sm.debug_id, Some(DebugId::default())); } - #[test] - fn test_compose_identity_left() { - // Identity mapping on "var my answer/* ignore this */ = 42;". 
- let adjustment = br#"{ - "version":3, - "mappings":"AAAA", - "names":[], - "sources":["edited.js"], - "sourcesContent":["var my answer/* ignore this */ = 42;"] - }"#; - - // Maps "var my answer/* ignore this */ = 42;" to "my problems = 99". - let original = br#"{ - "version":3, - "mappings":"IAAA,GAAG,uBAAQ,GAAG", - "names":[], - "sources":["original.js"], - "sourcesContent":["my problems = 99"] - }"#; - - let adjustment = SourceMap::from_slice(adjustment).unwrap(); - let original = SourceMap::from_slice(original).unwrap(); - - let composed = SourceMap::adjust_mappings(&original, &adjustment); - - assert_eq!(composed.tokens, original.tokens); - } - - #[test] - fn test_compose_identity_right() { - // Hires map from "var my problems = 99;" to "my problems = 99" - let adjustment = br#"{ - "version":3, - "mappings":"GAAA,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC", - "names":[], - "sources":["edited.js"], - "sourcesContent":["my problems = 99"] - }"#; - - // Identity map on "my problems = 99" - let original = br#"{ - "version":3, - "mappings":"AAAA", - "names":[], - "sources":["original.js"], - "sourcesContent":["my problems = 99"] - }"#; - - let adjustment = SourceMap::from_slice(adjustment).unwrap(); - let original = SourceMap::from_slice(original).unwrap(); - - let composed = SourceMap::adjust_mappings(&original, &adjustment); - - // After composition, all mappings point to (0, 0). - // Morally, the composition is the same as the map with the single mapping `(0,3) -> (0, 0)`. - for t in &composed.tokens { - assert_eq!(t.src_line, 0); - assert_eq!(t.src_col, 0); - } - } - #[test] fn test_adjust_mappings_injection() { // A test that `adjust_mappings` does what it's supposed to for debug id injection. @@ -1348,7 +1285,10 @@ mod tests { // If everything is working as intended, `composed.bundle.js.map` is a (good) sourcemap from `injected.bundle.js` to // the original sources. To verify that this is indeed the case, you can compare `bundle.js` / `bundle.js.map` with // `injected.bundle.js` / `composed.bundle.js.map` using https://sokra.github.io/source-map-visualization/#custom. - for bundler in ["esbuild", "rollup", "vite", "webpack"] { + // + // NB: In the case of `rspack`, the sourcemap generated by the bundler is *horrible*. It's probably not useful, but + // `adjust_mappings` preserves it as far as it goes. 
+ for bundler in ["esbuild", "rollup", "vite", "webpack", "rspack"] { let original_map_file = std::fs::File::open(format!( "tests/fixtures/adjust_mappings/{bundler}.bundle.js.map" )) diff --git a/tests/fixtures/adjust_mappings/rspack-composed.bundle.js.map b/tests/fixtures/adjust_mappings/rspack-composed.bundle.js.map new file mode 100644 index 00000000..5f95c59a --- /dev/null +++ b/tests/fixtures/adjust_mappings/rspack-composed.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rspack.bundle.js","sources":["./src/bar.js","./src/foo.js","./src/index.js"],"sourcesContent":["export function bar(fn, msg) {\n fn(msg);\n}\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["bar","fn","msg","foo","Error"],"mappings":";;mDAAO,SAASA,EAAIC,CAAE,CAAEC,CAAG,EACzBD,EAAGC,EACL,C,yEAFgB,O,oCAAAF,C,+GCEA,O,oCAAAG,C,kBAAT,SAASA,EAAIF,CAAE,CAAEC,CAAG,EACzB,KAAAF,GAAA,EAAIC,EAAIC,EACV,C,wFCFaD,EAAIC,E,WAAJD,EAIT,SAAeC,CAAG,EACpB,MAAM,AAAIE,MAAMF,EAClB,EANiBA,EAMd,OALD,KAAAC,GAAA,EAAIF,EAAIC,E"} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/rspack-injected.bundle.js b/tests/fixtures/adjust_mappings/rspack-injected.bundle.js new file mode 100644 index 00000000..ea714ae7 --- /dev/null +++ b/tests/fixtures/adjust_mappings/rspack-injected.bundle.js @@ -0,0 +1,5 @@ + +!function(){try{var e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},n=(new Error).stack;n&&(e._sentryDebugIds=e._sentryDebugIds||{},e._sentryDebugIds[n]="00000000-0000-0000-0000-000000000000")}catch(e){}}(); +!function(){var e={62:function(e,r,t){"use strict";function n(e,r){e(r)}Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"bar",{enumerable:!0,get:function(){return n}})},447:function(e,r,t){"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"foo",{enumerable:!0,get:function(){return o}});var n=t("62");function o(e,r){(0,n.bar)(e,r)}},151:function(e,r,t){"use strict";Object.defineProperty(r,"__esModule",{value:!0});var n,o,u=t("447");n=function(e){throw Error(e)},o="boop",(0,u.foo)(n,o)}},r={};!function t(n){var o=r[n];if(void 0!==o)return o.exports;var u=r[n]={exports:{}};return e[n](u,u.exports,t),u.exports}("151")}(); +//# sourceMappingURL=rspack.bundle.js.map +//# debugId=00000000-0000-0000-0000-000000000000 diff --git a/tests/fixtures/adjust_mappings/rspack-injected.bundle.js.map b/tests/fixtures/adjust_mappings/rspack-injected.bundle.js.map new file mode 100644 index 00000000..bf98d32e --- /dev/null +++ b/tests/fixtures/adjust_mappings/rspack-injected.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"mappings":";;AAAA;AACA;;","names":[],"sources":["pre_injection.js"],"sourcesContent":["!function(){var e={62:function(e,r,t){\"use strict\";function n(e,r){e(r)}Object.defineProperty(r,\"__esModule\",{value:!0}),Object.defineProperty(r,\"bar\",{enumerable:!0,get:function(){return n}})},447:function(e,r,t){\"use strict\";Object.defineProperty(r,\"__esModule\",{value:!0}),Object.defineProperty(r,\"foo\",{enumerable:!0,get:function(){return o}});var n=t(\"62\");function o(e,r){(0,n.bar)(e,r)}},151:function(e,r,t){\"use strict\";Object.defineProperty(r,\"__esModule\",{value:!0});var n,o,u=t(\"447\");n=function(e){throw Error(e)},o=\"boop\",(0,u.foo)(n,o)}},r={};!function t(n){var o=r[n];if(void 0!==o)return o.exports;var 
u=r[n]={exports:{}};return e[n](u,u.exports,t),u.exports}(\"151\")}();\n//# sourceMappingURL=rspack.bundle.js.map"],"file":null} \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/rspack.bundle.js b/tests/fixtures/adjust_mappings/rspack.bundle.js new file mode 100644 index 00000000..efd4c40c --- /dev/null +++ b/tests/fixtures/adjust_mappings/rspack.bundle.js @@ -0,0 +1,2 @@ +!function(){var e={62:function(e,r,t){"use strict";function n(e,r){e(r)}Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"bar",{enumerable:!0,get:function(){return n}})},447:function(e,r,t){"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"foo",{enumerable:!0,get:function(){return o}});var n=t("62");function o(e,r){(0,n.bar)(e,r)}},151:function(e,r,t){"use strict";Object.defineProperty(r,"__esModule",{value:!0});var n,o,u=t("447");n=function(e){throw Error(e)},o="boop",(0,u.foo)(n,o)}},r={};!function t(n){var o=r[n];if(void 0!==o)return o.exports;var u=r[n]={exports:{}};return e[n](u,u.exports,t),u.exports}("151")}(); +//# sourceMappingURL=rspack.bundle.js.map \ No newline at end of file diff --git a/tests/fixtures/adjust_mappings/rspack.bundle.js.map b/tests/fixtures/adjust_mappings/rspack.bundle.js.map new file mode 100644 index 00000000..2b0f39cf --- /dev/null +++ b/tests/fixtures/adjust_mappings/rspack.bundle.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rspack.bundle.js","sources":["./src/bar.js","./src/foo.js","./src/index.js"],"sourcesContent":["export function bar(fn, msg) {\n fn(msg);\n}\n","import { bar } from './bar.js';\n\nexport function foo(fn, msg) {\n bar(fn, msg);\n}\n","import { foo } from './foo.js';\n\nfunction wat(fn, msg) {\n foo(fn, msg);\n}\n\nwat(function hello(msg) {\n throw new Error(msg);\n}, 'boop');\n"],"names":["bar","fn","msg","foo","Error"],"mappings":"mDAAO,SAASA,EAAIC,CAAE,CAAEC,CAAG,EACzBD,EAAGC,EACL,C,yEAFgB,O,oCAAAF,C,+GCEA,O,oCAAAG,C,kBAAT,SAASA,EAAIF,CAAE,CAAEC,CAAG,EACzB,KAAAF,GAAA,EAAIC,EAAIC,EACV,C,wFCFaD,EAAIC,E,WAAJD,EAIT,SAAeC,CAAG,EACpB,MAAM,AAAIE,MAAMF,EAClB,EANiBA,EAMd,OALD,KAAAC,GAAA,EAAIF,EAAIC,E"} \ No newline at end of file From ab1b0134695be400f213b48fc98180e21812a979 Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 1 Aug 2023 13:58:51 +0200 Subject: [PATCH 22/23] Property-based testing --- Cargo.toml | 4 + src/types.rs | 210 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 214 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index 35e48a40..0a7b5eec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,3 +42,7 @@ ram_bundle = ["scroll"] [[example]] name = "split_ram_bundle" required-features = ["ram_bundle"] + +[dev-dependencies] +magic_string = "0.3.4" +proptest = "1.2.0" diff --git a/src/types.rs b/src/types.rs index e6d9ee29..8d004172 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1314,4 +1314,214 @@ mod tests { ); } } + + mod prop { + //! This module exists to test the following property: + //! + //! Let `s` be a string. + //! 1. Edit `s` with `magic-string` in such a way that edits (insertions, deletions) only happen *within* lines. + //! Call the resulting string `t` and the sourcemap relating the two `m₁`. + //! 2. Further edit `t` with `magic-string` so that only *whole* lines are edited (inserted, deleted, prepended, appended). + //! Call the resulting string `u` and the sourcemap relating `u` to `t` `m₂`. + //! 3. Do (1) and (2) in one go. The resulting string should still be `u`. Call the sourcemap + //! relating `u` and `s` `m₃`. + //! + //! 
Then `SourceMap::adjust_mappings(m₁, m₂) = m₃`. + //! + //! Or, in diagram form: + //! + //! u -----m₂--------> t -----m₁--------> s + //! | -----------------m₃-----------------> | + //! + //! For the sake of simplicty, all input strings are 10 lines by 10 columns of the characters a-z. + use magic_string::MagicString; + use proptest::prelude::*; + + use crate::SourceMap; + + /// An edit in the first batch (only within a line). + #[derive(Debug, Clone)] + enum FirstEdit { + /// Insert a string at a column. + Insert(u32, String), + /// Delete from one column to the other. + Delete(i64, i64), + } + + impl FirstEdit { + /// Applies an edit to the given line in the given `MagicString`. + fn apply(&self, line: usize, ms: &mut MagicString) { + // Every line is 11 bytes long, counting the newline. + let line_offset = line * 11; + match self { + FirstEdit::Insert(col, s) => { + ms.append_left(line_offset as u32 + *col, s).unwrap(); + } + FirstEdit::Delete(start, end) => { + ms.remove(line_offset as i64 + *start, line_offset as i64 + *end) + .unwrap(); + } + } + } + } + + /// Find the start and end index of the n'th line in the given string + /// (including the terminating newline, if there is one). + fn nth_line_start_end(n: usize, s: &str) -> (usize, usize) { + let line = s.lines().nth(n).unwrap(); + let start = line.as_ptr() as usize - s.as_ptr() as usize; + // All lines except line 9 have a final newline. + let end = if n == 9 { + start + line.len() + } else { + start + line.len() + 1 + }; + (start, end) + } + + /// An edit in the second batch (only whole lines). + #[derive(Debug, Clone)] + enum SecondEdit { + /// Prepends a string. + Prepend(String), + /// Appends a string. + Append(String), + /// Inserts a string at a given line. + Insert(usize, String), + /// Deletes a a line. + Delete(usize), + } + + impl SecondEdit { + /// Applies an edit to a `MagicString`. + /// + /// This must know the original string (which unfortunately can't be extracted from a `MagicString`) + /// to find line boundaries. + fn apply(&self, orig: &str, ms: &mut MagicString) { + match self { + SecondEdit::Prepend(s) => { + ms.prepend(s).unwrap(); + } + SecondEdit::Append(s) => { + ms.append(s).unwrap(); + } + SecondEdit::Insert(line, s) => { + let (start, _) = nth_line_start_end(*line, orig); + ms.prepend_left(start as u32, s).unwrap(); + } + SecondEdit::Delete(line) => { + let (start, end) = nth_line_start_end(*line, orig); + ms.remove(start as i64, end as i64).unwrap(); + } + } + } + } + + /// Produces a random 10x10 grid of the characters a-z. + fn starting_string() -> impl Strategy { + (vec!["[a-z]{10}"; 10]).prop_map(|v| v.join("\n")) + } + + /// Produces a random first-batch edit. + fn first_edit() -> impl Strategy { + prop_oneof![ + (1u32..9, "[a-z]{5}").prop_map(|(c, s)| FirstEdit::Insert(c, s)), + (1i64..10) + .prop_flat_map(|end| (0..end, Just(end))) + .prop_map(|(a, b)| FirstEdit::Delete(a, b)) + ] + } + + /// Produces a random sequence of first-batch edits, one per line. + /// + /// Thus, each line will either have an insertion or a deletion. + fn first_edit_sequence() -> impl Strategy> { + let mut vec = Vec::with_capacity(10); + + for _ in 0..10 { + vec.push(first_edit()) + } + + vec + } + + /// Produces a random sequence of second-batch edits, one per line. + /// + /// Each edit may delete a line, insert a line, or prepend or append something + /// to the whole string. No two edits operate on the same line. The order of the edits is random. 
+ fn second_edit_sequence() -> impl Strategy> { + let edits = (0..10) + .map(|i| { + prop_oneof![ + "[a-z\n]{12}".prop_map(SecondEdit::Prepend), + "[a-z\n]{12}".prop_map(SecondEdit::Append), + "[a-z\n]{11}\n".prop_map(move |s| SecondEdit::Insert(i, s)), + Just(SecondEdit::Delete(i)), + ] + }) + .collect::>(); + + edits.prop_shuffle() + } + + proptest! { + #[test] + fn test_composition_identity( + input in starting_string(), + first_edits in first_edit_sequence(), + second_edits in second_edit_sequence(), + ) { + + // Do edits in two batches and generate two sourcemaps + + let mut ms1 = MagicString::new(&input); + + for (line, first_edit) in first_edits.iter().enumerate() { + first_edit.apply(line, &mut ms1); + } + + let first_map = ms1.generate_map(Default::default()).unwrap().to_string().unwrap(); + let first_map = SourceMap::from_slice(first_map.as_bytes()).unwrap(); + + let transformed_input = ms1.to_string(); + + let mut ms2 = MagicString::new(&transformed_input); + + for second_edit in second_edits.iter() { + second_edit.apply(&transformed_input, &mut ms2); + } + + let output_1 = ms2.to_string(); + + let second_map = ms2.generate_map(Default::default()).unwrap().to_string().unwrap(); + let second_map = SourceMap::from_slice(second_map.as_bytes()).unwrap(); + + // Do edits again in one batch and generate one big sourcemap + + let mut ms3 = MagicString::new(&input); + + for (line, first_edit) in first_edits.iter().enumerate() { + first_edit.apply(line, &mut ms3); + } + + for second_edit in second_edits.iter() { + second_edit.apply(&input, &mut ms3); + } + + let output_2 = ms3.to_string(); + + let third_map = ms3.generate_map(Default::default()).unwrap().to_string().unwrap(); + let third_map = SourceMap::from_slice(third_map.as_bytes()).unwrap(); + + + // Both methods must produce the same output + assert_eq!(output_1, output_2); + + + let composed_map = SourceMap::adjust_mappings(&first_map, &second_map); + + assert_eq!(composed_map.tokens, third_map.tokens); + } + } + } } From e78971c919621c604685ae8a0596ebf16af66dcb Mon Sep 17 00:00:00 2001 From: Sebastian Zivota Date: Tue, 1 Aug 2023 14:22:02 +0200 Subject: [PATCH 23/23] changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a65c3c4f..e32bc247 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## Unreleased + +### Various fixes and improvements + +- feat: Implement sourcemap composition(#67) by @loewenheim + ## 6.3.0 ### Various fixes & improvements
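For reference, the intended end-to-end use of the new function (the workflow exercised by the injection tests above) looks roughly like the sketch below. This is an illustrative example rather than code from the patches: the file names are placeholders, and it assumes the crate's existing `from_slice`/`to_writer` helpers alongside the `adjust_mappings` function added in this series.

    use sourcemap::SourceMap;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // `bundle.js.map`: the bundler's sourcemap for the original bundle.
        let original = SourceMap::from_slice(&std::fs::read("bundle.js.map")?)?;
        // `injected.bundle.js.map`: maps the debug-id-injected bundle back to
        // the original bundle (e.g. generated with `magic-string`).
        let adjustment = SourceMap::from_slice(&std::fs::read("injected.bundle.js.map")?)?;

        // The adjusted map takes the injected bundle straight to the original
        // sources; sources, contents, and names all come from `original`.
        let composed = SourceMap::adjust_mappings(&original, &adjustment);

        let mut out = Vec::new();
        composed.to_writer(&mut out)?;
        std::fs::write("composed.bundle.js.map", out)?;
        Ok(())
    }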