From c28baca8545d1efb814e25bfe8fa3ecca83ba2f1 Mon Sep 17 00:00:00 2001
From: Devon Govett
Date: Mon, 4 Jul 2022 10:52:54 -0700
Subject: [PATCH] Add JSON and data URL parsing and serialization support to Rust API (#109)

---
 .github/workflows/test.yml | 2 +
 Cargo.lock | 29 ++++-
 parcel_sourcemap/Cargo.toml | 7 ++
 parcel_sourcemap/src/lib.rs | 158 ++++++++++++++++++++++--
 parcel_sourcemap/src/mapping.rs | 4 +-
 parcel_sourcemap/src/mapping_line.rs | 2 +-
 parcel_sourcemap/src/sourcemap_error.rs | 30 +++++
 7 files changed, 217 insertions(+), 15 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3289cecf..11505ce7 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -38,6 +38,8 @@ jobs:
         run: yarn run build:node-release
       - name: Run node tests
         run: yarn run test:node
+      - name: Run rust tests
+        run: cargo test --all-features
 
   test-wasm:
     name: wasm
diff --git a/Cargo.lock b/Cargo.lock
index 849f135e..543aa052 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,6 +13,12 @@ dependencies = [
  "version_check",
 ]
 
+[[package]]
+name = "base64"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
+
 [[package]]
 name = "bumpalo"
 version = "3.9.1"
@@ -52,6 +58,15 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "data-url"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a30bfce702bcfa94e906ef82421f2c0e61c076ad76030c16ee5d2e9a32fe193"
+dependencies = [
+ "matches",
+]
+
 [[package]]
 name = "fs_extra"
 version = "1.2.0"
@@ -135,6 +150,12 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "matches"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
+
 [[package]]
 name = "napi"
 version = "1.8.0"
@@ -180,7 +201,11 @@ checksum = "7b10983b38c53aebdf33f542c6275b0f58a238129d00c4ae0e6fb59738d783ca"
 name = "parcel_sourcemap"
 version = "2.0.5"
 dependencies = [
+ "base64",
+ "data-url",
  "rkyv",
+ "serde",
+ "serde_json",
  "vlq",
 ]
 
@@ -315,9 +340,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.81"
+version = "1.0.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
 dependencies = [
  "itoa",
  "ryu",
diff --git a/parcel_sourcemap/Cargo.toml b/parcel_sourcemap/Cargo.toml
index d196a96d..b5ce4791 100644
--- a/parcel_sourcemap/Cargo.toml
+++ b/parcel_sourcemap/Cargo.toml
@@ -11,3 +11,10 @@ repository = "https://github.com/parcel-bundler/source-map"
 [dependencies]
 "vlq" = "0.5.1"
 rkyv = "0.7.38"
+serde = {version = "1", features = ["derive"], optional = true}
+serde_json = { version = "1", optional = true }
+base64 = { version = "0.13.0", optional = true }
+data-url = { version = "0.1.1", optional = true }
+
+[features]
+json = ["serde", "serde_json", "base64", "data-url"]
diff --git a/parcel_sourcemap/src/lib.rs b/parcel_sourcemap/src/lib.rs
index 5d042995..0288c0cc 100644
--- a/parcel_sourcemap/src/lib.rs
+++ b/parcel_sourcemap/src/lib.rs
@@ -7,9 +7,13 @@ pub mod utils;
 mod vlq_utils;
 
 use crate::utils::make_relative_path;
+#[cfg(feature = "json")]
+use data_url::DataUrl;
 pub use mapping::{Mapping, OriginalLocation};
 use mapping_line::MappingLine;
 pub use sourcemap_error::{SourceMapError, SourceMapErrorType};
+#[cfg(feature = "json")]
+use std::borrow::Cow;
 use std::io;
 
 use rkyv::{
@@ -229,11 +233,11 @@ impl SourceMap {
         }
     }
 
-    pub fn add_sources(&mut self, sources: Vec<&str>) -> Vec<u32> {
+    pub fn add_sources<S: AsRef<str>>(&mut self, sources: Vec<S>) -> Vec<u32> {
         self.inner.sources.reserve(sources.len());
         let mut result_vec = Vec::with_capacity(sources.len());
         for s in sources.iter() {
-            result_vec.push(self.add_source(s));
+            result_vec.push(self.add_source(s.as_ref()));
         }
         result_vec
     }
@@ -270,9 +274,9 @@ impl SourceMap {
         };
     }
 
-    pub fn add_names(&mut self, names: Vec<&str>) -> Vec<u32> {
+    pub fn add_names<S: AsRef<str>>(&mut self, names: Vec<S>) -> Vec<u32> {
         self.inner.names.reserve(names.len());
-        return names.iter().map(|n| self.add_name(n)).collect();
+        return names.iter().map(|n| self.add_name(n.as_ref())).collect();
     }
 
     pub fn get_name_index(&self, name: &str) -> Option<u32> {
@@ -510,12 +514,12 @@ impl SourceMap {
         Ok(())
     }
 
-    pub fn add_vlq_map(
+    pub fn add_vlq_map<S: AsRef<str>>(
         &mut self,
         input: &[u8],
-        sources: Vec<&str>,
-        sources_content: Vec<&str>,
-        names: Vec<&str>,
+        sources: Vec<S>,
+        sources_content: Vec<S>,
+        names: Vec<S>,
         line_offset: i64,
         column_offset: i64,
     ) -> Result<(), SourceMapError> {
@@ -532,7 +536,7 @@ impl SourceMap {
         self.inner.sources_content.reserve(sources_content.len());
         for (i, source_content) in sources_content.iter().enumerate() {
             if let Some(source_index) = source_indexes.get(i) {
-                self.set_source_content(*source_index as usize, source_content)?;
+                self.set_source_content(*source_index as usize, source_content.as_ref())?;
             }
         }
 
@@ -627,7 +631,7 @@ impl SourceMap {
         }
         let line = generated_line as usize;
-        let abs_offset = generated_line_offset.abs() as usize;
+        let abs_offset = generated_line_offset.unsigned_abs() as usize;
         if generated_line_offset > 0 {
             if line > self.inner.mapping_lines.len() {
                 self.ensure_lines(line + abs_offset);
             }
@@ -670,6 +674,85 @@ impl SourceMap {
         Ok(())
     }
+
+    #[cfg(feature = "json")]
+    pub fn from_json<'a>(project_root: &str, input: &'a str) -> Result<SourceMap, SourceMapError> {
+        #[derive(serde::Deserialize)]
+        #[serde(rename_all = "camelCase")]
+        struct JSONSourceMap<'a> {
+            mappings: &'a str,
+            #[serde(borrow)]
+            sources: Vec<Cow<'a, str>>,
+            sources_content: Vec<Cow<'a, str>>,
+            names: Vec<Cow<'a, str>>,
+        }
+
+        let json: JSONSourceMap = serde_json::from_str(input)?;
+        let mut sm = Self::new(project_root);
+        sm.add_vlq_map(
+            json.mappings.as_bytes(),
+            json.sources,
+            json.sources_content,
+            json.names,
+            0,
+            0,
+        )?;
+        Ok(sm)
+    }
+
+    #[cfg(feature = "json")]
+    pub fn to_json(&mut self, source_root: Option<&str>) -> Result<String, SourceMapError> {
+        let mut vlq_output: Vec<u8> = Vec::new();
+        self.write_vlq(&mut vlq_output)?;
+
+        #[derive(serde::Serialize)]
+        #[serde(rename_all = "camelCase")]
+        struct JSONSourceMap<'a> {
+            version: u8,
+            source_root: Option<&'a str>,
+            mappings: &'a str,
+            sources: &'a Vec<String>,
+            sources_content: &'a Vec<String>,
+            names: &'a Vec<String>,
+        }
+
+        let sm = JSONSourceMap {
+            version: 3,
+            source_root,
+            mappings: unsafe { std::str::from_utf8_unchecked(&vlq_output) },
+            sources: self.get_sources(),
+            sources_content: self.get_sources_content(),
+            names: self.get_names(),
+        };
+
+        Ok(serde_json::to_string(&sm)?)
+    }
+
+    #[cfg(feature = "json")]
+    pub fn from_data_url(project_root: &str, data_url: &str) -> Result<SourceMap, SourceMapError> {
+        let url = DataUrl::process(&data_url)?;
+        let mime = url.mime_type();
+        if mime.type_ != "application" || mime.subtype != "json" {
+            return Err(SourceMapError::new(SourceMapErrorType::DataUrlError));
+        }
+
+        let (data, _) = url
+            .decode_to_vec()
+            .map_err(|_| SourceMapError::new(SourceMapErrorType::DataUrlError))?;
+        let input = unsafe { std::str::from_utf8_unchecked(data.as_slice()) };
+
+        Self::from_json(project_root, input)
+    }
+
+    #[cfg(feature = "json")]
+    pub fn to_data_url(&mut self, source_root: Option<&str>) -> Result<String, SourceMapError> {
+        let buf = self.to_json(source_root)?;
+        let b64 = base64::encode(&buf);
+        Ok(format!(
+            "data:application/json;charset=utf-8;base64,{}",
+            b64
+        ))
+    }
 }
 
 #[allow(non_fmt_panics)]
 #[test]
@@ -688,3 +771,58 @@ fn test_buffers() {
         Err(err) => panic!(err),
     }
 }
+
+#[cfg(feature = "json")]
+#[test]
+fn test_to_json() {
+    let mut map = SourceMap::new("/");
+    map.add_mapping(1, 1, None);
+    let json = map.to_json(Some("/")).unwrap();
+    assert_eq!(
+        json,
+        r#"{"version":3,"sourceRoot":"/","mappings":";C","sources":[],"sourcesContent":[],"names":[]}"#
+    );
+}
+
+#[cfg(feature = "json")]
+#[test]
+fn test_from_json() {
+    let map = SourceMap::from_json("/", r#"{"version":3,"sourceRoot":"/","mappings":";C","sources":[],"sourcesContent":[],"names":[]}"#).unwrap();
+    let mappings = map.get_mappings();
+    assert_eq!(
+        mappings,
+        vec![Mapping {
+            generated_line: 1,
+            generated_column: 1,
+            original: None
+        }]
+    );
+}
+
+#[cfg(feature = "json")]
+#[test]
+fn test_to_data_url() {
+    let mut map = SourceMap::new("/");
+    map.add_mapping(1, 1, None);
+    let url = map.to_data_url(Some("/")).unwrap();
+    println!("{}", url);
+    assert_eq!(
+        url,
+        r#"data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VSb290IjoiLyIsIm1hcHBpbmdzIjoiO0MiLCJzb3VyY2VzIjpbXSwic291cmNlc0NvbnRlbnQiOltdLCJuYW1lcyI6W119"#
+    );
+}
+
+#[cfg(feature = "json")]
+#[test]
+fn test_from_data_url() {
+    let map = SourceMap::from_data_url("/", r#"data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VSb290IjoiLyIsIm1hcHBpbmdzIjoiO0MiLCJzb3VyY2VzIjpbXSwic291cmNlc0NvbnRlbnQiOltdLCJuYW1lcyI6W119"#).unwrap();
+    let mappings = map.get_mappings();
+    assert_eq!(
+        mappings,
+        vec![Mapping {
+            generated_line: 1,
+            generated_column: 1,
+            original: None
+        }]
+    );
+}
diff --git a/parcel_sourcemap/src/mapping.rs b/parcel_sourcemap/src/mapping.rs
index 82c840a0..281d49bc 100644
--- a/parcel_sourcemap/src/mapping.rs
+++ b/parcel_sourcemap/src/mapping.rs
@@ -1,6 +1,6 @@
 use rkyv::{Archive, Deserialize, Serialize};
 
-#[derive(Archive, Serialize, Deserialize, Debug, Clone, Copy)]
+#[derive(Archive, Serialize, Deserialize, Debug, Clone, Copy, PartialEq)]
 pub struct OriginalLocation {
     pub original_line: u32,
     pub original_column: u32,
@@ -19,7 +19,7 @@ impl OriginalLocation {
     }
 }
 
-#[derive(Archive, Serialize, Deserialize, Debug)]
+#[derive(Archive, Serialize, Deserialize, Debug, PartialEq)]
 pub struct Mapping {
     pub generated_line: u32,
     pub generated_column: u32,
diff --git a/parcel_sourcemap/src/mapping_line.rs b/parcel_sourcemap/src/mapping_line.rs
index 376b647a..4fedff41 100644
--- a/parcel_sourcemap/src/mapping_line.rs
+++ b/parcel_sourcemap/src/mapping_line.rs
@@ -108,7 +108,7 @@ impl MappingLine {
             index = start_index;
         }
 
-        let abs_offset = generated_column_offset.abs() as u32;
+        let abs_offset = generated_column_offset.unsigned_abs() as u32;
         for i in index..self.mappings.len() {
             let mapping = &mut self.mappings[i];
             mapping.generated_column = if generated_column_offset < 0 {
diff --git a/parcel_sourcemap/src/sourcemap_error.rs b/parcel_sourcemap/src/sourcemap_error.rs
index fc546874..b1ef653a 100644
--- a/parcel_sourcemap/src/sourcemap_error.rs
+++ b/parcel_sourcemap/src/sourcemap_error.rs
@@ -38,6 +38,13 @@ pub enum SourceMapErrorType {
 
     // Failed to convert utf-8 to array
     FromUtf8Error = 11,
+
+    // Failed to serialize to JSON
+    JSONError = 12,
+
+    // Failed to parse data url
+    #[cfg(feature = "json")]
+    DataUrlError = 13,
 }
 
 #[derive(Debug)]
@@ -107,6 +114,13 @@ impl std::fmt::Display for SourceMapError {
             SourceMapErrorType::FromUtf8Error => {
                 write!(f, "Could not convert utf-8 array to string")?;
             }
+            SourceMapErrorType::JSONError => {
+                write!(f, "Error reading or writing to JSON")?;
+            }
+            #[cfg(feature = "json")]
+            SourceMapErrorType::DataUrlError => {
+                write!(f, "Error parsing data url")?;
+            }
         }
 
         // Add reason to error string if there is one
@@ -162,3 +176,19 @@ impl From<std::string::FromUtf8Error> for SourceMapError {
         SourceMapError::new(SourceMapErrorType::FromUtf8Error)
     }
 }
+
+#[cfg(feature = "json")]
+impl From<serde_json::Error> for SourceMapError {
+    #[inline]
+    fn from(_err: serde_json::Error) -> SourceMapError {
+        SourceMapError::new(SourceMapErrorType::JSONError)
+    }
+}
+
+#[cfg(feature = "json")]
+impl From<data_url::DataUrlError> for SourceMapError {
+    #[inline]
+    fn from(_err: data_url::DataUrlError) -> SourceMapError {
+        SourceMapError::new(SourceMapErrorType::DataUrlError)
+    }
+}
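
Usage note (reviewer sketch, not part of the patch): the snippet below exercises the API this change adds behind the `json` feature — SourceMap::from_json, to_json, from_data_url and to_data_url — together with the existing get_mappings accessor that the new tests rely on. It assumes the crate is consumed with the feature enabled, e.g. parcel_sourcemap = { version = "2", features = ["json"] } in a downstream Cargo.toml (that dependency line is illustrative, not from the patch); the JSON map literal is copied from test_from_json above.

    use parcel_sourcemap::{SourceMap, SourceMapError};

    fn main() -> Result<(), SourceMapError> {
        // Parse a JSON source map (same literal as test_from_json above).
        let mut map = SourceMap::from_json(
            "/",
            r#"{"version":3,"sourceRoot":"/","mappings":";C","sources":[],"sourcesContent":[],"names":[]}"#,
        )?;

        // Serialize back out, either as a JSON string or as a base64-encoded data URL.
        let json = map.to_json(Some("/"))?;
        let data_url = map.to_data_url(Some("/"))?;
        println!("{}\n{}", json, data_url);

        // Round-trip through the data URL form; the mappings come back unchanged.
        let round_tripped = SourceMap::from_data_url("/", &data_url)?;
        assert_eq!(round_tripped.get_mappings(), map.get_mappings());
        Ok(())
    }

Because serde, serde_json, base64 and data-url are all optional dependencies gated on the `json` feature, builds that stick to the default feature set compile none of this in.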