diff --git a/Cargo.toml b/Cargo.toml
index acff1fd1cb45..2489bf33f58c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -17,6 +17,7 @@ name = "swc"
 swc_atoms = { path ="./atoms" }
 swc_common = { path ="./common" }
 swc_ecmascript = { path ="./ecmascript" }
+log = { version = "0.4", features = ["release_max_level_info"] }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 failure = "0.1"
@@ -26,11 +27,12 @@ hashbrown = "0.6"
 regex = "1"
 either = "1"
 dashmap = "=3.4.0"
-sourcemap = "4.1.1 "
+sourcemap = "5"
 
 [dev-dependencies]
 testing = { path = "./testing" }
 walkdir = "2"
+rayon = "1"
 
 [[example]]
 name = "usage"
diff --git a/common/src/fold/and_then.rs b/common/src/fold/and_then.rs
index 8aac56ebebc1..da5db255030c 100644
--- a/common/src/fold/and_then.rs
+++ b/common/src/fold/and_then.rs
@@ -36,10 +36,6 @@ pub struct AndThen<A, B> {
     pub second: B,
 }
 
-// fn type_name<T>() -> String {
-//     format!("{}", unsafe { std::intrinsics::type_name::<T>() })
-// }
-
 impl<T, A, B> Fold<T> for AndThen<A, B>
 where
     T: FoldWith<Self>,
diff --git a/common/src/pass.rs b/common/src/pass.rs
index afdb3c19c8ca..7bdf24faa69b 100644
--- a/common/src/pass.rs
+++ b/common/src/pass.rs
@@ -1,3 +1,4 @@
+#[cfg(feature = "fold")]
 use crate::{Fold, FoldWith};
 use serde::export::PhantomData;
 use std::borrow::Cow;
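
Note on the new `log` dependency: the `release_max_level_info` feature caps the crate's compile-time max level at `Info` in release builds, so `debug!`/`trace!` call sites compile to no-ops on hot paths. A minimal sketch of what that buys (the `env_logger` logger here is an illustrative assumption, not part of this patch):

```rust
use log::{debug, info};

fn transform_file(name: &str) {
    // Statically disabled in release builds under `release_max_level_info`.
    debug!("folding {}", name);
    // Still emitted in release builds, provided a logger is installed.
    info!("processed {}", name);
}

fn main() {
    env_logger::init(); // hypothetical logger choice for this sketch
    transform_file("a.ts");
}
```
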
diff --git a/common/src/source_map.rs b/common/src/source_map.rs
index 186faee53fde..d939105f7942 100644
--- a/common/src/source_map.rs
+++ b/common/src/source_map.rs
@@ -25,11 +25,16 @@ use crate::{
 use hashbrown::HashMap;
 use log::debug;
 use std::{
-    cmp, env, fs,
+    cmp,
+    cmp::{max, min},
+    env, fs,
     hash::Hash,
     io::{self, Read},
     path::{Path, PathBuf},
-    sync::Arc,
+    sync::{
+        atomic::{AtomicUsize, Ordering::SeqCst},
+        Arc,
+    },
 };
 
 // _____________________________________________________________________________
@@ -101,6 +106,7 @@ pub(super) struct SourceMapFiles {
 
 pub struct SourceMap {
     pub(super) files: Lock<SourceMapFiles>,
+    start_pos: AtomicUsize,
     file_loader: Box<dyn FileLoader + Sync + Send>,
     // This is used to apply the file path remapping as specified via
     // --remap-path-prefix to all SourceFiles allocated within this SourceMap.
@@ -120,25 +126,20 @@ impl SourceMap {
     pub fn new(path_mapping: FilePathMapping) -> SourceMap {
         SourceMap {
             files: Default::default(),
+            start_pos: Default::default(),
             file_loader: Box::new(RealFileLoader),
             path_mapping,
             doctest_offset: None,
         }
     }
 
-    pub fn new_doctest(path_mapping: FilePathMapping, file: FileName, line: isize) -> SourceMap {
-        SourceMap {
-            doctest_offset: Some((file, line)),
-            ..SourceMap::new(path_mapping)
-        }
-    }
-
     pub fn with_file_loader(
         file_loader: Box<dyn FileLoader + Sync + Send>,
         path_mapping: FilePathMapping,
     ) -> SourceMap {
         SourceMap {
             files: Default::default(),
+            start_pos: Default::default(),
             file_loader,
             path_mapping,
             doctest_offset: None,
@@ -155,11 +156,7 @@ impl SourceMap {
 
     pub fn load_file(&self, path: &Path) -> io::Result<Arc<SourceFile>> {
         let src = self.file_loader.read_file(path)?;
-        let filename = if let Some((ref name, _)) = self.doctest_offset {
-            name.clone()
-        } else {
-            path.to_owned().into()
-        };
+        let filename = path.to_owned().into();
         Ok(self.new_source_file(filename, src))
     }
 
@@ -178,19 +175,19 @@ impl SourceMap {
             .cloned()
     }
 
-    fn next_start_pos(&self) -> usize {
+    fn next_start_pos(&self, len: usize) -> usize {
         match self.files.borrow().source_files.last() {
-            None => 0,
+            None => self.start_pos.fetch_add(len + 1, SeqCst),
             // Add one so there is some space between files. This lets us distinguish
             // positions in the source_map, even in the presence of zero-length files.
-            Some(last) => last.end_pos.to_usize() + 1,
+            Some(_) => self.start_pos.fetch_add(len + 1, SeqCst),
         }
     }
 
     /// Creates a new source_file.
     /// This does not ensure that only one SourceFile exists per file name.
     pub fn new_source_file(&self, filename: FileName, src: String) -> Arc<SourceFile> {
-        let start_pos = self.next_start_pos();
+        let start_pos = self.next_start_pos(src.len());
 
         // The path is used to determine the directory for loading submodules and
         // include files, so it must be before remapping.
@@ -214,12 +211,14 @@ impl SourceMap {
             Pos::from_usize(start_pos),
         ));
 
-        let mut files = self.files.borrow_mut();
+        {
+            let mut files = self.files.borrow_mut();
 
-        files.source_files.push(source_file.clone());
-        files
-            .stable_id_to_source_file
-            .insert(StableSourceFileId::new(&source_file), source_file.clone());
+            files.source_files.push(source_file.clone());
+            files
+                .stable_id_to_source_file
+                .insert(StableSourceFileId::new(&source_file), source_file.clone());
+        }
 
         source_file
     }
@@ -253,8 +252,17 @@ impl SourceMap {
             Ok(SourceFileAndLine { sf: f, line: a }) => {
                 let line = a + 1; // Line numbers start at 1
                 let linebpos = f.lines[a];
+                assert!(
+                    pos >= linebpos,
+                    "{}: bpos = {:?}; linebpos = {:?};",
+                    f.name,
+                    pos,
+                    linebpos,
+                );
+
                 let linechpos = self.bytepos_to_file_charpos(linebpos);
-                let col = chpos - linechpos;
+
+                let col = max(chpos, linechpos) - min(chpos, linechpos);
 
                 let col_display = {
                     let start_width_idx = f
@@ -281,7 +289,7 @@ impl SourceMap {
                     chpos, linechpos
                 );
                 debug!("byte is on line: {}", line);
-                assert!(chpos >= linechpos);
+                // assert!(chpos >= linechpos);
                 Loc {
                     file: f,
                     line,
@@ -313,9 +321,7 @@ impl SourceMap {
 
     // If the relevant source_file is empty, we don't return a line number.
     pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Arc<SourceFile>> {
-        let idx = self.lookup_source_file_idx(pos);
-
-        let f = (*self.files.borrow().source_files)[idx].clone();
+        let f = self.lookup_source_file(pos);
 
         match f.lookup_line(pos) {
             Some(line) => Ok(SourceFileAndLine { sf: f, line }),
@@ -774,16 +780,14 @@ impl SourceMap {
     /// For a global BytePos compute the local offset within the containing
     /// SourceFile
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
-        let idx = self.lookup_source_file_idx(bpos);
-        let sf = (*self.files.borrow().source_files)[idx].clone();
+        let sf = self.lookup_source_file(bpos);
         let offset = bpos - sf.start_pos;
         SourceFileAndBytePos { sf, pos: offset }
     }
 
     /// Converts an absolute BytePos to a CharPos relative to the source_file.
     pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
-        let idx = self.lookup_source_file_idx(bpos);
-        let map = &(*self.files.borrow().source_files)[idx];
+        let map = self.lookup_source_file(bpos);
 
         // The number of extra bytes due to multibyte chars in the SourceFile
         let mut total_extra_bytes = 0;
@@ -802,12 +806,18 @@ impl SourceMap {
             }
         }
 
-        assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
+        assert!(
+            map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32(),
+            "map.start_pos = {:?}; total_extra_bytes = {}; bpos = {:?}",
+            map.start_pos,
+            total_extra_bytes,
+            bpos,
+        );
         CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
     }
 
-    // Return the index of the source_file (in self.files) which contains pos.
-    pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
+    // Return the source_file (in self.files) which contains pos.
+    fn lookup_source_file(&self, pos: BytePos) -> Arc<SourceFile> {
         let files = self.files.borrow();
         let files = &files.source_files;
         let count = files.len();
@@ -830,7 +840,7 @@ impl SourceMap {
             pos.to_usize()
        );
 
-        a
+        files[a].clone()
     }
 
     pub fn count_lines(&self) -> usize {
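
The `next_start_pos` change above is what makes `new_source_file` safe to call from multiple threads: instead of deriving the next position from the last file's `end_pos` (a read that races with concurrent pushes), each call atomically reserves `len + 1` positions up front, so every file gets a disjoint `BytePos` range. A standalone sketch of that invariant (my example, not swc's actual types):

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering::SeqCst},
    Arc,
};
use std::thread;

fn main() {
    let next_start = Arc::new(AtomicUsize::new(0));

    let handles: Vec<_> = vec![10usize, 0, 25, 3] // file lengths, one per thread
        .into_iter()
        .map(|len| {
            let next_start = Arc::clone(&next_start);
            thread::spawn(move || {
                // Reserve `len + 1` slots; the extra slot keeps zero-length
                // files distinguishable, mirroring the comment in the patch.
                let start = next_start.fetch_add(len + 1, SeqCst);
                (start, start + len) // [start_pos, end_pos] of this file
            })
        })
        .collect();

    let mut ranges: Vec<_> = handles.into_iter().map(|h| h.join().unwrap()).collect();
    ranges.sort();

    // No two files overlap, regardless of thread interleaving.
    for pair in ranges.windows(2) {
        assert!(pair[0].1 < pair[1].0);
    }
    println!("{:?}", ranges);
}
```
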
diff --git a/common/src/syntax_pos/analyze_source_file.rs b/common/src/syntax_pos/analyze_source_file.rs
index 0b6ebee5737f..b7afdf70a3ce 100644
--- a/common/src/syntax_pos/analyze_source_file.rs
+++ b/common/src/syntax_pos/analyze_source_file.rs
@@ -8,10 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 use super::*;
-use cfg_if::cfg_if;
 use unicode_width::UnicodeWidthChar;
 
-/// Find all newlines, multi-byte characters, and non-narrow characters in a
+/// Finds all newlines, multi-byte characters, and non-narrow characters in a
 /// SourceFile.
 ///
 /// This function will use an SSE2 enhanced implementation if hardware support
@@ -47,9 +46,8 @@ pub fn analyze_source_file(
     (lines, multi_byte_chars, non_narrow_chars)
 }
 
-cfg_if! {
-    if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"),
-                 not(stage0)))] {
+cfg_if::cfg_if! {
+    if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
         fn analyze_source_file_dispatch(src: &str,
                                         source_file_start_pos: BytePos,
                                         lines: &mut Vec<BytePos>,
@@ -74,7 +72,7 @@ cfg_if! {
             }
         }
 
-        /// Check 16 byte chunks of text at a time. If the chunk contains
+        /// Checks 16 byte chunks of text at a time. If the chunk contains
         /// something other than printable ASCII characters and newlines, the
         /// function falls back to the generic implementation. Otherwise it uses
        /// SSE2 intrinsics to quickly find all newlines.
@@ -102,13 +100,10 @@ cfg_if! {
             let mut intra_chunk_offset = 0;
 
             for chunk_index in 0 .. chunk_count {
-                #[allow(clippy::cast_ptr_alignment)]
-                let chunk = {
-                    let ptr = src_bytes.as_ptr() as *const __m128i;
-                    // We don't know if the pointer is aligned to 16 bytes, so we
-                    // use `loadu`, which supports unaligned loading.
-                    _mm_loadu_si128(ptr.add(chunk_index))
-                };
+                let ptr = src_bytes.as_ptr() as *const __m128i;
+                // We don't know if the pointer is aligned to 16 bytes, so we
+                // use `loadu`, which supports unaligned loading.
+                let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));
 
                 // For character in the chunk, see if its byte value is < 0, which
                 // indicates that it's part of a UTF-8 char.
@@ -139,7 +134,7 @@
 
                 if control_char_mask == newlines_mask {
                     // All control characters are newlines, record them
-                    let mut newlines_mask = 0xFFFF_0000 | newlines_mask as u32;
+                    let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
                     let output_offset = output_offset +
                         BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
 
@@ -287,146 +282,152 @@ fn analyze_source_file_generic(
     i - scan_len
 }
 
-macro_rules! test {
-    (case: $test_name:ident,
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    macro_rules! test {
+        (case: $test_name:ident,
      text: $text:expr,
      source_file_start_pos: $source_file_start_pos:expr,
      lines: $lines:expr,
      multi_byte_chars: $multi_byte_chars:expr,
      non_narrow_chars: $non_narrow_chars:expr,) => {
-        #[test]
-        fn $test_name() {
-            let (lines, multi_byte_chars, non_narrow_chars) =
-                analyze_source_file($text, BytePos($source_file_start_pos));
+            #[test]
+            fn $test_name() {
+                let (lines, multi_byte_chars, non_narrow_chars) =
+                    analyze_source_file($text, BytePos($source_file_start_pos));
 
-            let expected_lines: Vec<BytePos> = $lines.into_iter().map(BytePos).collect();
+                let expected_lines: Vec<BytePos> =
+                    $lines.into_iter().map(|pos| BytePos(pos)).collect();
 
-            assert_eq!(lines, expected_lines);
+                assert_eq!(lines, expected_lines);
 
-            let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
-                .into_iter()
-                .map(|(pos, bytes)| MultiByteChar {
-                    pos: BytePos(pos),
-                    bytes,
-                })
-                .collect();
+                let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
+                    .into_iter()
+                    .map(|(pos, bytes)| MultiByteChar {
+                        pos: BytePos(pos),
+                        bytes,
+                    })
+                    .collect();
 
-            assert_eq!(multi_byte_chars, expected_mbcs);
+                assert_eq!(multi_byte_chars, expected_mbcs);
 
-            let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
-                .into_iter()
-                .map(|(pos, width)| NonNarrowChar::new(BytePos(pos), width))
-                .collect();
+                let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
+                    .into_iter()
+                    .map(|(pos, width)| NonNarrowChar::new(BytePos(pos), width))
+                    .collect();
 
-            assert_eq!(non_narrow_chars, expected_nncs);
-        }
-    };
-}
+                assert_eq!(non_narrow_chars, expected_nncs);
+            }
+        };
+    }
+
+    test!(
+        case: empty_text,
+        text: "",
+        source_file_start_pos: 0,
+        lines: vec![],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: newlines_short,
+        text: "a\nc",
+        source_file_start_pos: 0,
+        lines: vec![0, 2],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: newlines_long,
+        text: "012345678\nabcdef012345678\na",
+        source_file_start_pos: 0,
+        lines: vec![0, 10, 26],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: newline_and_multi_byte_char_in_same_chunk,
+        text: "01234β789\nbcdef0123456789abcdef",
+        source_file_start_pos: 0,
+        lines: vec![0, 11],
+        multi_byte_chars: vec![(5, 2)],
+        non_narrow_chars: vec![],
+    );
 
-test!(
-    case: empty_text,
-    text: "",
-    source_file_start_pos: 0,
-    lines: vec![],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: newlines_short,
-    text: "a\nc",
-    source_file_start_pos: 0,
-    lines: vec![0, 2],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: newlines_long,
-    text: "012345678\nabcdef012345678\na",
-    source_file_start_pos: 0,
-    lines: vec![0, 10, 26],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: newline_and_multi_byte_char_in_same_chunk,
-    text: "01234β789\nbcdef0123456789abcdef",
-    source_file_start_pos: 0,
-    lines: vec![0, 11],
-    multi_byte_chars: vec![(5, 2)],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: newline_and_control_char_in_same_chunk,
-    text: "01234\u{07}6789\nbcdef0123456789abcdef",
-    source_file_start_pos: 0,
-    lines: vec![0, 11],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![(5, 0)],
-);
-
-test!(
-    case: multi_byte_char_short,
-    text: "aβc",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![(1, 2)],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: multi_byte_char_long,
-    text: "0123456789abcΔf012345β",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![(13, 2), (22, 2)],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: multi_byte_char_across_chunk_boundary,
-    text: "0123456789abcdeΔ123456789abcdef01234",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![(15, 2)],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: multi_byte_char_across_chunk_boundary_tail,
-    text: "0123456789abcdeΔ....",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![(15, 2)],
-    non_narrow_chars: vec![],
-);
-
-test!(
-    case: non_narrow_short,
-    text: "0\t2",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![(1, 4)],
-);
-
-test!(
-    case: non_narrow_long,
-    text: "01\t3456789abcdef01234567\u{07}9",
-    source_file_start_pos: 0,
-    lines: vec![0],
-    multi_byte_chars: vec![],
-    non_narrow_chars: vec![(2, 4), (24, 0)],
-);
-
-test!(
-    case: output_offset_all,
-    text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
-    source_file_start_pos: 1000,
-    lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
-    multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
-    non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
-);
+
+    test!(
+        case: newline_and_control_char_in_same_chunk,
+        text: "01234\u{07}6789\nbcdef0123456789abcdef",
+        source_file_start_pos: 0,
+        lines: vec![0, 11],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![(5, 0)],
+    );
+
+    test!(
+        case: multi_byte_char_short,
+        text: "aβc",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![(1, 2)],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: multi_byte_char_long,
+        text: "0123456789abcΔf012345β",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![(13, 2), (22, 2)],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: multi_byte_char_across_chunk_boundary,
+        text: "0123456789abcdeΔ123456789abcdef01234",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![(15, 2)],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: multi_byte_char_across_chunk_boundary_tail,
+        text: "0123456789abcdeΔ....",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![(15, 2)],
+        non_narrow_chars: vec![],
+    );
+
+    test!(
+        case: non_narrow_short,
+        text: "0\t2",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![(1, 4)],
+    );
+
+    test!(
+        case: non_narrow_long,
+        text: "01\t3456789abcdef01234567\u{07}9",
+        source_file_start_pos: 0,
+        lines: vec![0],
+        multi_byte_chars: vec![],
+        non_narrow_chars: vec![(2, 4), (24, 0)],
+    );
+
+    test!(
+        case: output_offset_all,
+        text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+        source_file_start_pos: 1000,
+        lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
+        multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
+        non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
+    );
+}
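
For context on the `loadu` hunk above: the scan loads 16 bytes at a time and reduces each byte-wise comparison to a single bitmask via `movemask`. A self-contained sketch of that trick (my example, not swc's code; it assumes an x86_64 target, where SSE2 is baseline):

```rust
// Find newline offsets within one 16-byte chunk using SSE2.
#[cfg(target_arch = "x86_64")]
fn newline_offsets_16(chunk16: &[u8; 16]) -> Vec<usize> {
    use std::arch::x86_64::*;
    unsafe {
        // `loadu` tolerates unaligned input, which is why the patched code
        // uses it instead of requiring 16-byte alignment.
        let chunk = _mm_loadu_si128(chunk16.as_ptr() as *const __m128i);
        let newlines = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
        // One bit per byte: bit i is set iff byte i equals b'\n'.
        let mut mask = _mm_movemask_epi8(newlines) as u32;
        let mut offsets = Vec::new();
        while mask != 0 {
            offsets.push(mask.trailing_zeros() as usize);
            mask &= mask - 1; // clear the lowest set bit
        }
        offsets
    }
}

#[cfg(target_arch = "x86_64")]
fn main() {
    assert_eq!(newline_offsets_16(b"a\nc\nefghijklmnop"), vec![1, 3]);
}

#[cfg(not(target_arch = "x86_64"))]
fn main() {}
```
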
diff --git a/ecmascript/codegen/Cargo.toml b/ecmascript/codegen/Cargo.toml
index 3c64c75333aa..b635d52e3162 100644
--- a/ecmascript/codegen/Cargo.toml
+++ b/ecmascript/codegen/Cargo.toml
@@ -15,7 +15,7 @@ swc_atoms = { version = "0.2", path ="../../atoms" }
 swc_common = { version = "0.5", path ="../../common" }
 swc_ecma_ast = { version = "0.17.0", path ="../ast" }
 swc_ecma_codegen_macros = { version = "0.4", path ="./macros" }
-sourcemap = "4.1.1"
+sourcemap = "5"
 num-bigint = { version = "0.2", features = ["serde"] }
 
 [dev-dependencies]
diff --git a/ecmascript/codegen/src/lib.rs b/ecmascript/codegen/src/lib.rs
index 49c27be5077d..259fce38d7c0 100644
--- a/ecmascript/codegen/src/lib.rs
+++ b/ecmascript/codegen/src/lib.rs
@@ -1387,7 +1387,8 @@ impl<'a> Emitter<'a> {
                 _ => false,
             }
         };
-        if format.contains(ListFormat::CommaDelimited) && has_trailing_comma {
+
+        if has_trailing_comma && format.contains(ListFormat::CommaDelimited) {
             self.wr.write_punct(",")?;
             formatting_space!(self);
         }
diff --git a/ecmascript/transforms/Cargo.toml b/ecmascript/transforms/Cargo.toml
index e0c8cceb5fb4..f1beb076c71e 100644
--- a/ecmascript/transforms/Cargo.toml
+++ b/ecmascript/transforms/Cargo.toml
@@ -37,4 +37,4 @@ testing = { version = "0.5", path ="../../testing" }
 swc_ecma_codegen = { version = "0.16.0", path ="../codegen" }
 tempfile = "3"
 pretty_assertions = "0.6"
-sourcemap = "4.1.1"
+sourcemap = "5"
diff --git a/ecmascript/transforms/src/pass.rs b/ecmascript/transforms/src/pass.rs
index cb5689c43913..973b1477c908 100644
--- a/ecmascript/transforms/src/pass.rs
+++ b/ecmascript/transforms/src/pass.rs
@@ -182,10 +182,6 @@ pub struct JoinedPass<A, B, T> {
     pub ty: PhantomData<T>,
 }
 
-// fn type_name<T>() -> String {
-//     format!("{}", unsafe { std::intrinsics::type_name::<T>() })
-// }
-
 impl<A, B, T> Fold<T> for JoinedPass<A, B, T>
 where
     T: FoldWith<Self>,
diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore
new file mode 100644
index 000000000000..c209921ae4fc
--- /dev/null
+++ b/integration-tests/.gitignore
@@ -0,0 +1,2 @@
+!.gitkeep
+*/
\ No newline at end of file
diff --git a/integration-tests/.gitkeep b/integration-tests/.gitkeep
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/scripts/prepare-integration.sh b/scripts/prepare-integration.sh
new file mode 100755
index 000000000000..6ebd173cf140
--- /dev/null
+++ b/scripts/prepare-integration.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+git clone --depth 1 https://github.com/angular/angular.git ./integration-tests/angular
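
For reference, the intended flow here is presumably: run `sh scripts/prepare-integration.sh` once to fetch a shallow Angular checkout into `integration-tests/` (which the new `.gitignore` keeps untracked apart from `.gitkeep`), then run the ignored integration test with something like `cargo test --test projects angular_core -- --ignored`.
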
diff --git a/tests/projects.rs b/tests/projects.rs
index ca6de288f8bd..efe6466120eb 100644
--- a/tests/projects.rs
+++ b/tests/projects.rs
@@ -1,5 +1,10 @@
+use rayon::prelude::*;
 use std::path::Path;
-use swc::{config::Options, error::Error, Compiler};
+use swc::{
+    config::{Options, SourceMapsConfig},
+    error::Error,
+    Compiler,
+};
 use testing::{NormalizedOutput, StdErr, Tester};
 use walkdir::WalkDir;
 
@@ -71,66 +76,70 @@ fn project(dir: &str) {
         .expect("failed");
 }
 
-//fn par_project(dir: &str) {
-//    Tester::new()
-//        .print_errors(|cm, handler| {
-//            let c = Compiler::new(cm.clone(), handler);
-//
-//            let entries = WalkDir::new(dir)
-//                .into_iter()
-//                .map(|entry| entry.unwrap())
-//                .filter(|e| {
-//                    if e.metadata().unwrap().is_dir() {
-//                        return false;
-//                    }
-//
-//                    if !e.file_name().to_string_lossy().ends_with(".ts")
-//                        && !e.file_name().to_string_lossy().ends_with(".js")
-//                        && !e.file_name().to_string_lossy().ends_with(".tsx")
-//                    {
-//                        return false;
-//                    }
-//
-//                    true
-//                })
-//                .collect::<Vec<_>>();
-//
-//            entries.into_par_iter().for_each(|entry| {
-//                let fm = cm.load_file(entry.path()).expect("failed to load
-// file");
-//                let _ = c.process_js_file(
-//                    fm,
-//                    &Options {
-//                        swcrc: true,
-//                        is_module: true,
-//
-//                        ..Default::default()
-//                    },
-//                );
-//            });
-//
-//            if c.handler.has_errors() {
-//                Err(())
-//            } else {
-//                Ok(())
-//            }
-//        })
-//        .map(|_| ())
-//        .expect("");
-//}
+fn par_project(dir: &str) {
+    Tester::new()
+        .print_errors(|cm, handler| {
+            let c = Compiler::new(cm.clone(), handler);
+
+            let entries = WalkDir::new(dir)
+                .into_iter()
+                .map(|entry| entry.unwrap())
+                .filter(|e| {
+                    if e.metadata().unwrap().is_dir() {
+                        return false;
+                    }
+
+                    if !e.file_name().to_string_lossy().ends_with(".ts")
+                        && !e.file_name().to_string_lossy().ends_with(".js")
+                        && !e.file_name().to_string_lossy().ends_with(".tsx")
+                    {
+                        return false;
+                    }
+
+                    true
+                })
+                .collect::<Vec<_>>();
+
+            entries.into_par_iter().for_each(|entry| {
+                let fm = cm.load_file(entry.path()).expect("failed to load file");
+                let _ = c.process_js_file(
+                    fm,
+                    &Options {
+                        swcrc: true,
+                        is_module: true,
+                        source_maps: Some(SourceMapsConfig::Bool(true)),
+                        ..Default::default()
+                    },
+                );
+            });
+
+            if c.handler.has_errors() {
+                Err(())
+            } else {
+                Ok(())
+            }
+        })
+        .map(|_| ())
+        .expect("failed");
+}
+
+#[test]
+#[ignore]
+fn angular_core() {
+    par_project("integration-tests/angular/packages/core/src");
+}
 
-//#[test]
-//fn angular_core() {
-//    project("tests/projects/angular/repo/packages/core/src");
-//}
-//
 //#[test]
 //fn rxjs() {
-//    project("tests/projects/rxjs/repo/src");
+//    par_project("integration-tests/rxjs/repo/src");
 //}
 //
 //#[test]
 //fn webpack() {
-//    project("tests/projects/webpack/repo/lib");
+//    par_project("integration-tests/webpack/repo/lib");
 //}
 
 /// should respect modules config in .swcrc