fix(lsp): regression - formatting was broken on windows (#21972)

~~Waiting on: https://github.com/denoland/deno_config/pull/31~~

Closes #21971
Closes https://github.com/denoland/vscode_deno/issues/1029
David Sherret 2024-01-18 15:57:30 -05:00 committed by GitHub
parent 4e3aff8400
commit 35c1652f56
12 changed files with 148 additions and 117 deletions
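For orientation, a minimal sketch (not code from this commit) of the pattern the diff below moves to: the LSP's formatter and linter options are now seeded from the server's real working directory via `new_with_base` constructors instead of `Default::default()`, which previously bottomed out in a dummy `/` base that is not a usable path on Windows. The `PathOrPatternSet`, `FilePatterns`, and `FmtOptions` shapes here are simplified stand-ins mirroring only the fields visible in the diff, not the real `deno_config` 0.8.0 / `cli` types.

```rust
use std::path::PathBuf;

// Simplified stand-ins for the types touched in this commit.
#[derive(Debug, Default)]
struct PathOrPatternSet(Vec<PathBuf>);

#[derive(Debug)]
struct FilePatterns {
    base: PathBuf,                     // new: every pattern set carries its base
    include: Option<PathOrPatternSet>, // None means "everything under base"
    exclude: PathOrPatternSet,
}

impl FilePatterns {
    fn new_with_base(base: PathBuf) -> Self {
        Self {
            base,
            include: None,
            exclude: PathOrPatternSet::default(),
        }
    }
}

#[derive(Debug)]
struct FmtOptions {
    check: bool,
    files: FilePatterns, // `options: FmtOptionsConfig` omitted for brevity
}

impl FmtOptions {
    // Mirrors the constructor added in the cli args module below.
    fn new_with_base(base: PathBuf) -> Self {
        Self {
            check: false,
            files: FilePatterns::new_with_base(base),
        }
    }
}

fn main() {
    // Before: `FmtOptions::default()` carried no meaningful base path.
    // After: options are anchored at the process's actual cwd, which is a
    // real directory on every platform, including Windows drive paths.
    let initial_cwd = std::env::current_dir().expect("could not resolve cwd");
    let fmt_options = FmtOptions::new_with_base(initial_cwd);
    println!("{fmt_options:?}");
}
```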

Cargo.lock (generated)

@@ -1126,9 +1126,9 @@ dependencies = [
[[package]]
name = "deno_config"
version = "0.7.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62edb1811b076bf29670385098bc6e1cfee37dae70b71a665e7a1534098ba805"
checksum = "0257353ef2a16029e15d8ee47509cac1a29eba8a223caa747996f6c7719c09d4"
dependencies = [
"anyhow",
"glob",


@@ -55,7 +55,7 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = "=0.6.1"
deno_config = "=0.7.1"
deno_config = "=0.8.0"
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.93.0", features = ["html"] }
deno_emit = "=0.33.0"


@@ -9,7 +9,6 @@ pub mod package_json;
pub use self::import_map::resolve_import_map_from_specifier;
use self::package_json::PackageJsonDeps;
use ::import_map::ImportMap;
use deno_config::glob::PathOrPattern;
use deno_core::resolve_url_or_path;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
@@ -244,7 +243,7 @@ impl BenchOptions {
}
}
#[derive(Clone, Debug, Default)]
#[derive(Clone, Debug)]
pub struct FmtOptions {
pub check: bool,
pub options: FmtOptionsConfig,
@@ -252,6 +251,14 @@ pub struct FmtOptions {
}
impl FmtOptions {
pub fn new_with_base(base: PathBuf) -> Self {
Self {
check: false,
options: FmtOptionsConfig::default(),
files: FilePatterns::new_with_base(base),
}
}
pub fn resolve(
maybe_fmt_config: Option<FmtConfig>,
maybe_fmt_flags: Option<FmtFlags>,
@@ -369,7 +376,7 @@ pub enum LintReporterKind {
Compact,
}
#[derive(Clone, Debug, Default)]
#[derive(Clone, Debug)]
pub struct LintOptions {
pub rules: LintRulesConfig,
pub files: FilePatterns,
@@ -377,6 +384,14 @@ pub struct LintOptions {
}
impl LintOptions {
pub fn new_with_base(base: PathBuf) -> Self {
Self {
rules: Default::default(),
files: FilePatterns::new_with_base(base),
reporter_kind: Default::default(),
}
}
pub fn resolve(
maybe_lint_config: Option<LintConfig>,
maybe_lint_flags: Option<LintFlags>,
@@ -1648,7 +1663,8 @@ fn resolve_files(
maybe_file_flags: Option<FileFlags>,
initial_cwd: &Path,
) -> Result<FilePatterns, AnyError> {
let mut maybe_files_config = maybe_files_config.unwrap_or_default();
let mut maybe_files_config = maybe_files_config
.unwrap_or_else(|| FilePatterns::new_with_base(initial_cwd.to_path_buf()));
if let Some(file_flags) = maybe_file_flags {
if !file_flags.include.is_empty() {
maybe_files_config.include =
@@ -1665,18 +1681,7 @@ fn resolve_files(
)?;
}
}
Ok(FilePatterns {
include: {
let files = match maybe_files_config.include {
Some(include) => include,
None => PathOrPatternSet::new(vec![PathOrPattern::Path(
initial_cwd.to_path_buf(),
)]),
};
Some(files)
},
exclude: maybe_files_config.exclude,
})
Ok(maybe_files_config)
}
/// Resolves the no_prompt value based on the cli flags and environment.
@@ -1896,6 +1901,7 @@ mod test {
let resolved_files = resolve_files(
Some(FilePatterns {
base: temp_dir_path.to_path_buf(),
include: Some(
PathOrPatternSet::from_relative_path_or_patterns(
temp_dir_path,

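A condensed before/after sketch (assumptions marked, not the actual cli/args code) of what the `resolve_files` hunks above amount to: when no files config is supplied, the old code manufactured an include set wrapping the cwd, while the new code returns a `FilePatterns` whose `base` is the cwd and whose `include` stays `None`, so pattern resolution is anchored to a real directory.

```rust
use std::path::{Path, PathBuf};

// Simplified stand-ins, not the real deno_config types.
#[derive(Debug, Default)]
struct PathOrPatternSet(Vec<PathBuf>);

#[derive(Debug)]
struct FilePatterns {
    base: PathBuf,
    include: Option<PathOrPatternSet>,
    exclude: PathOrPatternSet,
}

// Old default (roughly): "include exactly the cwd, as a path entry".
// The 0.7.1 struct had no `base` field; it is included here only so both
// helpers can share one type.
fn default_files_old(initial_cwd: &Path) -> FilePatterns {
    FilePatterns {
        base: initial_cwd.to_path_buf(),
        include: Some(PathOrPatternSet(vec![initial_cwd.to_path_buf()])),
        exclude: PathOrPatternSet::default(),
    }
}

// New default (roughly): "everything under the cwd", with the base stored
// explicitly so later matching resolves against a real directory.
fn default_files_new(initial_cwd: &Path) -> FilePatterns {
    FilePatterns {
        base: initial_cwd.to_path_buf(),
        include: None,
        exclude: PathOrPatternSet::default(),
    }
}

fn main() {
    let cwd = std::env::current_dir().expect("could not resolve cwd");
    println!("old: {:?}", default_files_old(&cwd));
    println!("new: {:?}", default_files_new(&cwd));
}
```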

@@ -1540,6 +1540,7 @@ mod tests {
use crate::lsp::documents::Documents;
use crate::lsp::documents::LanguageId;
use crate::lsp::language_server::StateSnapshot;
use deno_config::glob::FilePatterns;
use pretty_assertions::assert_eq;
use std::path::Path;
use std::path::PathBuf;
@@ -1640,6 +1641,11 @@ let c: number = "a";
Arc::new(GlobalHttpCache::new(cache_location, RealDenoCacheEnv));
let ts_server = TsServer::new(Default::default(), cache);
ts_server.start(None);
let lint_options = LintOptions {
rules: Default::default(),
files: FilePatterns::new_with_base(temp_dir.path().to_path_buf()),
reporter_kind: Default::default(),
};
// test enabled
{
@@ -1647,7 +1653,7 @@ let c: number = "a";
let diagnostics = generate_lint_diagnostics(
&snapshot,
&enabled_config,
&Default::default(),
&lint_options,
Default::default(),
);
assert_eq!(get_diagnostics_for_single(diagnostics).len(), 6);
@@ -1679,7 +1685,7 @@ let c: number = "a";
let diagnostics = generate_lint_diagnostics(
&snapshot,
&disabled_config,
&Default::default(),
&lint_options,
Default::default(),
);
assert_eq!(get_diagnostics_for_single(diagnostics).len(), 0);


@@ -844,8 +844,7 @@ impl FileSystemDocuments {
}
pub struct UpdateDocumentConfigOptions<'a> {
pub enabled_paths: PathOrPatternSet,
pub disabled_paths: PathOrPatternSet,
pub file_patterns: FilePatterns,
pub document_preload_limit: usize,
pub maybe_import_map: Option<Arc<import_map::ImportMap>>,
pub maybe_config_file: Option<&'a ConfigFile>,
@@ -1321,8 +1320,7 @@ impl Documents {
pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) {
#[allow(clippy::too_many_arguments)]
fn calculate_resolver_config_hash(
enabled_paths: &PathOrPatternSet,
disabled_paths: &PathOrPatternSet,
file_patterns: &FilePatterns,
document_preload_limit: usize,
maybe_import_map: Option<&import_map::ImportMap>,
maybe_jsx_config: Option<&JsxImportSourceConfig>,
@@ -1349,8 +1347,10 @@ impl Documents {
let mut hasher = FastInsecureHasher::default();
hasher.write_hashable(document_preload_limit);
hasher.write_hashable(&get_pattern_set_vec(enabled_paths));
hasher.write_hashable(&get_pattern_set_vec(disabled_paths));
hasher.write_hashable(
&file_patterns.include.as_ref().map(get_pattern_set_vec),
);
hasher.write_hashable(&get_pattern_set_vec(&file_patterns.exclude));
if let Some(import_map) = maybe_import_map {
hasher.write_str(&import_map.to_json());
hasher.write_str(import_map.base_url().as_str());
@@ -1387,8 +1387,7 @@ impl Documents {
.maybe_config_file
.and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten());
let new_resolver_config_hash = calculate_resolver_config_hash(
&options.enabled_paths,
&options.disabled_paths,
&options.file_patterns,
options.document_preload_limit,
options.maybe_import_map.as_deref(),
maybe_jsx_config.as_ref(),
@@ -1450,8 +1449,7 @@ impl Documents {
// only refresh the dependencies if the underlying configuration has changed
if self.resolver_config_hash != new_resolver_config_hash {
self.refresh_dependencies(
options.enabled_paths,
options.disabled_paths,
options.file_patterns,
options.document_preload_limit,
);
self.resolver_config_hash = new_resolver_config_hash;
@@ -1464,8 +1462,7 @@ impl Documents {
fn refresh_dependencies(
&mut self,
enabled_paths: PathOrPatternSet,
disabled_paths: PathOrPatternSet,
file_patterns: FilePatterns,
document_preload_limit: usize,
) {
let resolver = self.resolver.as_graph_resolver();
@@ -1487,8 +1484,7 @@ impl Documents {
log::debug!("Preloading documents from enabled urls...");
let mut finder =
PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths,
disabled_paths,
file_patterns,
limit: document_preload_limit,
});
for specifier in finder.by_ref() {
@@ -1900,8 +1896,7 @@ enum PendingEntry {
}
struct PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet,
disabled_paths: PathOrPatternSet,
file_patterns: FilePatterns,
limit: usize,
}
@@ -1933,24 +1928,21 @@ impl PreloadDocumentFinder {
visited_paths: Default::default(),
};
let file_patterns = FilePatterns {
include: Some(options.enabled_paths),
exclude: options.disabled_paths,
};
let file_patterns_by_base = file_patterns.split_by_base();
let file_patterns_by_base = options.file_patterns.split_by_base();
// initialize the finder with the initial paths
for (path, file_patterns) in file_patterns_by_base {
for file_patterns in file_patterns_by_base {
let path = &file_patterns.base;
if path.is_dir() {
if is_allowed_root_dir(&path) {
if is_allowed_root_dir(path) {
finder
.root_dir_entries
.push(PendingEntry::Dir(path, Rc::new(file_patterns)));
.push(PendingEntry::Dir(path.clone(), Rc::new(file_patterns)));
}
} else {
finder
.pending_entries
.push_back(PendingEntry::SpecifiedRootFile(path));
.push_back(PendingEntry::SpecifiedRootFile(path.clone()));
}
}
finder
@@ -2247,8 +2239,9 @@ console.log(b, "hello deno");
.unwrap();
documents.update_config(UpdateDocumentConfigOptions {
enabled_paths: Default::default(),
disabled_paths: Default::default(),
file_patterns: FilePatterns::new_with_base(
documents_path.to_path_buf(),
),
document_preload_limit: 1_000,
maybe_import_map: Some(Arc::new(import_map)),
maybe_config_file: None,
@@ -2289,8 +2282,9 @@ console.log(b, "hello deno");
.unwrap();
documents.update_config(UpdateDocumentConfigOptions {
enabled_paths: Default::default(),
disabled_paths: Default::default(),
file_patterns: FilePatterns::new_with_base(
documents_path.to_path_buf(),
),
document_preload_limit: 1_000,
maybe_import_map: Some(Arc::new(import_map)),
maybe_config_file: None,
@@ -2357,17 +2351,22 @@ console.log(b, "hello deno");
temp_dir.write("root3/mod.ts", ""); // no, not provided
let mut urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet::from_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
"root2/file1.ts".to_string(),
"root2/main.min.ts".to_string(),
"root2/folder".to_string(),
],
)
.unwrap(),
disabled_paths: Default::default(),
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Some(
PathOrPatternSet::from_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
"root2/file1.ts".to_string(),
"root2/main.min.ts".to_string(),
"root2/folder".to_string(),
],
)
.unwrap(),
),
exclude: Default::default(),
},
limit: 1_000,
})
.collect::<Vec<_>>();
@@ -2397,10 +2396,11 @@ console.log(b, "hello deno");
// now try iterating with a low limit
let urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path(
temp_dir.path().to_path_buf(),
)]),
disabled_paths: Default::default(),
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Default::default(),
exclude: Default::default(),
},
limit: 10, // entries and not results
})
.collect::<Vec<_>>();
@@ -2412,18 +2412,19 @@ console.log(b, "hello deno");
// now try with certain directories and files disabled
let mut urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path(
temp_dir.path().to_path_buf(),
)]),
disabled_paths: PathOrPatternSet::from_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
"root2/file1.ts".to_string(),
"**/*.js".to_string(), // ignore js files
],
)
.unwrap(),
file_patterns: FilePatterns {
base: temp_dir.path().to_path_buf(),
include: Default::default(),
exclude: PathOrPatternSet::from_relative_path_or_patterns(
temp_dir.path().as_path(),
&[
"root1".to_string(),
"root2/file1.ts".to_string(),
"**/*.js".to_string(), // ignore js files
],
)
.unwrap(),
},
limit: 1_000,
})
.collect::<Vec<_>>();
@@ -2443,20 +2444,22 @@ console.log(b, "hello deno");
pub fn test_pre_load_document_finder_disallowed_dirs() {
if cfg!(windows) {
let paths = PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path(
PathBuf::from("C:\\"),
)]),
disabled_paths: Default::default(),
file_patterns: FilePatterns {
base: PathBuf::from("C:\\"),
include: Default::default(),
exclude: Default::default(),
},
limit: 1_000,
})
.collect::<Vec<_>>();
assert_eq!(paths, vec![]);
} else {
let paths = PreloadDocumentFinder::new(PreloadDocumentFinderOptions {
enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path(
PathBuf::from("/"),
)]),
disabled_paths: Default::default(),
file_patterns: FilePatterns {
base: PathBuf::from("/"),
include: Default::default(),
exclude: Default::default(),
},
limit: 1_000,
})
.collect::<Vec<_>>();

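A small sketch (a simplified stand-in for the LSP's `Inner` state, not the real struct) of the pattern the language-server hunks below adopt: resolve the working directory once at startup, store it as `initial_cwd`, and derive every later path from it, rather than calling `std::env::current_dir()` ad hoc or substituting a dummy `/` directory that does not exist on Windows.

```rust
use std::path::PathBuf;

// Simplified stand-in for the LSP's Inner state.
struct Inner {
    initial_cwd: PathBuf,
}

impl Inner {
    fn new() -> Self {
        // Mirrors the diff: fail loudly if the cwd cannot be resolved,
        // instead of silently falling back to a fake path like "/".
        let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
            panic!("Could not resolve current working directory")
        });
        Self { initial_cwd }
    }

    fn node_modules_dir(&self) -> PathBuf {
        // Later consumers derive their paths from the stored cwd.
        self.initial_cwd.join("node_modules")
    }
}

fn main() {
    let inner = Inner::new();
    println!("{}", inner.node_modules_dir().display());
}
```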

@@ -2,8 +2,8 @@
use base64::Engine;
use deno_ast::MediaType;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::resolve_url;
@@ -29,6 +29,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::fmt::Write as _;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::mpsc::unbounded_channel;
@@ -237,6 +238,7 @@ pub struct Inner {
/// The collection of documents that the server is currently handling, either
/// on disk or "open" within the client.
pub documents: Documents,
initial_cwd: PathBuf,
http_client: Arc<HttpClient>,
task_queue: LanguageServerTaskQueue,
/// Handles module registries, which allow discovery of modules
@@ -527,6 +529,9 @@ impl Inner {
diagnostics_state.clone(),
);
let assets = Assets::new(ts_server.clone());
let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
panic!("Could not resolve current working directory")
});
Self {
assets,
@@ -538,13 +543,14 @@ impl Inner {
diagnostics_server,
documents,
http_client,
initial_cwd: initial_cwd.clone(),
maybe_global_cache_path: None,
maybe_import_map: None,
maybe_import_map_uri: None,
maybe_package_json: None,
fmt_options: Default::default(),
fmt_options: FmtOptions::new_with_base(initial_cwd.clone()),
task_queue: Default::default(),
lint_options: Default::default(),
lint_options: LintOptions::new_with_base(initial_cwd),
maybe_testing_server: None,
module_registries,
module_registries_location,
@@ -874,6 +880,7 @@ impl Inner {
let npm_resolver = create_npm_resolver(
&deno_dir,
&self.initial_cwd,
&self.http_client,
self.config.maybe_config_file(),
self.config.maybe_lockfile(),
@@ -1043,15 +1050,13 @@ impl Inner {
async fn update_config_file(&mut self) -> Result<(), AnyError> {
self.config.clear_config_file();
self.fmt_options = Default::default();
self.lint_options = Default::default();
self.fmt_options = FmtOptions::new_with_base(self.initial_cwd.clone());
self.lint_options = LintOptions::new_with_base(self.initial_cwd.clone());
if let Some(config_file) = self.get_config_file()? {
// this doesn't need to be an actual directory because flags is specified as `None`
let dummy_args_cwd = PathBuf::from("/");
let lint_options = config_file
.to_lint_config()
.and_then(|maybe_lint_config| {
LintOptions::resolve(maybe_lint_config, None, &dummy_args_cwd)
LintOptions::resolve(maybe_lint_config, None, &self.initial_cwd)
})
.map_err(|err| {
anyhow!("Unable to update lint configuration: {:?}", err)
@@ -1059,7 +1064,7 @@ impl Inner {
let fmt_options = config_file
.to_fmt_config()
.and_then(|maybe_fmt_config| {
FmtOptions::resolve(maybe_fmt_config, None, &dummy_args_cwd)
FmtOptions::resolve(maybe_fmt_config, None, &self.initial_cwd)
})
.map_err(|err| {
anyhow!("Unable to update formatter configuration: {:?}", err)
@@ -1148,6 +1153,7 @@ impl Inner {
async fn create_npm_resolver(
deno_dir: &DenoDir,
initial_cwd: &Path,
http_client: &Arc<HttpClient>,
maybe_config_file: Option<&ConfigFile>,
maybe_lockfile: Option<&Arc<Mutex<Lockfile>>>,
@@ -1161,9 +1167,7 @@ async fn create_npm_resolver(
create_cli_npm_resolver_for_lsp(if is_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: Arc::new(deno_fs::RealFs),
root_node_modules_dir: std::env::current_dir()
.unwrap()
.join("node_modules"),
root_node_modules_dir: initial_cwd.join("node_modules"),
})
} else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions {
@@ -1348,8 +1352,11 @@ impl Inner {
async fn refresh_documents_config(&mut self) {
self.documents.update_config(UpdateDocumentConfigOptions {
enabled_paths: self.config.get_enabled_paths(),
disabled_paths: self.config.get_disabled_paths(),
file_patterns: FilePatterns {
base: self.initial_cwd.clone(),
include: Some(self.config.get_enabled_paths()),
exclude: self.config.get_disabled_paths(),
},
document_preload_limit: self
.config
.workspace_settings()
@@ -3722,7 +3729,7 @@ impl Inner {
type_check_mode: crate::args::TypeCheckMode::Local,
..Default::default()
},
std::env::current_dir().with_context(|| "Failed getting cwd.")?,
self.initial_cwd.clone(),
self.config.maybe_config_file().cloned(),
self.config.maybe_lockfile().cloned(),
self.maybe_package_json.clone(),

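The test hunks below replace fictional `file:///a/...` URIs with URIs derived from a real temp directory, so formatting is exercised against paths that actually exist, including Windows drive-letter paths. A hedged illustration using the `url` crate (the repo's `uri_file()` test helper presumably does something equivalent):

```rust
use url::Url;

fn main() {
    // A real, platform-specific path (e.g. C:\Users\...\file.md on Windows).
    let path = std::env::temp_dir().join("file.md");

    // On Windows this yields something like file:///C:/Users/.../file.md,
    // which round-trips back to a resolvable path; a hard-coded
    // "file:///a/file.md" never does.
    let uri = Url::from_file_path(&path).expect("path must be absolute");
    println!("{uri}");
}
```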

@@ -8507,13 +8507,15 @@ fn lsp_format_exclude_default_config() {
#[test]
fn lsp_format_json() {
let context = TestContextBuilder::new().use_temp_cwd().build();
let temp_dir_path = context.temp_dir().path();
// Also test out using a non-json file extension here.
// What should matter is the language identifier.
let lock_file_path = temp_dir_path.join("file.lock");
let mut client = context.new_lsp_command().build();
client.initialize_default();
client.did_open(json!({
"textDocument": {
// Also test out using a non-json file extension here.
// What should matter is the language identifier.
"uri": "file:///a/file.lock",
"uri": lock_file_path.uri_file(),
"languageId": "json",
"version": 1,
"text": "{\"key\":\"value\"}"
@@ -8524,7 +8526,7 @@ fn lsp_format_json() {
"textDocument/formatting",
json!({
"textDocument": {
"uri": "file:///a/file.lock"
"uri": lock_file_path.uri_file(),
},
"options": {
"tabSize": 2,
@@ -8635,11 +8637,12 @@ fn lsp_json_import_with_query_string() {
#[test]
fn lsp_format_markdown() {
let context = TestContextBuilder::new().use_temp_cwd().build();
let markdown_file = context.temp_dir().path().join("file.md");
let mut client = context.new_lsp_command().build();
client.initialize_default();
client.did_open(json!({
"textDocument": {
"uri": "file:///a/file.md",
"uri": markdown_file.uri_file(),
"languageId": "markdown",
"version": 1,
"text": "# Hello World"
@@ -8650,7 +8653,7 @@ fn lsp_format_markdown() {
"textDocument/formatting",
json!({
"textDocument": {
"uri": "file:///a/file.md"
"uri": markdown_file.uri_file()
},
"options": {
"tabSize": 2,
@@ -8705,11 +8708,12 @@ fn lsp_format_with_config() {
builder.set_config("./deno.fmt.jsonc");
});
let ts_file = temp_dir.path().join("file.ts");
client
.did_open(
json!({
"textDocument": {
"uri": "file:///a/file.ts",
"uri": ts_file.uri_file(),
"languageId": "typescript",
"version": 1,
"text": "export async function someVeryLongFunctionName() {\nconst response = fetch(\"http://localhost:4545/some/non/existent/path.json\");\nconsole.log(response.text());\nconsole.log(\"finished!\")\n}"
@@ -8722,7 +8726,7 @@ fn lsp_format_with_config() {
"textDocument/formatting",
json!({
"textDocument": {
"uri": "file:///a/file.ts"
"uri": ts_file.uri_file()
},
"options": {
"tabSize": 2,


@@ -378,6 +378,7 @@ fn collect_coverages(
) -> Result<Vec<cdp::ScriptCoverage>, AnyError> {
let mut coverages: Vec<cdp::ScriptCoverage> = Vec::new();
let file_patterns = FilePatterns {
base: initial_cwd.to_path_buf(),
include: Some({
if files.include.is_empty() {
PathOrPatternSet::new(vec![PathOrPattern::Path(


@@ -93,6 +93,7 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
let module_specifiers = collect_specifiers(
FilePatterns {
base: cli_options.initial_cwd().to_path_buf(),
include: Some(PathOrPatternSet::from_relative_path_or_patterns(
cli_options.initial_cwd(),
source_files,


@@ -128,7 +128,7 @@ async fn prepare_publish(
};
let exclude_patterns = deno_json
.to_files_config()
.map(|files| files.unwrap_or_default().exclude)?;
.map(|files| files.map(|f| f.exclude).unwrap_or_default())?;
let tarball = deno_core::unsync::spawn_blocking(move || {
let unfurler = ImportMapUnfurler::new(&import_map);


@@ -227,14 +227,15 @@ fn maybe_update_config_file(
return ModifiedResult::default();
}
let fmt_config = config_file
let fmt_config_options = config_file
.to_fmt_config()
.ok()
.unwrap_or_default()
.flatten()
.map(|config| config.options)
.unwrap_or_default();
let result = update_config_file(
config_file,
&fmt_config.options,
&fmt_config_options,
if try_add_import_map {
Some(
ModuleSpecifier::from_file_path(output_dir.join("import_map.json"))


@@ -285,8 +285,8 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
let mut target_files = Vec::new();
let mut visited_paths = HashSet::new();
let file_patterns_by_base = file_patterns.split_by_base();
for (base, file_patterns) in file_patterns_by_base {
let file = normalize_path(base);
for file_patterns in file_patterns_by_base {
let file = normalize_path(&file_patterns.base);
// use an iterator in order to minimize the number of file system operations
let mut iterator = WalkDir::new(&file)
.follow_links(false) // the default, but be explicit
@@ -807,9 +807,8 @@ mod tests {
create_files(&ignore_dir_path, &ignore_dir_files);
let file_patterns = FilePatterns {
include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path(
root_dir_path.to_path_buf(),
)])),
base: root_dir_path.to_path_buf(),
include: None,
exclude: PathOrPatternSet::new(vec![PathOrPattern::Path(
ignore_dir_path.to_path_buf(),
)]),
@@ -871,6 +870,7 @@ mod tests {
// test opting out of ignoring by specifying the dir
let file_patterns = FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(PathOrPatternSet::new(vec![
PathOrPattern::Path(root_dir_path.to_path_buf()),
PathOrPattern::Path(
@@ -948,6 +948,7 @@ mod tests {
let result = collect_specifiers(
FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(
PathOrPatternSet::from_relative_path_or_patterns(
root_dir_path.as_path(),
@@ -997,6 +998,7 @@ mod tests {
};
let result = collect_specifiers(
FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(PathOrPatternSet::new(vec![PathOrPattern::new(
&format!(
"{}{}",