feat(unstable/lint): no-slow-types for JSR packages (#22430)

1. Renames zap/fast-check to instead be a `no-slow-types` lint rule.
2. This lint rule is automatically run when doing `deno lint` for
packages (deno.json files with a name, version, and exports field).
3. This lint rule still runs on publish. It can be skipped by running
with `--allow-slow-types`.
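
For example, in a hypothetical package (a deno.json with a name, version, and exports field), the rule flags exported functions whose return type has to be inferred, since consumers must then type-check the function body to learn its public type:

```ts
// mod.ts of a hypothetical @scope/pkg package

// Flagged by no-slow-types: the return type must be inferred from the body.
export function add(a: number, b: number) {
  return a + b;
}

// Passes: an explicit return type keeps the public API fast to type check.
export function addTyped(a: number, b: number): number {
  return a + b;
}
```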
David Sherret 2024-02-19 10:28:41 -05:00 committed by GitHub
parent 2b279ad630
commit 66424032a2
49 changed files with 781 additions and 431 deletions

Cargo.lock generated

@@ -1215,9 +1215,9 @@ dependencies = [
[[package]]
name = "deno_config"
-version = "0.9.2"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e587768367b7b1e353407feccaf1fee9358f83ccd2d75ce405d59ec480172831"
+checksum = "ba7641dd37ffcc1aeb06dff206a3bdd9e9a52f177f5edd43b734933174c38067"
dependencies = [
"anyhow",
"glob",
@@ -1418,9 +1418,9 @@ dependencies = [
[[package]]
name = "deno_graph"
-version = "0.66.0"
+version = "0.66.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c67c7c05d70e43560b1dfa38ee385d2d0153ccd4ea16fdc6a706881fd60f3c5"
+checksum = "e10efbd226fb00e97c04350051cbb025957b2de025117493ee5b9e53cc7e230f"
dependencies = [
"anyhow",
"async-trait",


@@ -64,11 +64,11 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
-deno_config = "=0.9.2"
+deno_config = "=0.10.0"
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.107.0", features = ["html"] }
deno_emit = "=0.37.0"
-deno_graph = "=0.66.0"
+deno_graph = "=0.66.2"
deno_lint = { version = "=0.56.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm = "=0.17.0"


@@ -301,7 +301,7 @@ pub struct VendorFlags {
pub struct PublishFlags {
pub token: Option<String>,
pub dry_run: bool,
-pub no_zap: bool,
+pub allow_slow_types: bool,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -2389,9 +2389,9 @@ fn publish_subcommand() -> Command {
.action(ArgAction::SetTrue),
)
.arg(
-Arg::new("no-zap")
-.long("no-zap")
-.help("Skip Zap compatibility validation")
+Arg::new("allow-slow-types")
+.long("allow-slow-types")
+.help("Allow publishing with slow types")
.action(ArgAction::SetTrue),
)
})
@@ -3828,7 +3828,7 @@ fn publish_parse(flags: &mut Flags, matches: &mut ArgMatches) {
flags.subcommand = DenoSubcommand::Publish(PublishFlags {
token: matches.remove_one("token"),
dry_run: matches.get_flag("dry-run"),
-no_zap: matches.get_flag("no-zap"),
+allow_slow_types: matches.get_flag("allow-slow-types"),
});
}


@@ -23,6 +23,7 @@ use crate::util::sync::TaskQueue;
use crate::util::sync::TaskQueuePermit;
use deno_config::ConfigFile;
+use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
@@ -277,7 +278,7 @@ impl ModuleGraphBuilder {
graph_kind: GraphKind,
roots: Vec<ModuleSpecifier>,
loader: &mut dyn Loader,
-) -> Result<deno_graph::ModuleGraph, AnyError> {
+) -> Result<ModuleGraph, AnyError> {
self
.create_graph_with_options(CreateGraphOptions {
is_dynamic: false,
@@ -289,10 +290,29 @@
.await
}
+pub async fn create_publish_graph(
+&self,
+packages: &[WorkspaceMemberConfig],
+) -> Result<ModuleGraph, AnyError> {
+let mut roots = Vec::new();
+for package in packages {
+roots.extend(package.config_file.resolve_export_value_urls()?);
+}
+self
+.create_graph_with_options(CreateGraphOptions {
+is_dynamic: false,
+graph_kind: deno_graph::GraphKind::All,
+roots,
+workspace_fast_check: true,
+loader: None,
+})
+.await
+}
pub async fn create_graph_with_options(
&self,
options: CreateGraphOptions<'_>,
-) -> Result<deno_graph::ModuleGraph, AnyError> {
+) -> Result<ModuleGraph, AnyError> {
let mut graph = ModuleGraph::new(options.graph_kind);
self


@@ -796,7 +796,11 @@ fn generate_lint_diagnostics(
let documents = snapshot
.documents
.documents(DocumentsFilter::OpenDiagnosable);
-let lint_rules = get_configured_rules(lint_options.rules.clone());
+let lint_rules = get_configured_rules(
+lint_options.rules.clone(),
+config.config_file.as_ref(),
+)
+.rules;
let mut diagnostics_vec = Vec::new();
for document in documents {
let settings =


@ -2,28 +2,19 @@
//! This module provides file linting utilities using //! This module provides file linting utilities using
//! [`deno_lint`](https://github.com/denoland/deno_lint). //! [`deno_lint`](https://github.com/denoland/deno_lint).
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::LintReporterKind;
use crate::args::LintRulesConfig;
use crate::colors;
use crate::factory::CliFactory;
use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::fs::specifier_from_file_path;
use crate::util::fs::FileCollector;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
use deno_ast::diagnostics::Diagnostic; use deno_ast::diagnostics::Diagnostic;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_config::glob::FilePatterns; use deno_config::glob::FilePatterns;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json; use deno_core::serde_json;
use deno_graph::FastCheckDiagnostic;
use deno_lint::diagnostic::LintDiagnostic; use deno_lint::diagnostic::LintDiagnostic;
use deno_lint::linter::LintFileOptions; use deno_lint::linter::LintFileOptions;
use deno_lint::linter::Linter; use deno_lint::linter::Linter;
@ -33,15 +24,32 @@ use deno_lint::rules::LintRule;
use log::debug; use log::debug;
use log::info; use log::info;
use serde::Serialize; use serde::Serialize;
use std::borrow::Cow;
use std::collections::HashSet;
use std::fs; use std::fs;
use std::io::stdin; use std::io::stdin;
use std::io::Read; use std::io::Read;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex;
use crate::args::Flags;
use crate::args::LintFlags;
use crate::args::LintOptions;
use crate::args::LintReporterKind;
use crate::args::LintRulesConfig;
use crate::cache::IncrementalCache; use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
use crate::tools::fmt::run_parallelized;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::fs::specifier_from_file_path;
use crate::util::fs::FileCollector;
use crate::util::path::is_script_ext;
use crate::util::sync::AtomicFlag;
pub mod no_slow_types;
static STDIN_FILE_NAME: &str = "$deno$stdin.ts"; static STDIN_FILE_NAME: &str = "$deno$stdin.ts";
@ -110,15 +118,18 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
let success = if is_stdin { let success = if is_stdin {
let reporter_kind = lint_options.reporter_kind; let reporter_kind = lint_options.reporter_kind;
let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind))); let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
let lint_rules = get_config_rules_err_empty(lint_options.rules)?; let lint_rules = get_config_rules_err_empty(
lint_options.rules,
cli_options.maybe_config_file().as_ref(),
)?;
let file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME); let file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
let r = lint_stdin(&file_path, lint_rules); let r = lint_stdin(&file_path, lint_rules.rules);
let success = handle_lint_result( let success = handle_lint_result(
&file_path.to_string_lossy(), &file_path.to_string_lossy(),
r, r,
reporter_lock.clone(), reporter_lock.clone(),
); );
reporter_lock.lock().unwrap().close(1); reporter_lock.lock().close(1);
success success
} else { } else {
let target_files = let target_files =
@ -146,61 +157,105 @@ async fn lint_files(
paths: Vec<PathBuf>, paths: Vec<PathBuf>,
) -> Result<bool, AnyError> { ) -> Result<bool, AnyError> {
let caches = factory.caches()?; let caches = factory.caches()?;
let lint_rules = get_config_rules_err_empty(lint_options.rules)?; let maybe_config_file = factory.cli_options().maybe_config_file().as_ref();
let lint_rules =
get_config_rules_err_empty(lint_options.rules, maybe_config_file)?;
let incremental_cache = Arc::new(IncrementalCache::new( let incremental_cache = Arc::new(IncrementalCache::new(
caches.lint_incremental_cache_db(), caches.lint_incremental_cache_db(),
// use a hash of the rule names in order to bust the cache &lint_rules.incremental_cache_state(),
&{
// ensure this is stable by sorting it
let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
names.sort_unstable();
names
},
&paths, &paths,
)); ));
let target_files_len = paths.len(); let target_files_len = paths.len();
let reporter_kind = lint_options.reporter_kind; let reporter_kind = lint_options.reporter_kind;
// todo(dsherret): abstract away this lock behind a performant interface
let reporter_lock = let reporter_lock =
Arc::new(Mutex::new(create_reporter(reporter_kind.clone()))); Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
let has_error = Arc::new(AtomicFlag::default()); let has_error = Arc::new(AtomicFlag::default());
run_parallelized(paths, { let mut futures = Vec::with_capacity(2);
if lint_rules.no_slow_types {
if let Some(config_file) = maybe_config_file {
let members = config_file.to_workspace_members()?;
let has_error = has_error.clone();
let reporter_lock = reporter_lock.clone();
let module_graph_builder = factory.module_graph_builder().await?.clone();
let path_urls = paths
.iter()
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
.collect::<HashSet<_>>();
futures.push(deno_core::unsync::spawn(async move {
let graph = module_graph_builder.create_publish_graph(&members).await?;
// todo(dsherret): this isn't exactly correct as linting isn't properly
// setup to handle workspaces. Iterating over the workspace members
// should be done at a higher level because it also needs to take into
// account the config per workspace member.
for member in &members {
let export_urls = member.config_file.resolve_export_value_urls()?;
if !export_urls.iter().any(|url| path_urls.contains(url)) {
continue; // entrypoint is not specified, so skip
}
let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
&export_urls,
&graph,
);
if !diagnostics.is_empty() {
has_error.raise();
let mut reporter = reporter_lock.lock();
for diagnostic in &diagnostics {
reporter
.visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
}
}
}
Ok(())
}));
}
}
futures.push({
let has_error = has_error.clone(); let has_error = has_error.clone();
let lint_rules = lint_rules.clone(); let lint_rules = lint_rules.rules.clone();
let reporter_lock = reporter_lock.clone(); let reporter_lock = reporter_lock.clone();
let incremental_cache = incremental_cache.clone(); let incremental_cache = incremental_cache.clone();
move |file_path| { deno_core::unsync::spawn(async move {
let file_text = fs::read_to_string(&file_path)?; run_parallelized(paths, {
move |file_path| {
let file_text = fs::read_to_string(&file_path)?;
// don't bother rechecking this file if it didn't have any diagnostics before // don't bother rechecking this file if it didn't have any diagnostics before
if incremental_cache.is_file_same(&file_path, &file_text) { if incremental_cache.is_file_same(&file_path, &file_text) {
return Ok(()); return Ok(());
} }
let r = lint_file(&file_path, file_text, lint_rules); let r = lint_file(&file_path, file_text, lint_rules);
if let Ok((file_diagnostics, file_source)) = &r { if let Ok((file_diagnostics, file_source)) = &r {
if file_diagnostics.is_empty() { if file_diagnostics.is_empty() {
// update the incremental cache if there were no diagnostics // update the incremental cache if there were no diagnostics
incremental_cache incremental_cache
.update_file(&file_path, file_source.text_info().text_str()) .update_file(&file_path, file_source.text_info().text_str())
}
}
let success = handle_lint_result(
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
if !success {
has_error.raise();
}
Ok(())
} }
} })
.await
})
});
let success = handle_lint_result( deno_core::futures::future::try_join_all(futures).await?;
&file_path.to_string_lossy(),
r,
reporter_lock.clone(),
);
if !success {
has_error.raise();
}
Ok(())
}
})
.await?;
incremental_cache.wait_completion().await; incremental_cache.wait_completion().await;
reporter_lock.lock().unwrap().close(target_files_len); reporter_lock.lock().close(target_files_len);
Ok(!has_error.is_raised()) Ok(!has_error.is_raised())
} }
@ -311,16 +366,16 @@ fn handle_lint_result(
result: Result<(Vec<LintDiagnostic>, ParsedSource), AnyError>, result: Result<(Vec<LintDiagnostic>, ParsedSource), AnyError>,
reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>, reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
) -> bool { ) -> bool {
let mut reporter = reporter_lock.lock().unwrap(); let mut reporter = reporter_lock.lock();
match result { match result {
Ok((mut file_diagnostics, source)) => { Ok((mut file_diagnostics, _source)) => {
file_diagnostics.sort_by(|a, b| match a.specifier.cmp(&b.specifier) { file_diagnostics.sort_by(|a, b| match a.specifier.cmp(&b.specifier) {
std::cmp::Ordering::Equal => a.range.start.cmp(&b.range.start), std::cmp::Ordering::Equal => a.range.start.cmp(&b.range.start),
file_order => file_order, file_order => file_order,
}); });
for d in file_diagnostics.iter() { for d in &file_diagnostics {
reporter.visit_diagnostic(d, &source); reporter.visit_diagnostic(LintOrCliDiagnostic::Lint(d));
} }
file_diagnostics.is_empty() file_diagnostics.is_empty()
} }
@ -331,8 +386,99 @@ fn handle_lint_result(
} }
} }
#[derive(Clone, Copy)]
pub enum LintOrCliDiagnostic<'a> {
Lint(&'a LintDiagnostic),
FastCheck(&'a FastCheckDiagnostic),
}
impl<'a> LintOrCliDiagnostic<'a> {
pub fn specifier(&self) -> &ModuleSpecifier {
match self {
LintOrCliDiagnostic::Lint(d) => &d.specifier,
LintOrCliDiagnostic::FastCheck(d) => d.specifier(),
}
}
pub fn range(&self) -> Option<(&SourceTextInfo, SourceRange)> {
match self {
LintOrCliDiagnostic::Lint(d) => Some((&d.text_info, d.range)),
LintOrCliDiagnostic::FastCheck(d) => {
d.range().map(|r| (&r.text_info, r.range))
}
}
}
}
impl<'a> deno_ast::diagnostics::Diagnostic for LintOrCliDiagnostic<'a> {
fn level(&self) -> deno_ast::diagnostics::DiagnosticLevel {
match self {
LintOrCliDiagnostic::Lint(d) => d.level(),
LintOrCliDiagnostic::FastCheck(d) => d.level(),
}
}
fn code(&self) -> Cow<'_, str> {
match self {
LintOrCliDiagnostic::Lint(d) => d.code(),
LintOrCliDiagnostic::FastCheck(_) => Cow::Borrowed("no-slow-types"),
}
}
fn message(&self) -> Cow<'_, str> {
match self {
LintOrCliDiagnostic::Lint(d) => d.message(),
LintOrCliDiagnostic::FastCheck(d) => d.message(),
}
}
fn location(&self) -> deno_ast::diagnostics::DiagnosticLocation {
match self {
LintOrCliDiagnostic::Lint(d) => d.location(),
LintOrCliDiagnostic::FastCheck(d) => d.location(),
}
}
fn snippet(&self) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
match self {
LintOrCliDiagnostic::Lint(d) => d.snippet(),
LintOrCliDiagnostic::FastCheck(d) => d.snippet(),
}
}
fn hint(&self) -> Option<Cow<'_, str>> {
match self {
LintOrCliDiagnostic::Lint(d) => d.hint(),
LintOrCliDiagnostic::FastCheck(d) => d.hint(),
}
}
fn snippet_fixed(
&self,
) -> Option<deno_ast::diagnostics::DiagnosticSnippet<'_>> {
match self {
LintOrCliDiagnostic::Lint(d) => d.snippet_fixed(),
LintOrCliDiagnostic::FastCheck(d) => d.snippet_fixed(),
}
}
fn info(&self) -> Cow<'_, [Cow<'_, str>]> {
match self {
LintOrCliDiagnostic::Lint(d) => d.info(),
LintOrCliDiagnostic::FastCheck(d) => d.info(),
}
}
fn docs_url(&self) -> Option<Cow<'_, str>> {
match self {
LintOrCliDiagnostic::Lint(d) => d.docs_url(),
LintOrCliDiagnostic::FastCheck(d) => d.docs_url(),
}
}
}
trait LintReporter { trait LintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic, source: &ParsedSource); fn visit_diagnostic(&mut self, d: LintOrCliDiagnostic);
fn visit_error(&mut self, file_path: &str, err: &AnyError); fn visit_error(&mut self, file_path: &str, err: &AnyError);
fn close(&mut self, check_count: usize); fn close(&mut self, check_count: usize);
} }
@ -354,7 +500,7 @@ impl PrettyLintReporter {
} }
impl LintReporter for PrettyLintReporter { impl LintReporter for PrettyLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic, _source: &ParsedSource) { fn visit_diagnostic(&mut self, d: LintOrCliDiagnostic) {
self.lint_count += 1; self.lint_count += 1;
eprintln!("{}", d.display()); eprintln!("{}", d.display());
@ -391,18 +537,25 @@ impl CompactLintReporter {
} }
impl LintReporter for CompactLintReporter { impl LintReporter for CompactLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic, _source: &ParsedSource) { fn visit_diagnostic(&mut self, d: LintOrCliDiagnostic) {
self.lint_count += 1; self.lint_count += 1;
let line_and_column = d.text_info.line_and_column_display(d.range.start); match d.range() {
eprintln!( Some((text_info, range)) => {
"{}: line {}, col {} - {} ({})", let line_and_column = text_info.line_and_column_display(range.start);
d.specifier, eprintln!(
line_and_column.line_number, "{}: line {}, col {} - {} ({})",
line_and_column.column_number, d.specifier(),
d.message, line_and_column.line_number,
d.code line_and_column.column_number,
) d.message(),
d.code(),
)
}
None => {
eprintln!("{}: {} ({})", d.specifier(), d.message(), d.code())
}
}
} }
fn visit_error(&mut self, file_path: &str, err: &AnyError) { fn visit_error(&mut self, file_path: &str, err: &AnyError) {
@ -457,7 +610,7 @@ struct JsonLintDiagnosticRange {
#[derive(Clone, Serialize)] #[derive(Clone, Serialize)]
struct JsonLintDiagnostic { struct JsonLintDiagnostic {
pub filename: String, pub filename: String,
pub range: JsonLintDiagnosticRange, pub range: Option<JsonLintDiagnosticRange>,
pub message: String, pub message: String,
pub code: String, pub code: String,
pub hint: Option<String>, pub hint: Option<String>,
@ -479,22 +632,22 @@ impl JsonLintReporter {
} }
impl LintReporter for JsonLintReporter { impl LintReporter for JsonLintReporter {
fn visit_diagnostic(&mut self, d: &LintDiagnostic, _source: &ParsedSource) { fn visit_diagnostic(&mut self, d: LintOrCliDiagnostic) {
self.diagnostics.push(JsonLintDiagnostic { self.diagnostics.push(JsonLintDiagnostic {
filename: d.specifier.to_string(), filename: d.specifier().to_string(),
range: JsonLintDiagnosticRange { range: d.range().map(|(text_info, range)| JsonLintDiagnosticRange {
start: JsonDiagnosticLintPosition::new( start: JsonDiagnosticLintPosition::new(
d.range.start.as_byte_index(d.text_info.range().start), range.start.as_byte_index(text_info.range().start),
d.text_info.line_and_column_index(d.range.start), text_info.line_and_column_index(range.start),
), ),
end: JsonDiagnosticLintPosition::new( end: JsonDiagnosticLintPosition::new(
d.range.end.as_byte_index(d.text_info.range().start), range.end.as_byte_index(text_info.range().start),
d.text_info.line_and_column_index(d.range.end), text_info.line_and_column_index(range.end),
), ),
}, }),
message: d.message.clone(), message: d.message().to_string(),
code: d.code.clone(), code: d.code().to_string(),
hint: d.hint.clone(), hint: d.hint().map(|h| h.to_string()),
}); });
} }
@ -518,13 +671,22 @@ fn sort_diagnostics(diagnostics: &mut [JsonLintDiagnostic]) {
use std::cmp::Ordering; use std::cmp::Ordering;
let file_order = a.filename.cmp(&b.filename); let file_order = a.filename.cmp(&b.filename);
match file_order { match file_order {
Ordering::Equal => { Ordering::Equal => match &a.range {
let line_order = a.range.start.line.cmp(&b.range.start.line); Some(a_range) => match &b.range {
match line_order { Some(b_range) => {
Ordering::Equal => a.range.start.col.cmp(&b.range.start.col), let line_order = a_range.start.line.cmp(&b_range.start.line);
_ => line_order, match line_order {
} Ordering::Equal => a_range.start.col.cmp(&b_range.start.col),
} _ => line_order,
}
}
None => Ordering::Less,
},
None => match &b.range {
Some(_) => Ordering::Greater,
None => Ordering::Equal,
},
},
_ => file_order, _ => file_order,
} }
}); });
@ -532,26 +694,75 @@ fn sort_diagnostics(diagnostics: &mut [JsonLintDiagnostic]) {
fn get_config_rules_err_empty( fn get_config_rules_err_empty(
rules: LintRulesConfig, rules: LintRulesConfig,
) -> Result<Vec<&'static dyn LintRule>, AnyError> { maybe_config_file: Option<&deno_config::ConfigFile>,
let lint_rules = get_configured_rules(rules); ) -> Result<ConfiguredRules, AnyError> {
if lint_rules.is_empty() { let lint_rules = get_configured_rules(rules, maybe_config_file);
if lint_rules.rules.is_empty() {
bail!("No rules have been configured") bail!("No rules have been configured")
} }
Ok(lint_rules) Ok(lint_rules)
} }
#[derive(Debug, Clone)]
pub struct ConfiguredRules {
pub rules: Vec<&'static dyn LintRule>,
// cli specific rules
pub no_slow_types: bool,
}
impl ConfiguredRules {
fn incremental_cache_state(&self) -> Vec<&str> {
// use a hash of the rule names in order to bust the cache
let mut names = self.rules.iter().map(|r| r.code()).collect::<Vec<_>>();
// ensure this is stable by sorting it
names.sort_unstable();
if self.no_slow_types {
names.push("no-slow-types");
}
names
}
}
pub fn get_configured_rules( pub fn get_configured_rules(
rules: LintRulesConfig, rules: LintRulesConfig,
) -> Vec<&'static dyn LintRule> { maybe_config_file: Option<&deno_config::ConfigFile>,
) -> ConfiguredRules {
const NO_SLOW_TYPES_NAME: &str = "no-slow-types";
let implicit_no_slow_types = maybe_config_file
.map(|c| c.is_package() || !c.json.workspaces.is_empty())
.unwrap_or(false);
if rules.tags.is_none() && rules.include.is_none() && rules.exclude.is_none() if rules.tags.is_none() && rules.include.is_none() && rules.exclude.is_none()
{ {
rules::get_recommended_rules() ConfiguredRules {
rules: rules::get_recommended_rules(),
no_slow_types: implicit_no_slow_types,
}
} else { } else {
rules::get_filtered_rules( let no_slow_types = implicit_no_slow_types
&& !rules
.exclude
.as_ref()
.map(|exclude| exclude.iter().any(|i| i == NO_SLOW_TYPES_NAME))
.unwrap_or(false);
let rules = rules::get_filtered_rules(
rules.tags.or_else(|| Some(vec!["recommended".to_string()])), rules.tags.or_else(|| Some(vec!["recommended".to_string()])),
rules.exclude, rules.exclude.map(|exclude| {
rules.include, exclude
) .into_iter()
.filter(|c| c != NO_SLOW_TYPES_NAME)
.collect()
}),
rules.include.map(|include| {
include
.into_iter()
.filter(|c| c != NO_SLOW_TYPES_NAME)
.collect()
}),
);
ConfiguredRules {
rules,
no_slow_types,
}
} }
} }
@ -569,8 +780,9 @@ mod test {
include: None, include: None,
tags: None, tags: None,
}; };
let rules = get_configured_rules(rules_config); let rules = get_configured_rules(rules_config, None);
let mut rule_names = rules let mut rule_names = rules
.rules
.into_iter() .into_iter()
.map(|r| r.code().to_string()) .map(|r| r.code().to_string())
.collect::<Vec<_>>(); .collect::<Vec<_>>();


@ -0,0 +1,38 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_ast::diagnostics::Diagnostic;
use deno_ast::ModuleSpecifier;
use deno_graph::FastCheckDiagnostic;
use deno_graph::ModuleGraph;
/// Collects diagnostics from the module graph for the
/// given package's export URLs.
pub fn collect_no_slow_type_diagnostics(
package_export_urls: &[ModuleSpecifier],
graph: &ModuleGraph,
) -> Vec<FastCheckDiagnostic> {
let mut js_exports = package_export_urls
.iter()
.filter_map(|url| graph.get(url).and_then(|m| m.js()));
// fast check puts the same diagnostics in each entrypoint for the
// package (since it's all or nothing), so we only need to check
// the first one JS entrypoint
let Some(module) = js_exports.next() else {
// could happen if all the exports are JSON
return vec![];
};
if let Some(diagnostics) = module.fast_check_diagnostics() {
let mut diagnostics = diagnostics.clone();
diagnostics.sort_by_cached_key(|d| {
(
d.specifier().clone(),
d.range().map(|r| r.range),
d.code().to_string(),
)
});
diagnostics
} else {
Vec::new()
}
}


@@ -30,7 +30,7 @@ pub struct PublishDiagnosticsCollector {
impl PublishDiagnosticsCollector {
pub fn print_and_error(&self) -> Result<(), AnyError> {
let mut errors = 0;
-let mut has_zap_errors = false;
+let mut has_slow_types_errors = false;
let diagnostics = self.diagnostics.lock().unwrap().take();
for diagnostic in diagnostics {
eprint!("{}", diagnostic.display());
@@ -38,17 +38,23 @@ impl PublishDiagnosticsCollector {
errors += 1;
}
if matches!(diagnostic, PublishDiagnostic::FastCheck(..)) {
-has_zap_errors = true;
+has_slow_types_errors = true;
}
}
if errors > 0 {
-if has_zap_errors {
+if has_slow_types_errors {
eprintln!(
-"This package contains Zap errors. Although conforming to Zap will"
+"This package contains errors for slow types. Fixing these errors will:\n"
);
-eprintln!("significantly improve the type checking performance of your library,");
-eprintln!("you can choose to skip it by providing the --no-zap flag.");
-eprintln!();
+eprintln!(
+" 1. Significantly improve your package users' type checking performance."
+);
+eprintln!(" 2. Improve the automatic documentation generation.");
+eprintln!(" 3. Enable automatic .d.ts generation for Node.js.");
+eprintln!(
+"\nDon't want to bother? You can choose to skip this step by"
+);
+eprintln!("providing the --allow-slow-types flag.\n");
}
Err(anyhow!(


@ -1,17 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashSet; use std::collections::HashSet;
use std::collections::VecDeque;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
use deno_config::ConfigFile;
use deno_config::WorkspaceConfig;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_graph::FastCheckDiagnostic;
use deno_graph::ModuleEntryRef; use deno_graph::ModuleEntryRef;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved; use deno_graph::ResolutionResolved;
@ -21,55 +13,6 @@ use lsp_types::Url;
use super::diagnostics::PublishDiagnostic; use super::diagnostics::PublishDiagnostic;
use super::diagnostics::PublishDiagnosticsCollector; use super::diagnostics::PublishDiagnosticsCollector;
#[derive(Debug)]
pub struct MemberRoots {
pub name: String,
pub dir_url: ModuleSpecifier,
pub exports: Vec<ModuleSpecifier>,
}
pub fn get_workspace_member_roots(
config: &WorkspaceConfig,
) -> Result<Vec<MemberRoots>, AnyError> {
let mut members = Vec::with_capacity(config.members.len());
let mut seen_names = HashSet::with_capacity(config.members.len());
for member in &config.members {
if !seen_names.insert(&member.package_name) {
bail!(
"Cannot have two workspace packages with the same name ('{}' at {})",
member.package_name,
member.path.display(),
);
}
members.push(MemberRoots {
name: member.package_name.clone(),
dir_url: member.config_file.specifier.join("./").unwrap().clone(),
exports: resolve_config_file_roots_from_exports(&member.config_file)?,
});
}
Ok(members)
}
pub fn resolve_config_file_roots_from_exports(
config_file: &ConfigFile,
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let exports_config = config_file
.to_exports_config()
.with_context(|| {
format!("Failed to parse exports at {}", config_file.specifier)
})?
.into_map();
let mut exports = Vec::with_capacity(exports_config.len());
for (_, value) in exports_config {
let entry_point =
config_file.specifier.join(&value).with_context(|| {
format!("Failed to join {} with {}", config_file.specifier, value)
})?;
exports.push(entry_point);
}
Ok(exports)
}
pub fn collect_invalid_external_imports( pub fn collect_invalid_external_imports(
graph: &ModuleGraph, graph: &ModuleGraph,
diagnostics_collector: &PublishDiagnosticsCollector, diagnostics_collector: &PublishDiagnosticsCollector,
@ -142,63 +85,3 @@ pub fn collect_invalid_external_imports(
} }
} }
} }
/// Collects diagnostics from the module graph for the given packages.
/// Returns true if any diagnostics were collected.
pub fn collect_fast_check_type_graph_diagnostics(
graph: &ModuleGraph,
packages: &[MemberRoots],
diagnostics_collector: &PublishDiagnosticsCollector,
) -> bool {
let mut had_diagnostic = false;
let mut seen_modules = HashSet::with_capacity(graph.specifiers_count());
for package in packages {
let mut pending = VecDeque::new();
for export in &package.exports {
if seen_modules.insert(export.clone()) {
pending.push_back(export.clone());
}
}
'analyze_package: while let Some(specifier) = pending.pop_front() {
let Ok(Some(module)) = graph.try_get_prefer_types(&specifier) else {
continue;
};
let Some(es_module) = module.js() else {
continue;
};
if let Some(diagnostics) = es_module.fast_check_diagnostics() {
for diagnostic in diagnostics {
had_diagnostic = true;
diagnostics_collector
.push(PublishDiagnostic::FastCheck(diagnostic.clone()));
if matches!(
diagnostic,
FastCheckDiagnostic::UnsupportedJavaScriptEntrypoint { .. }
) {
break 'analyze_package; // no need to keep analyzing this package
}
}
}
// analyze the next dependencies
for dep in es_module.dependencies_prefer_fast_check().values() {
let Some(specifier) = graph.resolve_dependency_from_dep(dep, true)
else {
continue;
};
let dep_in_same_package =
specifier.as_str().starts_with(package.dir_url.as_str());
if dep_in_same_package {
let is_new = seen_modules.insert(specifier.clone());
if is_new {
pending.push_back(specifier.clone());
}
}
}
}
}
had_diagnostic
}


@ -8,6 +8,7 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD; use base64::prelude::BASE64_STANDARD;
use base64::Engine; use base64::Engine;
use deno_config::ConfigFile; use deno_config::ConfigFile;
use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -33,12 +34,10 @@ use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphBuilder;
use crate::http_util::HttpClient; use crate::http_util::HttpClient;
use crate::tools::check::CheckOptions; use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;
use crate::tools::registry::diagnostics::PublishDiagnostic;
use crate::tools::registry::diagnostics::PublishDiagnosticsCollector; use crate::tools::registry::diagnostics::PublishDiagnosticsCollector;
use crate::tools::registry::graph::collect_fast_check_type_graph_diagnostics;
use crate::tools::registry::graph::collect_invalid_external_imports; use crate::tools::registry::graph::collect_invalid_external_imports;
use crate::tools::registry::graph::get_workspace_member_roots;
use crate::tools::registry::graph::resolve_config_file_roots_from_exports;
use crate::tools::registry::graph::MemberRoots;
use crate::util::display::human_size; use crate::util::display::human_size;
use crate::util::import_map::ImportMapUnfurler; use crate::util::import_map::ImportMapUnfurler;
@ -80,16 +79,8 @@ impl PreparedPublishPackage {
static SUGGESTED_ENTRYPOINTS: [&str; 4] = static SUGGESTED_ENTRYPOINTS: [&str; 4] =
["mod.ts", "mod.js", "index.ts", "index.js"]; ["mod.ts", "mod.js", "index.ts", "index.js"];
fn get_deno_json_package_name(
deno_json: &ConfigFile,
) -> Result<String, AnyError> {
match deno_json.json.name.clone() {
Some(name) => Ok(name),
None => bail!("{} is missing 'name' field", deno_json.specifier),
}
}
async fn prepare_publish( async fn prepare_publish(
package_name: &str,
deno_json: &ConfigFile, deno_json: &ConfigFile,
source_cache: Arc<ParsedSourceCache>, source_cache: Arc<ParsedSourceCache>,
graph: Arc<deno_graph::ModuleGraph>, graph: Arc<deno_graph::ModuleGraph>,
@ -101,7 +92,6 @@ async fn prepare_publish(
let Some(version) = deno_json.json.version.clone() else { let Some(version) = deno_json.json.version.clone() else {
bail!("{} is missing 'version' field", deno_json.specifier); bail!("{} is missing 'version' field", deno_json.specifier);
}; };
let name = get_deno_json_package_name(deno_json)?;
if deno_json.json.exports.is_none() { if deno_json.json.exports.is_none() {
let mut suggested_entrypoint = None; let mut suggested_entrypoint = None;
@ -118,22 +108,22 @@ async fn prepare_publish(
"version": "{}", "version": "{}",
"exports": "{}" "exports": "{}"
}}"#, }}"#,
name, package_name,
version, version,
suggested_entrypoint.unwrap_or("<path_to_entrypoint>") suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
); );
bail!( bail!(
"You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}", "You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
name, package_name,
deno_json.specifier, deno_json.specifier,
exports_content exports_content
); );
} }
let Some(name) = name.strip_prefix('@') else { let Some(name_no_at) = package_name.strip_prefix('@') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format"); bail!("Invalid package name, use '@<scope_name>/<package_name> format");
}; };
let Some((scope, package_name)) = name.split_once('/') else { let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format"); bail!("Invalid package name, use '@<scope_name>/<package_name> format");
}; };
let file_patterns = deno_json.to_publish_config()?.map(|c| c.files); let file_patterns = deno_json.to_publish_config()?.map(|c| c.files);
@ -152,11 +142,11 @@ async fn prepare_publish(
}) })
.await??; .await??;
log::debug!("Tarball size ({}): {}", name, tarball.bytes.len()); log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len());
Ok(Rc::new(PreparedPublishPackage { Ok(Rc::new(PreparedPublishPackage {
scope: scope.to_string(), scope: scope.to_string(),
package: package_name.to_string(), package: name_no_scope.to_string(),
version: version.to_string(), version: version.to_string(),
tarball, tarball,
// the config file is always at the root of a publishing dir, // the config file is always at the root of a publishing dir,
@ -660,77 +650,44 @@ struct PreparePackagesData {
async fn prepare_packages_for_publishing( async fn prepare_packages_for_publishing(
cli_factory: &CliFactory, cli_factory: &CliFactory,
no_zap: bool, allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector, diagnostics_collector: &PublishDiagnosticsCollector,
deno_json: ConfigFile, deno_json: ConfigFile,
import_map: Arc<ImportMap>, import_map: Arc<ImportMap>,
) -> Result<PreparePackagesData, AnyError> { ) -> Result<PreparePackagesData, AnyError> {
let maybe_workspace_config = deno_json.to_workspace_config()?; let members = deno_json.to_workspace_members()?;
let module_graph_builder = cli_factory.module_graph_builder().await?.as_ref(); let module_graph_builder = cli_factory.module_graph_builder().await?.as_ref();
let source_cache = cli_factory.parsed_source_cache(); let source_cache = cli_factory.parsed_source_cache();
let type_checker = cli_factory.type_checker().await?; let type_checker = cli_factory.type_checker().await?;
let cli_options = cli_factory.cli_options(); let cli_options = cli_factory.cli_options();
let Some(workspace_config) = maybe_workspace_config else { if members.len() > 1 {
let roots = resolve_config_file_roots_from_exports(&deno_json)?; println!("Publishing a workspace...");
let graph = build_and_check_graph_for_publish( }
module_graph_builder,
type_checker,
cli_options,
no_zap,
diagnostics_collector,
&[MemberRoots {
name: get_deno_json_package_name(&deno_json)?,
dir_url: deno_json.specifier.join("./").unwrap().clone(),
exports: roots,
}],
)
.await?;
let package = prepare_publish(
&deno_json,
source_cache.clone(),
graph,
import_map,
diagnostics_collector,
)
.await?;
let package_name = format!("@{}/{}", package.scope, package.package);
let publish_order_graph =
PublishOrderGraph::new_single(package_name.clone());
let package_by_name = HashMap::from([(package_name, package)]);
return Ok(PreparePackagesData {
publish_order_graph,
package_by_name,
});
};
println!("Publishing a workspace...");
// create the module graph // create the module graph
let roots = get_workspace_member_roots(&workspace_config)?;
let graph = build_and_check_graph_for_publish( let graph = build_and_check_graph_for_publish(
module_graph_builder, module_graph_builder,
type_checker, type_checker,
cli_options, cli_options,
no_zap, allow_slow_types,
diagnostics_collector, diagnostics_collector,
&roots, &members,
) )
.await?; .await?;
let mut package_by_name = let mut package_by_name = HashMap::with_capacity(members.len());
HashMap::with_capacity(workspace_config.members.len());
let publish_order_graph = let publish_order_graph =
publish_order::build_publish_order_graph(&graph, &roots)?; publish_order::build_publish_order_graph(&graph, &members)?;
let results = workspace_config let results = members
.members .into_iter()
.iter()
.cloned()
.map(|member| { .map(|member| {
let import_map = import_map.clone(); let import_map = import_map.clone();
let graph = graph.clone(); let graph = graph.clone();
async move { async move {
let package = prepare_publish( let package = prepare_publish(
&member.package_name,
&member.config_file, &member.config_file,
source_cache.clone(), source_cache.clone(),
graph, graph,
@ -761,64 +718,69 @@ async fn build_and_check_graph_for_publish(
module_graph_builder: &ModuleGraphBuilder, module_graph_builder: &ModuleGraphBuilder,
type_checker: &TypeChecker, type_checker: &TypeChecker,
cli_options: &CliOptions, cli_options: &CliOptions,
no_zap: bool, allow_slow_types: bool,
diagnostics_collector: &PublishDiagnosticsCollector, diagnostics_collector: &PublishDiagnosticsCollector,
packages: &[MemberRoots], packages: &[WorkspaceMemberConfig],
) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> { ) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
let graph = Arc::new( let graph =
module_graph_builder Arc::new(module_graph_builder.create_publish_graph(packages).await?);
.create_graph_with_options(crate::graph_util::CreateGraphOptions {
is_dynamic: false,
// All because we're going to use this same graph to determine the publish order later
graph_kind: deno_graph::GraphKind::All,
roots: packages
.iter()
.flat_map(|r| r.exports.iter())
.cloned()
.collect(),
workspace_fast_check: true,
loader: None,
})
.await?,
);
graph.valid()?; graph.valid()?;
// todo(dsherret): move to lint rule
collect_invalid_external_imports(&graph, diagnostics_collector); collect_invalid_external_imports(&graph, diagnostics_collector);
let mut has_fast_check_diagnostics = false; if allow_slow_types {
if !no_zap { log::info!(
log::info!("Checking fast check type graph for errors..."); concat!(
has_fast_check_diagnostics = collect_fast_check_type_graph_diagnostics( "{} Publishing a library with slow types is not recommended. ",
&graph, "This may lead to poor type checking performance for users of ",
packages, "your package, may affect the quality of automatic documentation ",
diagnostics_collector, "generation, and your package will not be shipped with a .d.ts ",
"file for Node.js users."
),
colors::yellow("Warning"),
); );
} } else {
log::info!("Checking for slow types in the public API...");
let mut any_pkg_had_diagnostics = false;
for package in packages {
let export_urls = package.config_file.resolve_export_value_urls()?;
let diagnostics =
no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);
if !diagnostics.is_empty() {
any_pkg_had_diagnostics = true;
for diagnostic in diagnostics {
diagnostics_collector.push(PublishDiagnostic::FastCheck(diagnostic));
}
}
}
if !has_fast_check_diagnostics { if !any_pkg_had_diagnostics {
log::info!("Ensuring type checks..."); // this is a temporary measure until we know that fast check is reliable and stable
let diagnostics = type_checker let check_diagnostics = type_checker
.check_diagnostics( .check_diagnostics(
graph.clone(), graph.clone(),
CheckOptions { CheckOptions {
lib: cli_options.ts_type_lib_window(), lib: cli_options.ts_type_lib_window(),
log_ignored_options: false, log_ignored_options: false,
reload: cli_options.reload_flag(), reload: cli_options.reload_flag(),
}, },
) )
.await?; .await?;
if !diagnostics.is_empty() { if !check_diagnostics.is_empty() {
bail!( bail!(
concat!( concat!(
"{:#}\n\n", "Failed ensuring public API type output is valid.\n\n",
"You may have discovered a bug in Deno's fast check implementation. ", "{:#}\n\n",
"Fast check is still early days and we would appreciate if you log a ", "You may have discovered a bug in Deno. Please open an issue at: ",
"bug if you believe this is one: https://github.com/denoland/deno/issues/" "https://github.com/denoland/deno/issues/"
), ),
diagnostics check_diagnostics
); );
}
} }
} }
Ok(graph) Ok(graph)
} }
@ -852,7 +814,7 @@ pub async fn publish(
let prepared_data = prepare_packages_for_publishing( let prepared_data = prepare_packages_for_publishing(
&cli_factory, &cli_factory,
publish_flags.no_zap, publish_flags.allow_slow_types,
&diagnostics_collector, &diagnostics_collector,
config_file.clone(), config_file.clone(),
import_map, import_map,
@ -866,10 +828,7 @@ pub async fn publish(
} }
if publish_flags.dry_run { if publish_flags.dry_run {
log::warn!( log::warn!("{} Aborting due to --dry-run", colors::yellow("Warning"));
"{} Aborting due to --dry-run",
crate::colors::yellow("Warning")
);
return Ok(()); return Ok(());
} }


@ -4,12 +4,12 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::collections::VecDeque; use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use super::graph::MemberRoots;
pub struct PublishOrderGraph { pub struct PublishOrderGraph {
packages: HashMap<String, HashSet<String>>, packages: HashMap<String, HashSet<String>>,
in_degree: HashMap<String, usize>, in_degree: HashMap<String, usize>,
@ -17,14 +17,6 @@ pub struct PublishOrderGraph {
} }
impl PublishOrderGraph { impl PublishOrderGraph {
pub fn new_single(package_name: String) -> Self {
Self {
packages: HashMap::from([(package_name.clone(), HashSet::new())]),
in_degree: HashMap::from([(package_name.clone(), 0)]),
reverse_map: HashMap::from([(package_name, Vec::new())]),
}
}
pub fn next(&mut self) -> Vec<String> { pub fn next(&mut self) -> Vec<String> {
let mut package_names_with_depth = self let mut package_names_with_depth = self
.in_degree .in_degree
@ -122,22 +114,26 @@ impl PublishOrderGraph {
pub fn build_publish_order_graph( pub fn build_publish_order_graph(
graph: &ModuleGraph, graph: &ModuleGraph,
roots: &[MemberRoots], roots: &[WorkspaceMemberConfig],
) -> Result<PublishOrderGraph, AnyError> { ) -> Result<PublishOrderGraph, AnyError> {
let packages = build_pkg_deps(graph, roots); let packages = build_pkg_deps(graph, roots)?;
Ok(build_publish_order_graph_from_pkgs_deps(packages)) Ok(build_publish_order_graph_from_pkgs_deps(packages))
} }
fn build_pkg_deps( fn build_pkg_deps(
graph: &deno_graph::ModuleGraph, graph: &deno_graph::ModuleGraph,
roots: &[MemberRoots], roots: &[WorkspaceMemberConfig],
) -> HashMap<String, HashSet<String>> { ) -> Result<HashMap<String, HashSet<String>>, AnyError> {
let mut members = HashMap::with_capacity(roots.len()); let mut members = HashMap::with_capacity(roots.len());
let mut seen_modules = HashSet::with_capacity(graph.modules().count()); let mut seen_modules = HashSet::with_capacity(graph.modules().count());
for root in roots { let roots = roots
.iter()
.map(|r| (ModuleSpecifier::from_file_path(&r.dir_path).unwrap(), r))
.collect::<Vec<_>>();
for (root_dir_url, root) in &roots {
let mut deps = HashSet::new(); let mut deps = HashSet::new();
let mut pending = VecDeque::new(); let mut pending = VecDeque::new();
pending.extend(root.exports.clone()); pending.extend(root.config_file.resolve_export_value_urls()?);
while let Some(specifier) = pending.pop_front() { while let Some(specifier) = pending.pop_front() {
let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else { let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
continue; continue;
@ -163,23 +159,23 @@ fn build_pkg_deps(
if specifier.scheme() != "file" { if specifier.scheme() != "file" {
continue; continue;
} }
if specifier.as_str().starts_with(root.dir_url.as_str()) { if specifier.as_str().starts_with(root_dir_url.as_str()) {
if seen_modules.insert(specifier.clone()) { if seen_modules.insert(specifier.clone()) {
pending.push_back(specifier.clone()); pending.push_back(specifier.clone());
} }
} else { } else {
let found_root = roots let found_root = roots.iter().find(|(dir_url, _)| {
.iter() specifier.as_str().starts_with(dir_url.as_str())
.find(|root| specifier.as_str().starts_with(root.dir_url.as_str())); });
if let Some(root) = found_root { if let Some(root) = found_root {
deps.insert(root.name.clone()); deps.insert(root.1.package_name.clone());
} }
} }
} }
} }
members.insert(root.name.clone(), deps); members.insert(root.package_name.clone(), deps);
} }
members Ok(members)
} }
fn build_publish_order_graph_from_pkgs_deps( fn build_publish_order_graph_from_pkgs_deps(


@@ -210,3 +210,45 @@ fn lint_with_glob_config_and_flags() {
assert_contains!(output, "Found 2 problems");
assert_contains!(output, "Checked 2 files");
}
itest!(no_slow_types {
args: "lint",
output: "lint/no_slow_types/no_slow_types.out",
cwd: Some("lint/no_slow_types"),
exit_code: 1,
});
itest!(no_slow_types_entrypoint {
args: "lint a.ts",
output: "lint/no_slow_types/no_slow_types_entrypoint.out",
cwd: Some("lint/no_slow_types"),
exit_code: 1,
});
itest!(no_slow_types_non_entrypoint {
args: "lint d.ts",
output_str: Some("Checked 1 file\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_excluded {
args: "lint --rules-exclude=no-slow-types",
output_str: Some("Checked 4 files\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_non_package {
args: "lint --config=deno.non-package.json",
output_str: Some("Checked 4 files\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_workspace {
args: "lint",
output: "lint/no_slow_types_workspace/output.out",
cwd: Some("lint/no_slow_types_workspace"),
exit_code: 1,
});


@@ -22,17 +22,17 @@ itest!(missing_deno_json {
exit_code: 1,
});
-itest!(invalid_fast_check {
+itest!(has_slow_types {
args: "publish --token 'sadfasdf'",
-output: "publish/invalid_fast_check.out",
-cwd: Some("publish/invalid_fast_check"),
+output: "publish/has_slow_types.out",
+cwd: Some("publish/has_slow_types"),
exit_code: 1,
});
-itest!(no_zap {
-args: "publish --no-zap --token 'sadfasdf'",
-output: "publish/no_zap.out",
-cwd: Some("publish/invalid_fast_check"),
+itest!(allow_slow_types {
+args: "publish --allow-slow-types --token 'sadfasdf'",
+output: "publish/allow_slow_types.out",
+cwd: Some("publish/has_slow_types"),
envs: env_vars_for_jsr_tests(),
http_server: true,
exit_code: 0,
@@ -83,7 +83,9 @@ fn publish_non_exported_files_using_import_map() {
.new_command()
.args("publish --log-level=debug --token 'sadfasdf'")
.run();
+output.assert_exit_code(0);
let lines = output.combined_output().split('\n').collect::<Vec<_>>();
+eprintln!("{}", output.combined_output());
assert!(lines
.iter()
.any(|l| l.contains("Unfurling") && l.ends_with("mod.ts")));


@ -0,0 +1,3 @@
export function add(a: number, b: number) {
return a + b;
}


@ -0,0 +1,5 @@
export function addB(a: number, b: number) {
return a + b;
}
export * from "./d.ts";


@ -0,0 +1,4 @@
// this one won't error because it's not an export
export function addC(a: number, b: number) {
return a + b;
}


@ -0,0 +1,4 @@
// this one is re-exported via b.ts
export function addD(a: number, b: number) {
return a + b;
}


@ -0,0 +1,8 @@
{
"name": "@pkg/pkg",
"version": "1.0.0",
"exports": {
"./a": "./a.ts",
"./b": "./b.ts"
}
}


@ -0,0 +1,2 @@
{
}


@ -0,0 +1,35 @@
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]a.ts:1:17
|
1 | export function add(a: number, b: number) {
| ^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]b.ts:1:17
|
1 | export function addB(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]d.ts:2:17
|
2 | export function addD(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
Found 3 problems
Checked 4 files


@ -0,0 +1,35 @@
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]a.ts:1:17
|
1 | export function add(a: number, b: number) {
| ^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]b.ts:1:17
|
1 | export function addB(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]d.ts:2:17
|
2 | export function addD(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
Found 3 problems
Checked 1 file


@ -0,0 +1,5 @@
export function addB(a: number, b: number) {
return a + b;
}
export * from "./d.ts";


@ -0,0 +1,4 @@
// this one is re-exported via b.ts
export function addD(a: number, b: number) {
return a + b;
}


@ -0,0 +1,8 @@
{
"name": "@pkg/a",
"version": "1.0.0",
"exports": {
"./a": "./mod.ts",
"./b": "./b.ts"
}
}


@ -0,0 +1,3 @@
export function add(a: number, b: number) {
return a + b;
}

View file

@ -0,0 +1,5 @@
{
"name": "@pkg/b",
"version": "1.0.0",
"exports": "./mod.ts"
}


@ -0,0 +1,4 @@
// ok
export function addB(a: number, b: number): number {
return a + b;
}


@ -0,0 +1,5 @@
{
"name": "@pkg/c",
"version": "1.0.0",
"exports": "./mod_c.ts"
}

View file

@ -0,0 +1,4 @@
// not ok
export function addC(a: number, b: number) {
return a + b;
}

View file

@ -0,0 +1,7 @@
{
"workspaces": [
"./a",
"./b",
"./c"
]
}


@ -0,0 +1,46 @@
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]b.ts:1:17
|
1 | export function addB(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]d.ts:2:17
|
2 | export function addD(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]mod.ts:1:17
|
1 | export function add(a: number, b: number) {
| ^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
error[no-slow-types]: missing explicit return type in the public API
--> [WILDCARD]mod_c.ts:2:17
|
2 | export function addC(a: number, b: number) {
| ^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
Found 4 problems
Checked 5 files


@ -0,0 +1,4 @@
Warning Publishing a library with slow types is not recommended. This may lead to poor type checking performance for users of your package, may affect the quality of automatic documentation generation, and your package will not be shipped with a .d.ts file for Node.js users.
Publishing @foo/bar@1.1.0 ...
Successfully published @foo/bar@1.1.0
Visit http://127.0.0.1:4250/@foo/bar@1.1.0 for details


@@ -1,5 +1,4 @@
-Checking fast check type graph for errors...
-Ensuring type checks...
+Checking for slow types in the public API...
Check file:///[WILDCARD]/publish/deno_jsonc/mod.ts
Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0
View file
@ -1,4 +1,3 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check [WILDCARD] Check [WILDCARD]
Warning Aborting due to --dry-run Warning Aborting due to --dry-run
View file
@ -0,0 +1,21 @@
Checking for slow types in the public API...
error[missing-explicit-return-type]: missing explicit return type in the public API
--> [WILDCARD]mod.ts:2:17
|
2 | export function getRandom() {
| ^^^^^^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/slow-type-missing-explicit-return-type
This package contains errors for slow types. Fixing these errors will:
1. Significantly improve your package users' type checking performance.
2. Improve the automatic documentation generation.
3. Enable automatic .d.ts generation for Node.js.
Don't want to bother? You can choose to skip this step by
providing the --allow-slow-types flag.
error: Found 1 problem
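A sketch of the change the hint asks for; the body of getRandom is not part of this diff, so the number-returning implementation below is assumed purely for illustration:

// Assumed body; only the signature appears in the diagnostic above.
export function getRandom(): number {
  return Math.random();
}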
View file
@ -1,16 +0,0 @@
Checking fast check type graph for errors...
error[zap-missing-explicit-return-type]: missing explicit return type in the public API
--> [WILDCARD]mod.ts:2:17
|
2 | export function getRandom() {
| ^^^^^^^^^ this function is missing an explicit return type
= hint: add an explicit return type to the function
info: all functions in the public API must have an explicit return type
docs: https://jsr.io/go/zap-missing-explicit-return-type
This package contains Zap errors. Although conforming to Zap will
significantly improve the type checking performance of your library,
you can choose to skip it by providing the --no-zap flag.
error: Found 1 problem
View file
@ -2,8 +2,7 @@ Download http://localhost:4545/welcome.ts
Download http://localhost:4545/echo.ts Download http://localhost:4545/echo.ts
Download http://localhost:4545/npm/registry/chalk Download http://localhost:4545/npm/registry/chalk
Download http://localhost:4545/npm/registry/chalk/chalk-5.0.1.tgz Download http://localhost:4545/npm/registry/chalk/chalk-5.0.1.tgz
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file://[WILDCARD]mod.ts Check file://[WILDCARD]mod.ts
error[invalid-external-import]: invalid import to a non-JSR 'http' specifier error[invalid-external-import]: invalid import to a non-JSR 'http' specifier
--> [WILDCARD]mod.ts:1:8 --> [WILDCARD]mod.ts:1:8
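For context, this diagnostic concerns remote-HTTP imports inside the package being published; a sketch using the welcome.ts URL from the downloads above (the imported name is hypothetical):

// Rejected at publish time: an 'http' specifier is not a JSR-resolvable import.
import { welcome } from "http://localhost:4545/welcome.ts";

// Registry-backed specifiers (e.g. jsr: or npm:) or relative paths within the package are the shapes publishing expects instead.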
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file://[WILDCARD]mod.ts Check file://[WILDCARD]mod.ts
error[invalid-path]: package path must not contain whitespace (found ' ') error[invalid-path]: package path must not contain whitespace (found ' ')
--> [WILDCARD]path with spaces.txt --> [WILDCARD]path with spaces.txt
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file:///[WILDCARD]/javascript_decl_file/mod.js Check file:///[WILDCARD]/javascript_decl_file/mod.js
Publishing @foo/bar@1.0.0 ... Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0 Successfully published @foo/bar@1.0.0
View file
@ -1,11 +1,19 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
warning[zap-unsupported-javascript-entrypoint]: used a JavaScript module without type declarations as an entrypoints warning[unsupported-javascript-entrypoint]: used a JavaScript module without type declarations as an entrypoint
--> [WILDCARD]mod.js --> [WILDCARD]mod.js
= hint: add a type declaration (d.ts) for the JavaScript module, or rewrite it to TypeScript = hint: add a type declaration (d.ts) for the JavaScript module, or rewrite it to TypeScript
info: JavaScript files with no corresponding declaration require type inference to be type checked info: JavaScript files with no corresponding declaration require type inference to be type checked
info: fast check avoids type inference, so JavaScript entrypoints should be avoided info: fast check avoids type inference, so JavaScript entrypoints should be avoided
docs: https://jsr.io/go/zap-unsupported-javascript-entrypoint docs: https://jsr.io/go/slow-type-unsupported-javascript-entrypoint
warning[unsupported-javascript-entrypoint]: used a JavaScript module without type declarations as an entrypoint
--> [WILDCARD]other.js
= hint: add a type declaration (d.ts) for the JavaScript module, or rewrite it to TypeScript
info: JavaScript files with no corresponding declaration require type inference to be type checked
info: fast check avoids type inference, so JavaScript entrypoints should be avoided
docs: https://jsr.io/go/slow-type-unsupported-javascript-entrypoint
Publishing @foo/bar@1.0.0 ... Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0 Successfully published @foo/bar@1.0.0
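The hint in this warning offers two ways out; a minimal sketch of the declaration-file route, with hypothetical module contents (the real mod.js/other.js bodies are not part of this diff):

// mod.js - JavaScript entrypoint with no type information (assumed example)
export function add(a, b) {
  return a + b;
}

// mod.d.ts - hypothetical sibling declaration file; with it, the entrypoint no longer needs type inference
export function add(a: number, b: number): number;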
View file
@ -1,5 +0,0 @@
Ensuring type checks...
Check file:///[WILDCARD]/mod.ts
Publishing @foo/bar@1.1.0 ...
Successfully published @foo/bar@1.1.0
Visit http://127.0.0.1:4250/@foo/bar@1.1.0 for details
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Download http://localhost:4545/npm/registry/@types/node Download http://localhost:4545/npm/registry/@types/node
Download http://localhost:4545/npm/registry/@types/node/node-[WILDCARD].tgz Download http://localhost:4545/npm/registry/@types/node/node-[WILDCARD].tgz
Check file:///[WILDCARD]/publish/node_specifier/mod.ts Check file:///[WILDCARD]/publish/node_specifier/mod.ts
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file:///[WILDCARD]/publish/successful/mod.ts Check file:///[WILDCARD]/publish/successful/mod.ts
Publishing @foo/bar@1.0.0 ... Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0 Successfully published @foo/bar@1.0.0
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check [WILDCARD]mod.ts Check [WILDCARD]mod.ts
warning[unsupported-file-type]: unsupported file type 'symlink' warning[unsupported-file-type]: unsupported file type 'symlink'
--> [WILDCARD]symlink --> [WILDCARD]symlink
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file://[WILDCARD]/mod.ts Check file://[WILDCARD]/mod.ts
warning[unanalyzable-dynamic-import]: unable to analyze dynamic import warning[unanalyzable-dynamic-import]: unable to analyze dynamic import
--> [WILDCARD]mod.ts:1:7 --> [WILDCARD]mod.ts:1:7
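As context for this warning, a dynamic import is unanalyzable when its specifier cannot be determined statically; a hypothetical sketch (the real mod.ts contents are not in this diff):

// Cannot be analyzed ahead of time: the specifier is only known at runtime.
const mod = await import(pickModulePath());

// Analyzable: a string-literal specifier.
const other = await import("./other.ts");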
View file
@ -1,6 +1,5 @@
Publishing a workspace... Publishing a workspace...
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file:///[WILDCARD]/workspace/foo/mod.ts Check file:///[WILDCARD]/workspace/foo/mod.ts
Check file:///[WILDCARD]/workspace/bar/mod.ts Check file:///[WILDCARD]/workspace/bar/mod.ts
Publishing @foo/bar@1.0.0 ... Publishing @foo/bar@1.0.0 ...
View file
@ -1,5 +1,4 @@
Checking fast check type graph for errors... Checking for slow types in the public API...
Ensuring type checks...
Check file:///[WILDCARD]/workspace/bar/mod.ts Check file:///[WILDCARD]/workspace/bar/mod.ts
Publishing @foo/bar@1.0.0 ... Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0 Successfully published @foo/bar@1.0.0