refactor: rewrite TS dependency analysis in Rust (#5029)

This commit completely overhauls how module analysis is
performed in the TS compiler by moving the logic to Rust.

In the current setup, module analysis is performed using the
"ts.preProcessFile" API in a special TS compiler worker
running on a separate thread.

"ts.preProcessFile" allowed us to build a lot of functionality
in CLI including X-TypeScript-Types header support 
and @deno-types directive support. Unfortunately at the 
same time complexity of the ops required to perform 
supporting tasks exploded and caused some hidden 
permission escapes.

This PR introduces "ModuleGraphLoader", which can parse a
source file and recursively load all of its dependent source
files, as well as declaration files. All dependencies used by
the TS compiler are now fetched and collected upfront in Rust,
before the TS compiler is spun up.

To achieve feature parity with the existing APIs, this commit
includes a lot of changes:

* add "ModuleGraphLoader"
  - can fetch local and remote sources
  - parses source code using SWC and extracts imports, exports, file references, special 
     headers
  - this struct inherited all of the hidden complexity and cruft from TS version and requires 
     several follow up PRs
* rewrite cli/tsc.rs to perform module analysis upfront and send all required source code to 
  TS worker in one message
* remove op_resolve_modules and op_fetch_source_files from cli/ops/compiler.rs
* run TS worker on the same thread
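
Below is a minimal, hedged sketch (not part of the diff) of how the new
loader is driven end to end, modeled on the test helper added in
cli/module_graph.rs; the function name and root specifier are placeholders:

    use crate::module_graph::ModuleGraphLoader;
    use crate::permissions::Permissions;
    use crate::GlobalState;
    use deno_core::ErrBox;
    use deno_core::ModuleSpecifier;

    // Hypothetical driver; mirrors build_graph() from the new tests.
    async fn build_graph_example() -> Result<(), ErrBox> {
      let global_state = GlobalState::new(Default::default())?;
      let mut loader = ModuleGraphLoader::new(
        global_state.file_fetcher.clone(),
        None,                      // no import map
        Permissions::allow_all(),
        false,                     // is_dyn_import
        false,                     // analyze_dynamic_imports
      );
      // Placeholder root module; any local or remote specifier works.
      let root =
        ModuleSpecifier::resolve_url_or_path("https://example.com/mod.ts")?;
      // Fetches the root and, recursively, every static import, reference,
      // @deno-types target and X-TypeScript-Types target.
      loader.add_to_graph(&root).await?;
      // The finished graph is what ends up serialized into the compile
      // request's "sourceFileMap" (see CompilerRequestCompile in compiler.ts).
      let _graph = loader.get_graph();
      Ok(())
    }
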
Bartek Iwańczuk 2020-05-18 12:59:29 +02:00 committed by GitHub
parent ce81064e4c
commit 9d63772fe5
21 changed files with 1512 additions and 699 deletions


@ -34,6 +34,7 @@ pub struct SourceFile {
pub url: Url,
pub filename: PathBuf,
pub types_url: Option<Url>,
pub types_header: Option<String>,
pub media_type: msg::MediaType,
pub source_code: Vec<u8>,
}
@ -323,6 +324,7 @@ impl SourceFileFetcher {
media_type,
source_code,
types_url,
types_header: None,
})
}
@ -380,6 +382,7 @@ impl SourceFileFetcher {
&fake_filepath,
headers.get("content-type").map(|e| e.as_str()),
);
let types_header = headers.get("x-typescript-types").map(|e| e.to_string());
let types_url = match media_type {
msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
&module_url,
@ -394,6 +397,7 @@ impl SourceFileFetcher {
media_type,
source_code,
types_url,
types_header,
}))
}
@ -502,6 +506,8 @@ impl SourceFileFetcher {
headers.get("content-type").map(String::as_str),
);
let types_header =
headers.get("x-typescript-types").map(String::to_string);
let types_url = match media_type {
msg::MediaType::JavaScript | msg::MediaType::JSX => get_types_url(
&module_url,
@ -517,6 +523,7 @@ impl SourceFileFetcher {
media_type,
source_code: source,
types_url,
types_header,
};
Ok(source_file)


@ -95,6 +95,7 @@ impl GlobalState {
maybe_referrer: Option<ModuleSpecifier>,
target_lib: TargetLib,
permissions: Permissions,
is_dyn_import: bool,
) -> Result<CompiledModule, ErrBox> {
let state1 = self.clone();
let state2 = self.clone();
@ -115,14 +116,20 @@ impl GlobalState {
| msg::MediaType::JSX => {
state1
.ts_compiler
.compile(state1.clone(), &out, target_lib, permissions.clone())
.compile(state1.clone(), &out, target_lib, permissions, is_dyn_import)
.await
}
msg::MediaType::JavaScript => {
if state1.ts_compiler.compile_js {
state2
.ts_compiler
.compile(state1.clone(), &out, target_lib, permissions.clone())
.compile(
state1.clone(),
&out,
target_lib,
permissions,
is_dyn_import,
)
.await
} else {
if let Some(types_url) = out.types_url.clone() {


@ -17,39 +17,12 @@ import { CompilerOptions } from "./compiler_options.ts";
import { Diagnostic, DiagnosticItem } from "./diagnostics.ts";
import { fromTypeScriptDiagnostic } from "./diagnostics_util.ts";
import { TranspileOnlyResult } from "./ops/runtime_compiler.ts";
import { sendAsync, sendSync } from "./ops/dispatch_json.ts";
import { bootstrapWorkerRuntime } from "./runtime_worker.ts";
import { assert, log } from "./util.ts";
import { assert } from "./util.ts";
import * as util from "./util.ts";
import { TextDecoder, TextEncoder } from "./web/text_encoding.ts";
import { core } from "./core.ts";
export function resolveModules(
specifiers: string[],
referrer?: string
): string[] {
util.log("compiler::resolveModules", { specifiers, referrer });
return sendSync("op_resolve_modules", { specifiers, referrer });
}
export function fetchSourceFiles(
specifiers: string[],
referrer?: string
): Promise<
Array<{
url: string;
filename: string;
mediaType: number;
sourceCode: string;
}>
> {
util.log("compiler::fetchSourceFiles", { specifiers, referrer });
return sendAsync("op_fetch_source_files", {
specifiers,
referrer,
});
}
const encoder = new TextEncoder();
const decoder = new TextDecoder();
@ -263,75 +236,6 @@ class SourceFile {
this.extension = getExtension(this.url, this.mediaType);
}
imports(processJsImports: boolean): SourceFileSpecifierMap[] {
if (this.processed) {
throw new Error("SourceFile has already been processed.");
}
assert(this.sourceCode != null);
// we shouldn't process imports for files which contain the nocheck pragma
// (like bundles)
if (this.sourceCode.match(/\/{2}\s+@ts-nocheck/)) {
log(`Skipping imports for "${this.filename}"`);
return [];
}
const readImportFiles = true;
const isJsOrJsx =
this.mediaType === MediaType.JavaScript ||
this.mediaType === MediaType.JSX;
const detectJsImports = isJsOrJsx;
const preProcessedFileInfo = ts.preProcessFile(
this.sourceCode,
readImportFiles,
detectJsImports
);
this.processed = true;
const files: SourceFileSpecifierMap[] = [];
function process(references: Array<{ fileName: string }>): void {
for (const { fileName } of references) {
files.push({ original: fileName, mapped: fileName });
}
}
const {
importedFiles,
referencedFiles,
libReferenceDirectives,
typeReferenceDirectives,
} = preProcessedFileInfo;
const typeDirectives = parseTypeDirectives(this.sourceCode);
if (typeDirectives) {
for (const importedFile of importedFiles) {
// If there's a type directive for current processed file; then we provide
// different `mapped` specifier.
const mappedModuleName = getMappedModuleName(
importedFile,
typeDirectives
);
files.push({
original: importedFile.fileName,
mapped: mappedModuleName ?? importedFile.fileName,
});
}
} else if (processJsImports || !isJsOrJsx) {
process(importedFiles);
}
process(referencedFiles);
// built in libs comes across as `"dom"` for example, and should be filtered
// out during pre-processing as they are either already cached or they will
// be lazily fetched by the compiler host. Ones that contain full files are
// not filtered out and will be fetched as normal.
const filteredLibs = libReferenceDirectives.filter(
({ fileName }) => !ts.libMap.has(fileName.toLowerCase())
);
process(filteredLibs);
process(typeReferenceDirectives);
return files;
}
static addToCache(json: SourceFileJson): SourceFile {
if (SOURCE_FILE_CACHE.has(json.url)) {
throw new TypeError("SourceFile already exists");
@ -546,6 +450,13 @@ class Host implements ts.CompilerHost {
return moduleNames.map((specifier) => {
const maybeUrl = SourceFile.getResolvedUrl(specifier, containingFile);
util.log("compiler::host.resolveModuleNames maybeUrl", {
specifier,
containingFile,
maybeUrl,
sf: SourceFile.getCached(maybeUrl!),
});
let sourceFile: SourceFile | undefined = undefined;
if (specifier.startsWith(ASSETS)) {
@ -623,7 +534,10 @@ SNAPSHOT_HOST.getSourceFile(
ts.ScriptTarget.ESNext
);
const TS_SNAPSHOT_PROGRAM = ts.createProgram({
// We never use this program; it's only created
// during snapshotting to hydrate and populate
// source file cache with lib declaration files.
const _TS_SNAPSHOT_PROGRAM = ts.createProgram({
rootNames: [`${ASSETS}/bootstrap.ts`],
options: SNAPSHOT_COMPILER_OPTIONS,
host: SNAPSHOT_HOST,
@ -632,25 +546,6 @@ const TS_SNAPSHOT_PROGRAM = ts.createProgram({
// This function is called only during snapshotting process
const SYSTEM_LOADER = getAsset("system_loader.js");
function resolveSpecifier(specifier: string, referrer: string): string {
// The resolveModules op only handles fully qualified URLs for referrer.
// However we will have cases where referrer is "/foo.ts". We add this dummy
// prefix "file://" in order to use the op.
// TODO(ry) Maybe we should perhaps ModuleSpecifier::resolve_import() to
// handle this situation.
let dummyPrefix = false;
const prefix = "file://";
if (referrer.startsWith("/")) {
dummyPrefix = true;
referrer = prefix + referrer;
}
let r = resolveModules([specifier], referrer)[0];
if (dummyPrefix) {
r = r.replace(prefix, "");
}
return r;
}
function getMediaType(filename: string): MediaType {
const maybeExtension = /\.([a-zA-Z]+)$/.exec(filename);
if (!maybeExtension) {
@ -675,155 +570,107 @@ function getMediaType(filename: string): MediaType {
}
}
function processLocalImports(
sources: Record<string, string>,
specifiers: SourceFileSpecifierMap[],
referrer?: string,
processJsImports = false
): string[] {
if (!specifiers.length) {
return [];
}
const moduleNames = specifiers.map((specifierMap) => {
if (referrer) {
return resolveSpecifier(specifierMap.mapped, referrer);
} else {
return specifierMap.mapped;
}
});
function buildLocalSourceFileCache(
sourceFileMap: Record<string, SourceFileMapEntry>
): void {
for (const entry of Object.values(sourceFileMap)) {
assert(entry.sourceCode.length > 0);
SourceFile.addToCache({
url: entry.url,
filename: entry.url,
mediaType: getMediaType(entry.url),
sourceCode: entry.sourceCode,
});
for (let i = 0; i < moduleNames.length; i++) {
const moduleName = moduleNames[i];
const specifierMap = specifiers[i];
assert(moduleName in sources, `Missing module in sources: "${moduleName}"`);
let sourceFile = SourceFile.getCached(moduleName);
if (typeof sourceFile === "undefined") {
sourceFile = SourceFile.addToCache({
url: moduleName,
filename: moduleName,
sourceCode: sources[moduleName],
mediaType: getMediaType(moduleName),
});
for (const importDesc of entry.imports) {
let mappedUrl = importDesc.resolvedSpecifier;
const importedFile = sourceFileMap[importDesc.resolvedSpecifier];
assert(importedFile);
const isJsOrJsx =
importedFile.mediaType === MediaType.JavaScript ||
importedFile.mediaType === MediaType.JSX;
// If the imported file is JS or JSX, substitute type definitions when
// available, preferring the X-TypeScript-Types header, then an inline
// @deno-types directive on the import, then a types reference directive
// in the imported file.
if (isJsOrJsx) {
if (importedFile.typeHeaders.length > 0) {
const typeHeaders = importedFile.typeHeaders[0];
mappedUrl = typeHeaders.resolvedSpecifier;
} else if (importDesc.resolvedTypeDirective) {
mappedUrl = importDesc.resolvedTypeDirective;
} else if (importedFile.typesDirectives.length > 0) {
const typeDirective = importedFile.typesDirectives[0];
mappedUrl = typeDirective.resolvedSpecifier;
}
}
mappedUrl = mappedUrl.replace("memory://", "");
SourceFile.cacheResolvedUrl(mappedUrl, importDesc.specifier, entry.url);
}
assert(sourceFile);
SourceFile.cacheResolvedUrl(
sourceFile.url,
specifierMap.original,
referrer
);
if (!sourceFile.processed) {
processLocalImports(
sources,
sourceFile.imports(processJsImports),
sourceFile.url,
processJsImports
for (const fileRef of entry.referencedFiles) {
SourceFile.cacheResolvedUrl(
fileRef.resolvedSpecifier.replace("memory://", ""),
fileRef.specifier,
entry.url
);
}
for (const fileRef of entry.libDirectives) {
SourceFile.cacheResolvedUrl(
fileRef.resolvedSpecifier.replace("memory://", ""),
fileRef.specifier,
entry.url
);
}
}
return moduleNames;
}
async function processImports(
specifiers: SourceFileSpecifierMap[],
referrer?: string,
processJsImports = false
): Promise<string[]> {
if (!specifiers.length) {
return [];
}
const sources = specifiers.map(({ mapped }) => mapped);
const resolvedSources = resolveModules(sources, referrer);
const sourceFiles = await fetchSourceFiles(resolvedSources, referrer);
assert(sourceFiles.length === specifiers.length);
for (let i = 0; i < sourceFiles.length; i++) {
const specifierMap = specifiers[i];
const sourceFileJson = sourceFiles[i];
let sourceFile = SourceFile.getCached(sourceFileJson.url);
if (typeof sourceFile === "undefined") {
sourceFile = SourceFile.addToCache(sourceFileJson);
}
assert(sourceFile);
SourceFile.cacheResolvedUrl(
sourceFile.url,
specifierMap.original,
referrer
);
if (!sourceFile.processed) {
const sourceFileImports = sourceFile.imports(processJsImports);
await processImports(sourceFileImports, sourceFile.url, processJsImports);
}
}
return resolvedSources;
}
interface FileReference {
fileName: string;
pos: number;
end: number;
}
function getMappedModuleName(
source: FileReference,
typeDirectives: Map<FileReference, string>
): string | undefined {
const { fileName: sourceFileName, pos: sourcePos } = source;
for (const [{ fileName, pos }, value] of typeDirectives.entries()) {
if (sourceFileName === fileName && sourcePos === pos) {
return value;
}
}
return undefined;
}
const typeDirectiveRegEx = /@deno-types\s*=\s*(["'])((?:(?=(\\?))\3.)*?)\1/gi;
const importExportRegEx = /(?:import|export)(?:\s+|\s+[\s\S]*?from\s+)?(["'])((?:(?=(\\?))\3.)*?)\1/;
function parseTypeDirectives(
sourceCode: string | undefined
): Map<FileReference, string> | undefined {
if (!sourceCode) {
return;
}
// collect all the directives in the file and their start and end positions
const directives: FileReference[] = [];
let maybeMatch: RegExpExecArray | null = null;
while ((maybeMatch = typeDirectiveRegEx.exec(sourceCode))) {
const [matchString, , fileName] = maybeMatch;
const { index: pos } = maybeMatch;
directives.push({
fileName,
pos,
end: pos + matchString.length,
function buildSourceFileCache(
sourceFileMap: Record<string, SourceFileMapEntry>
): void {
for (const entry of Object.values(sourceFileMap)) {
assert(entry.sourceCode.length > 0);
SourceFile.addToCache({
url: entry.url,
filename: entry.url,
mediaType: entry.mediaType,
sourceCode: entry.sourceCode,
});
}
if (!directives.length) {
return;
}
// work from the last directive backwards for the next `import`/`export`
// statement
directives.reverse();
const results = new Map<FileReference, string>();
for (const { end, fileName, pos } of directives) {
const searchString = sourceCode.substring(end);
const maybeMatch = importExportRegEx.exec(searchString);
if (maybeMatch) {
const [matchString, , targetFileName] = maybeMatch;
const targetPos =
end + maybeMatch.index + matchString.indexOf(targetFileName) - 1;
const target: FileReference = {
fileName: targetFileName,
pos: targetPos,
end: targetPos + targetFileName.length,
};
results.set(target, fileName);
for (const importDesc of entry.imports) {
let mappedUrl = importDesc.resolvedSpecifier;
const importedFile = sourceFileMap[importDesc.resolvedSpecifier];
assert(importedFile);
const isJsOrJsx =
importedFile.mediaType === MediaType.JavaScript ||
importedFile.mediaType === MediaType.JSX;
// If the imported file is JS or JSX, substitute type definitions when
// available, preferring the X-TypeScript-Types header, then an inline
// @deno-types directive on the import, then a types reference directive
// in the imported file.
if (isJsOrJsx) {
if (importedFile.typeHeaders.length > 0) {
const typeHeaders = importedFile.typeHeaders[0];
mappedUrl = typeHeaders.resolvedSpecifier;
} else if (importDesc.resolvedTypeDirective) {
mappedUrl = importDesc.resolvedTypeDirective;
} else if (importedFile.typesDirectives.length > 0) {
const typeDirective = importedFile.typesDirectives[0];
mappedUrl = typeDirective.resolvedSpecifier;
}
}
SourceFile.cacheResolvedUrl(mappedUrl, importDesc.specifier, entry.url);
}
for (const fileRef of entry.referencedFiles) {
SourceFile.cacheResolvedUrl(
fileRef.resolvedSpecifier,
fileRef.specifier,
entry.url
);
}
for (const fileRef of entry.libDirectives) {
SourceFile.cacheResolvedUrl(
fileRef.resolvedSpecifier,
fileRef.specifier,
entry.url
);
}
sourceCode = sourceCode.substring(0, pos);
}
return results;
}
interface EmmitedSource {
@ -1209,24 +1056,48 @@ function setRootExports(program: ts.Program, rootModule: string): void {
.map((sym) => sym.getName());
}
interface ImportDescriptor {
specifier: string;
resolvedSpecifier: string;
typeDirective?: string;
resolvedTypeDirective?: string;
}
interface ReferenceDescriptor {
specifier: string;
resolvedSpecifier: string;
}
interface SourceFileMapEntry {
// fully resolved URL
url: string;
sourceCode: string;
mediaType: MediaType;
imports: ImportDescriptor[];
referencedFiles: ReferenceDescriptor[];
libDirectives: ReferenceDescriptor[];
typesDirectives: ReferenceDescriptor[];
typeHeaders: ReferenceDescriptor[];
}
interface CompilerRequestCompile {
type: CompilerRequestType.Compile;
target: CompilerHostTarget;
rootNames: string[];
// TODO(ry) add compiler config to this interface.
// options: ts.CompilerOptions;
configPath?: string;
config?: string;
unstable: boolean;
bundle: boolean;
cwd: string;
// key value is fully resolved URL
sourceFileMap: Record<string, SourceFileMapEntry>;
}
interface CompilerRequestRuntimeCompile {
type: CompilerRequestType.RuntimeCompile;
target: CompilerHostTarget;
rootName: string;
sources?: Record<string, string>;
rootNames: string[];
sourceFileMap: Record<string, SourceFileMapEntry>;
unstable?: boolean;
bundle?: boolean;
options?: string;
@ -1259,9 +1130,7 @@ interface RuntimeBundleResult {
diagnostics: DiagnosticItem[];
}
async function compile(
request: CompilerRequestCompile
): Promise<CompileResult> {
function compile(request: CompilerRequestCompile): CompileResult {
const {
bundle,
config,
@ -1270,6 +1139,7 @@ async function compile(
target,
unstable,
cwd,
sourceFileMap,
} = request;
util.log(">>> compile start", {
rootNames,
@ -1307,18 +1177,7 @@ async function compile(
diagnostics = processConfigureResponse(configResult, configPath) || [];
}
// This will recursively analyse all the code for other imports,
// requesting those from the privileged side, populating the in memory
// cache which will be used by the host, before resolving.
const specifiers = rootNames.map((rootName) => {
return { original: rootName, mapped: rootName };
});
const resolvedRootModules = await processImports(
specifiers,
undefined,
bundle || host.getCompilationSettings().checkJs
);
buildSourceFileCache(sourceFileMap);
// if there was a configuration and no diagnostics with it, we will continue
// to generate the program and possibly emit it.
if (diagnostics.length === 0) {
@ -1327,7 +1186,6 @@ async function compile(
rootNames,
options,
host,
oldProgram: TS_SNAPSHOT_PROGRAM,
});
diagnostics = ts
@ -1338,8 +1196,8 @@ async function compile(
if (diagnostics && diagnostics.length === 0) {
if (bundle) {
// we only support a single root module when bundling
assert(resolvedRootModules.length === 1);
setRootExports(program, resolvedRootModules[0]);
assert(rootNames.length === 1);
setRootExports(program, rootNames[0]);
}
const emitResult = program.emit();
assert(emitResult.emitSkipped === false, "Unexpected skip of the emit.");
@ -1351,7 +1209,7 @@ async function compile(
let bundleOutput = undefined;
if (bundle) {
if (diagnostics && diagnostics.length === 0 && bundle) {
assert(state.bundleOutput);
bundleOutput = state.bundleOutput;
}
@ -1371,71 +1229,38 @@ async function compile(
return result;
}
async function runtimeCompile(
function runtimeCompile(
request: CompilerRequestRuntimeCompile
): Promise<RuntimeCompileResult | RuntimeBundleResult> {
const { bundle, options, rootName, sources, target, unstable } = request;
): RuntimeCompileResult | RuntimeBundleResult {
const {
bundle,
options,
rootNames,
target,
unstable,
sourceFileMap,
} = request;
util.log(">>> runtime compile start", {
rootName,
rootNames,
bundle,
sources: sources ? Object.keys(sources) : undefined,
});
// resolve the root name, if there are sources, the root name does not
// get resolved
const resolvedRootName = sources ? rootName : resolveModules([rootName])[0];
// if there are options, convert them into TypeScript compiler options,
// and resolve any external file references
let convertedOptions: ts.CompilerOptions | undefined;
let additionalFiles: string[] | undefined;
if (options) {
const result = convertCompilerOptions(options);
convertedOptions = result.options;
additionalFiles = result.files;
}
const checkJsImports =
bundle || (convertedOptions && convertedOptions.checkJs);
// recursively process imports, loading each file into memory. If there
// are sources, these files are pulled out of the there, otherwise the
// files are retrieved from the privileged side
const specifiers = [
{
original: resolvedRootName,
mapped: resolvedRootName,
},
];
const rootNames = sources
? processLocalImports(sources, specifiers, undefined, checkJsImports)
: await processImports(specifiers, undefined, checkJsImports);
if (additionalFiles) {
// any files supplied in the configuration are resolved externally,
// even if sources are provided
const resolvedNames = resolveModules(additionalFiles);
const resolvedSpecifiers = resolvedNames.map((rn) => {
return {
original: rn,
mapped: rn,
};
});
const additionalImports = await processImports(
resolvedSpecifiers,
undefined,
checkJsImports
);
rootNames.push(...additionalImports);
}
buildLocalSourceFileCache(sourceFileMap);
const state: WriteFileState = {
type: request.type,
bundle,
host: undefined,
rootNames,
sources,
emitMap: {},
bundleOutput: undefined,
};
@ -1472,7 +1297,6 @@ async function runtimeCompile(
rootNames,
options: host.getCompilationSettings(),
host,
oldProgram: TS_SNAPSHOT_PROGRAM,
});
if (bundle) {
@ -1489,8 +1313,7 @@ async function runtimeCompile(
assert(state.emitMap);
util.log("<<< runtime compile finish", {
rootName,
sources: sources ? Object.keys(sources) : undefined,
rootNames,
bundle,
emitMap: Object.keys(state.emitMap),
});
@ -1545,14 +1368,12 @@ async function tsCompilerOnMessage({
}): Promise<void> {
switch (request.type) {
case CompilerRequestType.Compile: {
const result = await compile(request as CompilerRequestCompile);
const result = compile(request as CompilerRequestCompile);
globalThis.postMessage(result);
break;
}
case CompilerRequestType.RuntimeCompile: {
const result = await runtimeCompile(
request as CompilerRequestRuntimeCompile
);
const result = runtimeCompile(request as CompilerRequestRuntimeCompile);
globalThis.postMessage(result);
break;
}
@ -1570,7 +1391,8 @@ async function tsCompilerOnMessage({
} (${CompilerRequestType[(request as CompilerRequest).type]})`
);
}
// Currently Rust shuts down worker after single request
// Shutdown after single request
globalThis.close();
}
function bootstrapTsCompilerRuntime(): void {


@ -42,6 +42,7 @@ pub mod installer;
mod js;
mod lockfile;
mod metrics;
mod module_graph;
pub mod msg;
pub mod op_error;
pub mod ops;
@ -69,6 +70,7 @@ pub use dprint_plugin_typescript::swc_ecma_parser;
use crate::doc::parser::DocFileLoader;
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::fs as deno_fs;
use crate::global_state::GlobalState;
use crate::msg::MediaType;
use crate::op_error::OpError;
@ -210,6 +212,7 @@ async fn print_file_info(
None,
TargetLib::Main,
Permissions::allow_all(),
false,
)
.await?;
@ -354,6 +357,7 @@ async fn eval_command(
filename: main_module_url.to_file_path().unwrap(),
url: main_module_url,
types_url: None,
types_header: None,
media_type: if as_typescript {
MediaType::TypeScript
} else {
@ -382,12 +386,21 @@ async fn bundle_command(
source_file: String,
out_file: Option<PathBuf>,
) -> Result<(), ErrBox> {
let module_name = ModuleSpecifier::resolve_url_or_path(&source_file)?;
let mut module_name = ModuleSpecifier::resolve_url_or_path(&source_file)?;
let url = module_name.as_url();
// TODO(bartlomieju): fix this hack in ModuleSpecifier
if url.scheme() == "file" {
let a = deno_fs::normalize_path(&url.to_file_path().unwrap());
let u = Url::from_file_path(a).unwrap();
module_name = ModuleSpecifier::from(u)
}
let global_state = GlobalState::new(flags)?;
debug!(">>>>> bundle START");
let bundle_result = global_state
.ts_compiler
.bundle(global_state.clone(), module_name.to_string(), out_file)
.bundle(global_state.clone(), module_name, out_file)
.await;
debug!(">>>>> bundle END");
bundle_result
@ -530,6 +543,7 @@ async fn test_command(
filename: test_file_url.to_file_path().unwrap(),
url: test_file_url,
types_url: None,
types_header: None,
media_type: MediaType::TypeScript,
source_code: test_file.clone().into_bytes(),
};

cli/module_graph.rs (new file, 732 lines)

@ -0,0 +1,732 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
use crate::file_fetcher::SourceFile;
use crate::file_fetcher::SourceFileFetcher;
use crate::import_map::ImportMap;
use crate::msg::MediaType;
use crate::op_error::OpError;
use crate::permissions::Permissions;
use crate::swc_util::analyze_dependencies_and_references;
use crate::swc_util::TsReferenceKind;
use crate::tsc::get_available_libs;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use futures::stream::FuturesUnordered;
use futures::stream::StreamExt;
use futures::Future;
use futures::FutureExt;
use serde::Serialize;
use serde::Serializer;
use std::collections::HashMap;
use std::hash::BuildHasher;
use std::pin::Pin;
fn serialize_module_specifier<S>(
spec: &ModuleSpecifier,
s: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
s.serialize_str(&spec.to_string())
}
fn serialize_option_module_specifier<S>(
maybe_spec: &Option<ModuleSpecifier>,
s: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if let Some(spec) = maybe_spec {
serialize_module_specifier(spec, s)
} else {
s.serialize_none()
}
}
#[derive(Debug, Serialize)]
pub struct ModuleGraph(HashMap<String, ModuleGraphFile>);
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ImportDescriptor {
specifier: String,
#[serde(serialize_with = "serialize_module_specifier")]
resolved_specifier: ModuleSpecifier,
// These two fields are for support of @deno-types directive
// directly prepending import statement
type_directive: Option<String>,
#[serde(serialize_with = "serialize_option_module_specifier")]
resolved_type_directive: Option<ModuleSpecifier>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ReferenceDescriptor {
specifier: String,
#[serde(serialize_with = "serialize_module_specifier")]
resolved_specifier: ModuleSpecifier,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ModuleGraphFile {
pub specifier: String,
pub url: String,
pub filename: String,
pub imports: Vec<ImportDescriptor>,
pub referenced_files: Vec<ReferenceDescriptor>,
pub lib_directives: Vec<ReferenceDescriptor>,
pub types_directives: Vec<ReferenceDescriptor>,
pub type_headers: Vec<ReferenceDescriptor>,
pub media_type: i32,
pub source_code: String,
}
type SourceFileFuture =
Pin<Box<dyn Future<Output = Result<SourceFile, ErrBox>>>>;
pub struct ModuleGraphLoader {
permissions: Permissions,
file_fetcher: SourceFileFetcher,
maybe_import_map: Option<ImportMap>,
pending_downloads: FuturesUnordered<SourceFileFuture>,
pub graph: ModuleGraph,
is_dyn_import: bool,
analyze_dynamic_imports: bool,
}
impl ModuleGraphLoader {
pub fn new(
file_fetcher: SourceFileFetcher,
maybe_import_map: Option<ImportMap>,
permissions: Permissions,
is_dyn_import: bool,
analyze_dynamic_imports: bool,
) -> Self {
Self {
file_fetcher,
permissions,
maybe_import_map,
pending_downloads: FuturesUnordered::new(),
graph: ModuleGraph(HashMap::new()),
is_dyn_import,
analyze_dynamic_imports,
}
}
/// This method is used to add the specified module and all of its
/// dependencies to the graph.
///
/// It resolves when all dependent modules have been fetched and analyzed.
///
/// This method can be called multiple times.
pub async fn add_to_graph(
&mut self,
specifier: &ModuleSpecifier,
) -> Result<(), ErrBox> {
self.download_module(specifier.clone(), None)?;
loop {
let source_file = self.pending_downloads.next().await.unwrap()?;
self.visit_module(&source_file.url.clone().into(), source_file)?;
if self.pending_downloads.is_empty() {
break;
}
}
Ok(())
}
/// This method is used to create a graph from in-memory files stored in
/// a hash map. Useful for creating a module graph for code received from
/// the runtime.
pub fn build_local_graph<S: BuildHasher>(
&mut self,
_root_name: &str,
source_map: &HashMap<String, String, S>,
) -> Result<(), ErrBox> {
for (spec, source_code) in source_map.iter() {
self.visit_memory_module(spec.to_string(), source_code.to_string())?;
}
Ok(())
}
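// Illustrative usage sketch (comments only, not part of this commit) for the
// runtime-compile path, where sources come from a user-supplied in-memory map
// rather than disk or network; `loader` is an already-constructed
// ModuleGraphLoader and the specifiers below are hypothetical:
//
//   let mut sources = HashMap::new();
//   sources.insert("/main.ts".to_string(), "import './dep.ts';".to_string());
//   sources.insert("/dep.ts".to_string(), "export const dep = 1;".to_string());
//   loader.build_local_graph("/main.ts", &sources)?;
//   let graph = loader.get_graph();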
/// Consumes the loader and returns created graph.
pub fn get_graph(self) -> HashMap<String, ModuleGraphFile> {
self.graph.0
}
fn visit_memory_module(
&mut self,
specifier: String,
source_code: String,
) -> Result<(), ErrBox> {
let mut imports = vec![];
let mut referenced_files = vec![];
let mut lib_directives = vec![];
let mut types_directives = vec![];
// FIXME(bartlomieju):
// The resolveModules op only handles fully qualified URLs for referrer.
// However we will have cases where referrer is "/foo.ts". We add this dummy
// prefix "memory://" in order to use resolution logic.
let module_specifier =
if let Ok(spec) = ModuleSpecifier::resolve_url(&specifier) {
spec
} else {
ModuleSpecifier::resolve_url(&format!("memory://{}", specifier))?
};
let (import_descs, ref_descs) = analyze_dependencies_and_references(
&source_code,
self.analyze_dynamic_imports,
)?;
for import_desc in import_descs {
let maybe_resolved =
if let Some(import_map) = self.maybe_import_map.as_ref() {
import_map
.resolve(&import_desc.specifier, &module_specifier.to_string())?
} else {
None
};
let resolved_specifier = if let Some(resolved) = maybe_resolved {
resolved
} else {
ModuleSpecifier::resolve_import(
&import_desc.specifier,
&module_specifier.to_string(),
)?
};
let resolved_type_directive =
if let Some(types_specifier) = import_desc.deno_types.as_ref() {
Some(ModuleSpecifier::resolve_import(
&types_specifier,
&module_specifier.to_string(),
)?)
} else {
None
};
let import_descriptor = ImportDescriptor {
specifier: import_desc.specifier.to_string(),
resolved_specifier,
type_directive: import_desc.deno_types,
resolved_type_directive,
};
imports.push(import_descriptor);
}
let available_libs = get_available_libs();
for ref_desc in ref_descs {
if available_libs.contains(&ref_desc.specifier) {
continue;
}
let resolved_specifier = ModuleSpecifier::resolve_import(
&ref_desc.specifier,
&module_specifier.to_string(),
)?;
let reference_descriptor = ReferenceDescriptor {
specifier: ref_desc.specifier.to_string(),
resolved_specifier,
};
match ref_desc.kind {
TsReferenceKind::Lib => {
lib_directives.push(reference_descriptor);
}
TsReferenceKind::Types => {
types_directives.push(reference_descriptor);
}
TsReferenceKind::Path => {
referenced_files.push(reference_descriptor);
}
}
}
self.graph.0.insert(
module_specifier.to_string(),
ModuleGraphFile {
specifier: specifier.to_string(),
url: specifier.to_string(),
filename: specifier,
// ignored, it's set in TS worker
media_type: MediaType::JavaScript as i32,
source_code,
imports,
referenced_files,
lib_directives,
types_directives,
type_headers: vec![],
},
);
Ok(())
}
fn download_module(
&mut self,
module_specifier: ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
) -> Result<(), ErrBox> {
if self.graph.0.contains_key(&module_specifier.to_string()) {
return Ok(());
}
if !self.is_dyn_import {
// Verify that remote file doesn't try to statically import local file.
if let Some(referrer) = maybe_referrer.as_ref() {
let referrer_url = referrer.as_url();
match referrer_url.scheme() {
"http" | "https" => {
let specifier_url = module_specifier.as_url();
match specifier_url.scheme() {
"http" | "https" => {}
_ => {
let e = OpError::permission_denied("Remote module are not allowed to statically import local modules. Use dynamic import instead.".to_string());
return Err(e.into());
}
}
}
_ => {}
}
}
}
let spec = module_specifier;
let file_fetcher = self.file_fetcher.clone();
let perms = self.permissions.clone();
let load_future = async move {
let spec_ = spec.clone();
let source_file = file_fetcher
.fetch_source_file(&spec_, maybe_referrer, perms)
.await?;
// FIXME(bartlomieju):
// because of redirects we may end up with wrong URL,
// substitute with original one
Ok(SourceFile {
url: spec_.as_url().to_owned(),
..source_file
})
}
.boxed_local();
self.pending_downloads.push(load_future);
Ok(())
}
fn visit_module(
&mut self,
module_specifier: &ModuleSpecifier,
source_file: SourceFile,
) -> Result<(), ErrBox> {
let mut imports = vec![];
let mut referenced_files = vec![];
let mut lib_directives = vec![];
let mut types_directives = vec![];
let mut type_headers = vec![];
let source_code = String::from_utf8(source_file.source_code)?;
if source_file.media_type == MediaType::JavaScript
|| source_file.media_type == MediaType::TypeScript
{
if let Some(types_specifier) = source_file.types_header {
let type_header = ReferenceDescriptor {
specifier: types_specifier.to_string(),
resolved_specifier: ModuleSpecifier::resolve_import(
&types_specifier,
&module_specifier.to_string(),
)?,
};
self.download_module(
type_header.resolved_specifier.clone(),
Some(module_specifier.clone()),
)?;
type_headers.push(type_header);
}
let (import_descs, ref_descs) = analyze_dependencies_and_references(
&source_code,
self.analyze_dynamic_imports,
)?;
for import_desc in import_descs {
let maybe_resolved =
if let Some(import_map) = self.maybe_import_map.as_ref() {
import_map
.resolve(&import_desc.specifier, &module_specifier.to_string())?
} else {
None
};
let resolved_specifier = if let Some(resolved) = maybe_resolved {
resolved
} else {
ModuleSpecifier::resolve_import(
&import_desc.specifier,
&module_specifier.to_string(),
)?
};
let resolved_type_directive =
if let Some(types_specifier) = import_desc.deno_types.as_ref() {
Some(ModuleSpecifier::resolve_import(
&types_specifier,
&module_specifier.to_string(),
)?)
} else {
None
};
let import_descriptor = ImportDescriptor {
specifier: import_desc.specifier.to_string(),
resolved_specifier,
type_directive: import_desc.deno_types,
resolved_type_directive,
};
self.download_module(
import_descriptor.resolved_specifier.clone(),
Some(module_specifier.clone()),
)?;
if let Some(type_dir_url) =
import_descriptor.resolved_type_directive.as_ref()
{
self.download_module(
type_dir_url.clone(),
Some(module_specifier.clone()),
)?;
}
imports.push(import_descriptor);
}
let available_libs = get_available_libs();
for ref_desc in ref_descs {
if available_libs.contains(&ref_desc.specifier) {
continue;
}
let resolved_specifier = ModuleSpecifier::resolve_import(
&ref_desc.specifier,
&module_specifier.to_string(),
)?;
let reference_descriptor = ReferenceDescriptor {
specifier: ref_desc.specifier.to_string(),
resolved_specifier,
};
self.download_module(
reference_descriptor.resolved_specifier.clone(),
Some(module_specifier.clone()),
)?;
match ref_desc.kind {
TsReferenceKind::Lib => {
lib_directives.push(reference_descriptor);
}
TsReferenceKind::Types => {
types_directives.push(reference_descriptor);
}
TsReferenceKind::Path => {
referenced_files.push(reference_descriptor);
}
}
}
}
self.graph.0.insert(
module_specifier.to_string(),
ModuleGraphFile {
specifier: module_specifier.to_string(),
url: source_file.url.to_string(),
filename: source_file.filename.to_str().unwrap().to_string(),
media_type: source_file.media_type as i32,
source_code,
imports,
referenced_files,
lib_directives,
types_directives,
type_headers,
},
);
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::GlobalState;
async fn build_graph(
module_specifier: &ModuleSpecifier,
) -> Result<HashMap<String, ModuleGraphFile>, ErrBox> {
let global_state = GlobalState::new(Default::default()).unwrap();
let mut graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
None,
Permissions::allow_all(),
false,
false,
);
graph_loader.add_to_graph(&module_specifier).await?;
Ok(graph_loader.get_graph())
}
#[tokio::test]
async fn source_graph_fetch() {
let http_server_guard = crate::test_util::http_server();
let module_specifier = ModuleSpecifier::resolve_url_or_path(
"http://localhost:4545/cli/tests/019_media_types.ts",
)
.unwrap();
let graph = build_graph(&module_specifier)
.await
.expect("Failed to build graph");
let a = graph
.get("http://localhost:4545/cli/tests/019_media_types.ts")
.unwrap();
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js"
));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_video_vdn.t2.ts"
));
assert!(graph.contains_key("http://localhost:4545/cli/tests/subdir/mt_application_x_typescript.t4.ts"));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_video_mp2t.t3.ts"
));
assert!(graph.contains_key("http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js"));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_application_ecmascript.j2.js"
));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_text_javascript.j1.js"
));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts"
));
assert_eq!(
serde_json::to_value(&a.imports).unwrap(),
json!([
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_video_vdn.t2.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_video_vdn.t2.ts",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_video_mp2t.t3.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_video_mp2t.t3.ts",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_application_x_typescript.t4.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_application_x_typescript.t4.ts",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_text_javascript.j1.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_text_javascript.j1.js",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_application_ecmascript.j2.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_application_ecmascript.j2.js",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_text_ecmascript.j3.js",
"typeDirective": null,
"resolvedTypeDirective": null,
},
{
"specifier": "http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/mt_application_x_javascript.j4.js",
"typeDirective": null,
"resolvedTypeDirective": null,
},
])
);
drop(http_server_guard);
}
#[tokio::test]
async fn source_graph_type_references() {
let http_server_guard = crate::test_util::http_server();
let module_specifier = ModuleSpecifier::resolve_url_or_path(
"http://localhost:4545/cli/tests/type_definitions.ts",
)
.unwrap();
let graph = build_graph(&module_specifier)
.await
.expect("Failed to build graph");
eprintln!("json {:#?}", serde_json::to_value(&graph).unwrap());
let a = graph
.get("http://localhost:4545/cli/tests/type_definitions.ts")
.unwrap();
assert_eq!(
serde_json::to_value(&a.imports).unwrap(),
json!([
{
"specifier": "./type_definitions/foo.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/type_definitions/foo.js",
"typeDirective": "./type_definitions/foo.d.ts",
"resolvedTypeDirective": "http://localhost:4545/cli/tests/type_definitions/foo.d.ts"
},
{
"specifier": "./type_definitions/fizz.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/type_definitions/fizz.js",
"typeDirective": "./type_definitions/fizz.d.ts",
"resolvedTypeDirective": "http://localhost:4545/cli/tests/type_definitions/fizz.d.ts"
},
{
"specifier": "./type_definitions/qat.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/type_definitions/qat.ts",
"typeDirective": null,
"resolvedTypeDirective": null,
},
])
);
assert!(graph
.contains_key("http://localhost:4545/cli/tests/type_definitions/foo.js"));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/type_definitions/foo.d.ts"
));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/type_definitions/fizz.js"
));
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/type_definitions/fizz.d.ts"
));
assert!(graph
.contains_key("http://localhost:4545/cli/tests/type_definitions/qat.ts"));
drop(http_server_guard);
}
#[tokio::test]
async fn source_graph_type_references2() {
let http_server_guard = crate::test_util::http_server();
let module_specifier = ModuleSpecifier::resolve_url_or_path(
"http://localhost:4545/cli/tests/type_directives_02.ts",
)
.unwrap();
let graph = build_graph(&module_specifier)
.await
.expect("Failed to build graph");
eprintln!("{:#?}", serde_json::to_value(&graph).unwrap());
let a = graph
.get("http://localhost:4545/cli/tests/type_directives_02.ts")
.unwrap();
assert_eq!(
serde_json::to_value(&a.imports).unwrap(),
json!([
{
"specifier": "./subdir/type_reference.js",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/type_reference.js",
"typeDirective": null,
"resolvedTypeDirective": null,
}
])
);
assert!(graph.contains_key(
"http://localhost:4545/cli/tests/subdir/type_reference.d.ts"
));
let b = graph
.get("http://localhost:4545/cli/tests/subdir/type_reference.js")
.unwrap();
assert_eq!(
serde_json::to_value(&b.types_directives).unwrap(),
json!([
{
"specifier": "./type_reference.d.ts",
"resolvedSpecifier": "http://localhost:4545/cli/tests/subdir/type_reference.d.ts",
}
])
);
drop(http_server_guard);
}
#[tokio::test]
async fn source_graph_type_references3() {
let http_server_guard = crate::test_util::http_server();
let module_specifier = ModuleSpecifier::resolve_url_or_path(
"http://localhost:4545/cli/tests/type_directives_01.ts",
)
.unwrap();
let graph = build_graph(&module_specifier)
.await
.expect("Failed to build graph");
let ts = graph
.get("http://localhost:4545/cli/tests/type_directives_01.ts")
.unwrap();
assert_eq!(
serde_json::to_value(&ts.imports).unwrap(),
json!([
{
"specifier": "http://127.0.0.1:4545/xTypeScriptTypes.js",
"resolvedSpecifier": "http://127.0.0.1:4545/xTypeScriptTypes.js",
"typeDirective": null,
"resolvedTypeDirective": null,
}
])
);
let headers = graph
.get("http://127.0.0.1:4545/xTypeScriptTypes.js")
.unwrap();
assert_eq!(
serde_json::to_value(&headers.type_headers).unwrap(),
json!([
{
"specifier": "./xTypeScriptTypes.d.ts",
"resolvedSpecifier": "http://127.0.0.1:4545/xTypeScriptTypes.d.ts"
}
])
);
drop(http_server_guard);
}
}


@ -2,9 +2,11 @@
// Warning! The values in this enum are duplicated in js/compiler.ts
// Update carefully!
use serde::Serialize;
#[allow(non_camel_case_types)]
#[repr(i8)]
#[derive(Clone, Copy, PartialEq, Debug)]
#[derive(Clone, Copy, PartialEq, Debug, Serialize)]
pub enum MediaType {
JavaScript = 0,
JSX = 1,


@ -1,152 +1,13 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
use super::dispatch_json::Deserialize;
use super::dispatch_json::JsonOp;
use super::dispatch_json::Value;
use crate::futures::future::try_join_all;
use crate::op_error::OpError;
use crate::state::State;
use deno_core::CoreIsolate;
use deno_core::ModuleLoader;
use deno_core::ModuleSpecifier;
use deno_core::ZeroCopyBuf;
use futures::future::FutureExt;
pub fn init(i: &mut CoreIsolate, s: &State) {
i.register_op("op_resolve_modules", s.stateful_json_op(op_resolve_modules));
i.register_op(
"op_fetch_source_files",
s.stateful_json_op(op_fetch_source_files),
);
let custom_assets = std::collections::HashMap::new(); // TODO(ry) use None.
pub fn init(i: &mut CoreIsolate, _s: &State) {
let custom_assets = std::collections::HashMap::new();
// TODO(ry) use None.
// TODO(bartlomieju): is this op even required?
i.register_op(
"op_fetch_asset",
deno_typescript::op_fetch_asset(custom_assets),
);
}
#[derive(Deserialize, Debug)]
struct SpecifiersReferrerArgs {
specifiers: Vec<String>,
referrer: Option<String>,
}
fn op_resolve_modules(
state: &State,
args: Value,
_data: Option<ZeroCopyBuf>,
) -> Result<JsonOp, OpError> {
let args: SpecifiersReferrerArgs = serde_json::from_value(args)?;
let (referrer, is_main) = if let Some(referrer) = args.referrer {
(referrer, false)
} else {
("<unknown>".to_owned(), true)
};
let mut specifiers = vec![];
for specifier in &args.specifiers {
let specifier = state
.resolve(specifier, &referrer, is_main)
.map_err(OpError::from)?;
specifiers.push(specifier.as_str().to_owned());
}
Ok(JsonOp::Sync(json!(specifiers)))
}
fn op_fetch_source_files(
state: &State,
args: Value,
_data: Option<ZeroCopyBuf>,
) -> Result<JsonOp, OpError> {
let args: SpecifiersReferrerArgs = serde_json::from_value(args)?;
let ref_specifier = if let Some(referrer) = args.referrer {
let specifier = ModuleSpecifier::resolve_url(&referrer)
.expect("Referrer is not a valid specifier");
Some(specifier)
} else {
None
};
let s = state.borrow();
let global_state = s.global_state.clone();
let permissions = s.permissions.clone();
let perms_ = permissions.clone();
drop(s);
let file_fetcher = global_state.file_fetcher.clone();
let specifiers = args.specifiers.clone();
let future = async move {
let file_futures: Vec<_> = specifiers
.into_iter()
.map(|specifier| {
let file_fetcher_ = file_fetcher.clone();
let ref_specifier_ = ref_specifier.clone();
let perms_ = perms_.clone();
async move {
let resolved_specifier = ModuleSpecifier::resolve_url(&specifier)
.expect("Invalid specifier");
// TODO(bartlomieju): duplicated from `state.rs::ModuleLoader::load` - deduplicate
// Verify that remote file doesn't try to statically import local file.
if let Some(referrer) = ref_specifier_.as_ref() {
let referrer_url = referrer.as_url();
match referrer_url.scheme() {
"http" | "https" => {
let specifier_url = resolved_specifier.as_url();
match specifier_url.scheme() {
"http" | "https" => {},
_ => {
let e = OpError::permission_denied("Remote module are not allowed to statically import local modules. Use dynamic import instead.".to_string());
return Err(e.into());
}
}
},
_ => {}
}
}
file_fetcher_
.fetch_source_file(&resolved_specifier, ref_specifier_, perms_)
.await
}
.boxed_local()
})
.collect();
let files = try_join_all(file_futures).await.map_err(OpError::from)?;
// We want to get an array of futures that resolves to
let v = files.into_iter().map(|f| {
async {
// if the source file contains a `types_url` we need to replace
// the module with the type definition when requested by the compiler
let file = match f.types_url {
Some(types_url) => {
let types_specifier = ModuleSpecifier::from(types_url);
global_state
.file_fetcher
.fetch_source_file(
&types_specifier,
ref_specifier.clone(),
permissions.clone(),
)
.await
.map_err(OpError::from)?
}
_ => f,
};
let source_code = String::from_utf8(file.source_code).map_err(|_| OpError::invalid_utf8())?;
Ok::<_, OpError>(json!({
"url": file.url.to_string(),
"filename": file.filename.to_str().unwrap(),
"mediaType": file.media_type as i32,
"sourceCode": source_code,
}))
}
});
let v = try_join_all(v).await?;
Ok(v.into())
}
.boxed_local();
Ok(JsonOp::Async(future))
}


@ -242,7 +242,7 @@ impl State {
}
}
fn exit_unstable(api_name: &str) {
pub fn exit_unstable(api_name: &str) {
eprintln!(
"Unstable API '{}'. The --unstable flag must be provided.",
api_name
@ -322,6 +322,7 @@ impl ModuleLoader for State {
maybe_referrer,
target_lib,
permissions,
is_dyn_import,
)
.await?;
Ok(deno_core::ModuleSource {


@ -1,5 +1,6 @@
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
use crate::swc_common;
use crate::swc_common::comments::CommentKind;
use crate::swc_common::comments::Comments;
use crate::swc_common::errors::Diagnostic;
use crate::swc_common::errors::DiagnosticBuilder;
@ -158,10 +159,16 @@ impl AstParser {
&self,
span: Span,
) -> Vec<swc_common::comments::Comment> {
self
.comments
.take_leading_comments(span.lo())
.unwrap_or_else(Vec::new)
let maybe_comments = self.comments.take_leading_comments(span.lo());
if let Some(comments) = maybe_comments {
// clone the comments and put them back in map
let to_return = comments.clone();
self.comments.add_leading(span.lo(), comments);
to_return
} else {
vec![]
}
}
}
@ -240,80 +247,309 @@ impl Visit for DependencyVisitor {
}
}
/// Given file name and source code return vector
/// of unresolved import specifiers.
///
/// Returned vector may contain duplicate entries.
///
/// Second argument allows to configure if dynamic
/// imports should be analyzed.
///
/// NOTE: Only statically analyzable dynamic imports
/// are considered; ie. the ones that have plain string specifier:
///
/// await import("./fizz.ts")
///
/// These imports will be ignored:
///
/// await import(`./${dir}/fizz.ts`)
/// await import("./" + "fizz.ts")
#[allow(unused)]
pub fn analyze_dependencies(
#[derive(Clone, Debug, PartialEq)]
enum DependencyKind {
Import,
DynamicImport,
Export,
}
#[derive(Clone, Debug, PartialEq)]
struct DependencyDescriptor {
span: Span,
specifier: String,
kind: DependencyKind,
}
struct NewDependencyVisitor {
dependencies: Vec<DependencyDescriptor>,
}
impl Visit for NewDependencyVisitor {
fn visit_import_decl(
&mut self,
import_decl: &swc_ecma_ast::ImportDecl,
_parent: &dyn Node,
) {
let src_str = import_decl.src.value.to_string();
self.dependencies.push(DependencyDescriptor {
specifier: src_str,
kind: DependencyKind::Import,
span: import_decl.span,
});
}
fn visit_named_export(
&mut self,
named_export: &swc_ecma_ast::NamedExport,
_parent: &dyn Node,
) {
if let Some(src) = &named_export.src {
let src_str = src.value.to_string();
self.dependencies.push(DependencyDescriptor {
specifier: src_str,
kind: DependencyKind::Export,
span: named_export.span,
});
}
}
fn visit_export_all(
&mut self,
export_all: &swc_ecma_ast::ExportAll,
_parent: &dyn Node,
) {
let src_str = export_all.src.value.to_string();
self.dependencies.push(DependencyDescriptor {
specifier: src_str,
kind: DependencyKind::Export,
span: export_all.span,
});
}
fn visit_call_expr(
&mut self,
call_expr: &swc_ecma_ast::CallExpr,
parent: &dyn Node,
) {
use swc_ecma_ast::Expr::*;
use swc_ecma_ast::ExprOrSuper::*;
swc_ecma_visit::visit_call_expr(self, call_expr, parent);
let boxed_expr = match call_expr.callee.clone() {
Super(_) => return,
Expr(boxed) => boxed,
};
match &*boxed_expr {
Ident(ident) => {
if &ident.sym.to_string() != "import" {
return;
}
}
_ => return,
};
if let Some(arg) = call_expr.args.get(0) {
match &*arg.expr {
Lit(lit) => {
if let swc_ecma_ast::Lit::Str(str_) = lit {
let src_str = str_.value.to_string();
self.dependencies.push(DependencyDescriptor {
specifier: src_str,
kind: DependencyKind::DynamicImport,
span: call_expr.span,
});
}
}
_ => return,
}
}
}
}
fn get_deno_types(parser: &AstParser, span: Span) -> Option<String> {
let comments = parser.get_span_comments(span);
if comments.is_empty() {
return None;
}
// @deno-types must directly precede the import statement - hence we only
// check the last leading comment of the span
let last = comments.last().unwrap();
let comment = last.text.trim_start();
if comment.starts_with("@deno-types") {
let split: Vec<String> =
comment.split('=').map(|s| s.to_string()).collect();
assert_eq!(split.len(), 2);
let specifier_in_quotes = split.get(1).unwrap().to_string();
let specifier = specifier_in_quotes
.trim_start_matches('\"')
.trim_start_matches('\'')
.trim_end_matches('\"')
.trim_end_matches('\'')
.to_string();
return Some(specifier);
}
None
}
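// For reference (hypothetical example, not part of this commit), given:
//
//   // @deno-types="./foo.d.ts"
//   import { foo } from "./foo.js";
//
// the last leading comment of the import's span starts with "@deno-types",
// so get_deno_types returns Some("./foo.d.ts").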
#[derive(Clone, Debug, PartialEq)]
pub struct ImportDescriptor {
pub specifier: String,
pub deno_types: Option<String>,
}
#[derive(Clone, Debug, PartialEq)]
pub enum TsReferenceKind {
Lib,
Types,
Path,
}
#[derive(Clone, Debug, PartialEq)]
pub struct TsReferenceDescriptor {
pub kind: TsReferenceKind,
pub specifier: String,
}
pub fn analyze_dependencies_and_references(
source_code: &str,
analyze_dynamic_imports: bool,
) -> Result<Vec<String>, SwcDiagnosticBuffer> {
) -> Result<
(Vec<ImportDescriptor>, Vec<TsReferenceDescriptor>),
SwcDiagnosticBuffer,
> {
let parser = AstParser::new();
parser.parse_module("root.ts", source_code, |parse_result| {
let module = parse_result?;
let mut collector = DependencyVisitor {
let mut collector = NewDependencyVisitor {
dependencies: vec![],
analyze_dynamic_imports,
};
let module_span = module.span;
collector.visit_module(&module, &module);
Ok(collector.dependencies)
let dependency_descriptors = collector.dependencies;
// for each import check if there's relevant @deno-types directive
let imports = dependency_descriptors
.iter()
.filter(|desc| {
if analyze_dynamic_imports {
return true;
}
desc.kind != DependencyKind::DynamicImport
})
.map(|desc| {
if desc.kind == DependencyKind::Import {
let deno_types = get_deno_types(&parser, desc.span);
ImportDescriptor {
specifier: desc.specifier.to_string(),
deno_types,
}
} else {
ImportDescriptor {
specifier: desc.specifier.to_string(),
deno_types: None,
}
}
})
.collect();
// analyze comment from beginning of the file and find TS directives
let comments = parser
.comments
.take_leading_comments(module_span.lo())
.unwrap_or_else(|| vec![]);
let mut references = vec![];
for comment in comments {
if comment.kind != CommentKind::Line {
continue;
}
// TODO(bartlomieju): you can do better than that...
let text = comment.text.to_string();
let (kind, specifier_in_quotes) =
if text.starts_with("/ <reference path=") {
(
TsReferenceKind::Path,
text.trim_start_matches("/ <reference path="),
)
} else if text.starts_with("/ <reference lib=") {
(
TsReferenceKind::Lib,
text.trim_start_matches("/ <reference lib="),
)
} else if text.starts_with("/ <reference types=") {
(
TsReferenceKind::Types,
text.trim_start_matches("/ <reference types="),
)
} else {
continue;
};
let specifier = specifier_in_quotes
.trim_end_matches("/>")
.trim_end()
.trim_start_matches('\"')
.trim_start_matches('\'')
.trim_end_matches('\"')
.trim_end_matches('\'')
.to_string();
references.push(TsReferenceDescriptor { kind, specifier });
}
Ok((imports, references))
})
}
#[test]
fn test_analyze_dependencies() {
fn test_analyze_dependencies_and_directives() {
let source = r#"
import { foo } from "./foo.ts";
export { bar } from "./foo.ts";
export * from "./bar.ts";
// This comment is placed to make sure that directives are parsed
// even when they start on non-first line
/// <reference lib="dom" />
/// <reference types="./type_reference.d.ts" />
/// <reference path="./type_reference/dep.ts" />
// @deno-types="./type_definitions/foo.d.ts"
import { foo } from "./type_definitions/foo.js";
// @deno-types="./type_definitions/fizz.d.ts"
import "./type_definitions/fizz.js";
/// <reference path="./type_reference/dep2.ts" />
import * as qat from "./type_definitions/qat.ts";
console.log(foo);
console.log(fizz);
console.log(qat.qat);
"#;
let dependencies =
analyze_dependencies(source, false).expect("Failed to parse");
let (imports, references) =
analyze_dependencies_and_references(source, true).expect("Failed to parse");
assert_eq!(
dependencies,
imports,
vec![
"./foo.ts".to_string(),
"./foo.ts".to_string(),
"./bar.ts".to_string(),
]
);
}
#[test]
fn test_analyze_dependencies_dyn_imports() {
let source = r#"
import { foo } from "./foo.ts";
export { bar } from "./foo.ts";
export * from "./bar.ts";
const a = await import("./fizz.ts");
const a = await import("./" + "buzz.ts");
"#;
let dependencies =
analyze_dependencies(source, true).expect("Failed to parse");
assert_eq!(
dependencies,
vec![
"./foo.ts".to_string(),
"./foo.ts".to_string(),
"./bar.ts".to_string(),
"./fizz.ts".to_string(),
ImportDescriptor {
specifier: "./type_definitions/foo.js".to_string(),
deno_types: Some("./type_definitions/foo.d.ts".to_string())
},
ImportDescriptor {
specifier: "./type_definitions/fizz.js".to_string(),
deno_types: Some("./type_definitions/fizz.d.ts".to_string())
},
ImportDescriptor {
specifier: "./type_definitions/qat.ts".to_string(),
deno_types: None
},
]
);
// According to TS docs (https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html)
// directives that are not at the top of the file are ignored, so only
// 3 references should be captured instead of 4.
assert_eq!(
references,
vec![
TsReferenceDescriptor {
specifier: "dom".to_string(),
kind: TsReferenceKind::Lib,
},
TsReferenceDescriptor {
specifier: "./type_reference.d.ts".to_string(),
kind: TsReferenceKind::Types,
},
TsReferenceDescriptor {
specifier: "./type_reference/dep.ts".to_string(),
kind: TsReferenceKind::Path,
},
]
);
}


@ -2,8 +2,8 @@
error: Uncaught TypeError: Cannot resolve extension for "[WILDCARD]config.json" with mediaType "Json".
at getExtension ($deno$/compiler.ts:[WILDCARD])
at new SourceFile ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ($deno$/runtime_worker.ts:[WILDCARD])
at Function.addToCache ($deno$/compiler.ts:[WILDCARD])
at buildSourceFileCache ($deno$/compiler.ts:[WILDCARD])
at compile ($deno$/compiler.ts:[WILDCARD])
at tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
[WILDCARD]

View file

@ -1,5 +1,5 @@
Compile [WILDCARD]/fetch/test.ts
Download http://localhost:4545/cli/tests/subdir/mod2.ts
Download http://localhost:4545/cli/tests/subdir/print_hello.ts
Compile [WILDCARD]/fetch/other.ts
Compile [WILDCARD]/fetch/test.ts
Download http://localhost:4545/cli/tests/subdir/mt_text_typescript.t1.ts
Compile [WILDCARD]/fetch/other.ts

View file

@ -1,8 +1 @@
[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"
at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
at Object.sendAsync ([WILDCARD]dispatch_json.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_004_missing_module.ts"

View file

@ -1,8 +1 @@
[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts"
at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
at Object.sendAsync ([WILDCARD]dispatch_json.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: Uncaught TypeError: Cannot resolve module "[WILDCARD]/bad-module.ts"

View file

@ -1,8 +1 @@
[WILDCARD]error: Uncaught NotFound: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"
at unwrapResponse ([WILDCARD]dispatch_json.ts:[WILDCARD])
at Object.sendAsync ([WILDCARD]dispatch_json.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: Cannot resolve module "[WILDCARD]/non-existent" from "[WILDCARD]/error_006_import_ext_failure.ts"

View file

@ -1,9 +1 @@
[WILDCARD]error: Uncaught URIError: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_011_bad_module_specifier.ts"
at unwrapResponse ($deno$/ops/dispatch_json.ts:[WILDCARD])
at Object.sendSync ($deno$/ops/dispatch_json.ts:[WILDCARD])
at resolveModules ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_011_bad_module_specifier.ts"

View file

@ -1,9 +1 @@
[WILDCARD]error: Uncaught URIError: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_012_bad_dynamic_import_specifier.ts"
at unwrapResponse ($deno$/ops/dispatch_json.ts:[WILDCARD])
at Object.sendSync ($deno$/ops/dispatch_json.ts:[WILDCARD])
at resolveModules ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: Uncaught TypeError: relative import path "bad-module.ts" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/error_012_bad_dynamic_import_specifier.ts"

View file

@ -1,9 +1,2 @@
[WILDCARD]
error: Uncaught PermissionDenied: Remote modules are not allowed to statically import local modules. Use dynamic import instead.
at unwrapResponse ($deno$/ops/dispatch_json.ts:[WILDCARD])
at Object.sendAsync ($deno$/ops/dispatch_json.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ($deno$/runtime_worker.ts:[WILDCARD])
error: Remote modules are not allowed to statically import local modules. Use dynamic import instead.

View file

@ -1,10 +1 @@
[WILDCARD]error: Uncaught URIError: relative import path "baz" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/type_definitions/bar.d.ts"
at unwrapResponse ($deno$/ops/dispatch_json.ts:[WILDCARD])
at Object.sendSync ($deno$/ops/dispatch_json.ts:[WILDCARD])
at resolveModules ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at processImports ($deno$/compiler.ts:[WILDCARD])
at async processImports ($deno$/compiler.ts:[WILDCARD])
at async compile ($deno$/compiler.ts:[WILDCARD])
at async tsCompilerOnMessage ($deno$/compiler.ts:[WILDCARD])
at async workerMessageRecvCallback ([WILDCARD]runtime_worker.ts:[WILDCARD])
[WILDCARD]error: relative import path "baz" not prefixed with / or ./ or ../ Imported from "[WILDCARD]/type_definitions/bar.d.ts"

View file

@ -1353,7 +1353,7 @@ itest!(error_004_missing_module {
});
itest!(error_005_missing_dynamic_import {
args: "run --reload error_005_missing_dynamic_import.ts",
args: "run --reload --allow-read error_005_missing_dynamic_import.ts",
exit_code: 1,
output: "error_005_missing_dynamic_import.ts.out",
});

View file

@ -0,0 +1,5 @@
export class Bar {
constructor() {
this.baz = "baz";
}
}

View file

@ -8,23 +8,25 @@ use crate::file_fetcher::SourceFileFetcher;
use crate::fmt;
use crate::fs as deno_fs;
use crate::global_state::GlobalState;
use crate::import_map::ImportMap;
use crate::module_graph::ModuleGraphLoader;
use crate::msg;
use crate::op_error::OpError;
use crate::ops;
use crate::permissions::Permissions;
use crate::source_maps::SourceMapGetter;
use crate::startup_data;
use crate::state::exit_unstable;
use crate::state::State;
use crate::tokio_util;
use crate::version;
use crate::web_worker::WebWorker;
use crate::web_worker::WebWorkerHandle;
use crate::worker::WorkerEvent;
use core::task::Context;
use deno_core::Buf;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use deno_core::StartupData;
use futures::future::Either;
use futures::future::Future;
use futures::future::FutureExt;
use log::info;
@ -48,8 +50,74 @@ use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;
use std::task::Poll;
use std::time::Instant;
use url::Url;
// TODO(bartlomieju): make static
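// The `deno.*` entries are Deno-specific libs; the remaining names appear to
// mirror TypeScript's bundled `lib.*.d.ts` files.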
pub fn get_available_libs() -> Vec<String> {
vec![
"deno.ns".to_string(),
"deno.window".to_string(),
"deno.worker".to_string(),
"deno.shared_globals".to_string(),
"deno.unstable".to_string(),
"dom".to_string(),
"dom.iterable".to_string(),
"es5".to_string(),
"es6".to_string(),
"esnext".to_string(),
"es2020".to_string(),
"es2020.full".to_string(),
"es2019".to_string(),
"es2019.full".to_string(),
"es2018".to_string(),
"es2018.full".to_string(),
"es2017".to_string(),
"es2017.full".to_string(),
"es2016".to_string(),
"es2016.full".to_string(),
"es2015".to_string(),
"es2015.collection".to_string(),
"es2015.core".to_string(),
"es2015.generator".to_string(),
"es2015.iterable".to_string(),
"es2015.promise".to_string(),
"es2015.proxy".to_string(),
"es2015.reflect".to_string(),
"es2015.symbol".to_string(),
"es2015.symbol.wellknown".to_string(),
"es2016.array.include".to_string(),
"es2017.intl".to_string(),
"es2017.object".to_string(),
"es2017.sharedmemory".to_string(),
"es2017.string".to_string(),
"es2017.typedarrays".to_string(),
"es2018.asyncgenerator".to_string(),
"es2018.asynciterable".to_string(),
"es2018.intl".to_string(),
"es2018.promise".to_string(),
"es2018.regexp".to_string(),
"es2019.array".to_string(),
"es2019.object".to_string(),
"es2019.string".to_string(),
"es2019.symbol".to_string(),
"es2020.bigint".to_string(),
"es2020.promise".to_string(),
"es2020.string".to_string(),
"es2020.symbol.wellknown".to_string(),
"esnext.array".to_string(),
"esnext.asynciterable".to_string(),
"esnext.bigint".to_string(),
"esnext.intl".to_string(),
"esnext.promise".to_string(),
"esnext.string".to_string(),
"esnext.symbol".to_string(),
"scripthost".to_string(),
"webworker".to_string(),
"webworker.importscripts".to_string(),
]
}
#[derive(Debug, Clone)]
pub struct CompiledModule {
pub code: String,
@ -199,40 +267,6 @@ impl CompiledFileMetadata {
}
}
/// Creates the JSON message sent to compiler.ts's onmessage.
fn req(
request_type: msg::CompilerRequestType,
root_names: Vec<String>,
compiler_config: CompilerConfig,
target: &str,
bundle: bool,
unstable: bool,
) -> Buf {
let cwd = std::env::current_dir().unwrap();
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
"type": request_type as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"configPath": config_path,
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
}),
_ => json!({
"type": request_type as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"cwd": cwd,
}),
};
j.to_string().into_boxed_str().into_boxed_bytes()
}
/// Emit a SHA256 hash based on source code, deno version and TS config.
/// Used to check if a recompilation for source code is needed.
pub fn source_code_version_hash(
@ -349,28 +383,72 @@ impl TsCompiler {
pub async fn bundle(
&self,
global_state: GlobalState,
module_name: String,
module_specifier: ModuleSpecifier,
out_file: Option<PathBuf>,
) -> Result<(), ErrBox> {
debug!(
"Invoking the compiler to bundle. module_name: {}",
module_name
);
eprintln!("Bundling {}", module_name);
let root_names = vec![module_name];
let req_msg = req(
msg::CompilerRequestType::Compile,
root_names,
self.config.clone(),
"main",
true,
global_state.flags.unstable,
module_specifier.to_string()
);
eprintln!("Bundling {}", module_specifier.to_string());
let import_map: Option<ImportMap> =
match global_state.flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !global_state.flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};
let permissions = Permissions::allow_all();
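// Build the whole module graph in Rust before the compiler worker starts.
// The two trailing booleans appear to be `is_dyn_import` (false here) and
// `analyze_dynamic_imports` (true), so dynamically imported modules are also
// walked into the bundle graph.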
let mut module_graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
import_map,
permissions.clone(),
false,
true,
);
module_graph_loader.add_to_graph(&module_specifier).await?;
let module_graph = module_graph_loader.get_graph();
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
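// The serialized graph is embedded in the request below as "sourceFileMap",
// presumably so the compiler worker can serve module lookups from this map
// instead of fetching sources on demand.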
let root_names = vec![module_specifier.to_string()];
let bundle = true;
let target = "main";
let unstable = global_state.flags.unstable;
let compiler_config = self.config.clone();
let cwd = std::env::current_dir().unwrap();
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
"type": msg::CompilerRequestType::Compile as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"configPath": config_path,
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
"sourceFileMap": module_graph_json,
}),
_ => json!({
"type": msg::CompilerRequestType::Compile as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"cwd": cwd,
"sourceFileMap": module_graph_json,
}),
};
let req_msg = j.to_string().into_boxed_str().into_boxed_bytes();
let msg =
execute_in_thread(global_state.clone(), permissions, req_msg).await?;
execute_in_same_thread(global_state.clone(), permissions, req_msg)
.await?;
let json_str = std::str::from_utf8(&msg).unwrap();
debug!("Message: {}", json_str);
@ -427,6 +505,7 @@ impl TsCompiler {
source_file: &SourceFile,
target: TargetLib,
permissions: Permissions,
is_dyn_import: bool,
) -> Result<CompiledModule, ErrBox> {
if self.has_compiled(&source_file.url) {
return self.get_compiled_module(&source_file.url);
@ -457,19 +536,63 @@ impl TsCompiler {
}
let source_file_ = source_file.clone();
let module_url = source_file.url.clone();
let module_specifier = ModuleSpecifier::from(source_file.url.clone());
let import_map: Option<ImportMap> =
match global_state.flags.import_map_path.as_ref() {
None => None,
Some(file_path) => {
if !global_state.flags.unstable {
exit_unstable("--importmap")
}
Some(ImportMap::load(file_path)?)
}
};
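// Same upfront graph walk as in `bundle`, except the caller's `is_dyn_import`
// flag is forwarded and the final flag (presumably dynamic-import analysis)
// stays off, so only the statically reachable graph is prepared here.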
let mut module_graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
import_map,
permissions.clone(),
is_dyn_import,
false,
);
module_graph_loader.add_to_graph(&module_specifier).await?;
let module_graph = module_graph_loader.get_graph();
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
let target = match target {
TargetLib::Main => "main",
TargetLib::Worker => "worker",
};
let root_names = vec![module_url.to_string()];
let req_msg = req(
msg::CompilerRequestType::Compile,
root_names,
self.config.clone(),
target,
false,
global_state.flags.unstable,
);
let bundle = false;
let unstable = global_state.flags.unstable;
let compiler_config = self.config.clone();
let cwd = std::env::current_dir().unwrap();
let j = match (compiler_config.path, compiler_config.content) {
(Some(config_path), Some(config_data)) => json!({
"type": msg::CompilerRequestType::Compile as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"configPath": config_path,
"config": str::from_utf8(&config_data).unwrap(),
"cwd": cwd,
"sourceFileMap": module_graph_json,
}),
_ => json!({
"type": msg::CompilerRequestType::Compile as i32,
"target": target,
"rootNames": root_names,
"bundle": bundle,
"unstable": unstable,
"cwd": cwd,
"sourceFileMap": module_graph_json,
}),
};
let req_msg = j.to_string().into_boxed_str().into_boxed_bytes();
let ts_compiler = self.clone();
@ -479,8 +602,15 @@ impl TsCompiler {
module_url.to_string()
);
let start = Instant::now();
let msg =
execute_in_thread(global_state.clone(), permissions, req_msg).await?;
execute_in_same_thread(global_state.clone(), permissions, req_msg)
.await?;
let end = Instant::now();
debug!("time spent in compiler thread {:#?}", end - start);
let json_str = std::str::from_utf8(&msg).unwrap();
let compile_response: CompileResponse = serde_json::from_str(json_str)?;
@ -569,6 +699,7 @@ impl TsCompiler {
media_type: msg::MediaType::JavaScript,
source_code: compiled_code,
types_url: None,
types_header: None,
};
Ok(compiled_module)
@ -671,6 +802,7 @@ impl TsCompiler {
media_type: msg::MediaType::JavaScript,
source_code,
types_url: None,
types_header: None,
};
Ok(source_map_file)
@ -777,33 +909,38 @@ impl TsCompiler {
}
}
async fn execute_in_thread(
async fn execute_in_same_thread(
global_state: GlobalState,
permissions: Permissions,
req: Buf,
) -> Result<Buf, ErrBox> {
let (handle_sender, handle_receiver) =
std::sync::mpsc::sync_channel::<Result<WebWorkerHandle, ErrBox>>(1);
let builder =
std::thread::Builder::new().name("deno-ts-compiler".to_string());
let join_handle = builder.spawn(move || {
let worker = TsCompiler::setup_worker(global_state.clone(), permissions);
handle_sender.send(Ok(worker.thread_safe_handle())).unwrap();
drop(handle_sender);
tokio_util::run_basic(worker).expect("Panic in event loop");
})?;
let handle = handle_receiver.recv().unwrap()?;
let mut worker = TsCompiler::setup_worker(global_state.clone(), permissions);
let handle = worker.thread_safe_handle();
handle.post_message(req)?;
let event = handle.get_event().await?.expect("Compiler didn't respond");
let buf = match event {
WorkerEvent::Message(buf) => Ok(buf),
WorkerEvent::Error(error) => Err(error),
WorkerEvent::TerminalError(error) => Err(error),
}?;
// Shutdown worker and wait for thread to finish
handle.terminate();
join_handle.join().unwrap();
Ok(buf)
let mut event_fut = handle.get_event().boxed_local();
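// The compiler worker now runs on the current thread, so its event loop must
// be polled alongside the pending response: `Either::Left` yields the
// worker's reply (or an error), while `Either::Right` means the worker future
// made progress (or completed) and we keep waiting for the event.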
loop {
let select_result = futures::future::select(event_fut, &mut worker).await;
match select_result {
Either::Left((event_result, _worker)) => {
let event = event_result
.expect("Compiler didn't respond")
.expect("Empty message");
let buf = match event {
WorkerEvent::Message(buf) => Ok(buf),
WorkerEvent::Error(error) => Err(error),
WorkerEvent::TerminalError(error) => Err(error),
}?;
return Ok(buf);
}
Either::Right((worker_result, event_fut_)) => {
event_fut = event_fut_;
worker_result?;
}
}
}
}
/// This function is used by `Deno.compile()` and `Deno.bundle()` APIs.
@ -813,14 +950,55 @@ pub async fn runtime_compile<S: BuildHasher>(
root_name: &str,
sources: &Option<HashMap<String, String, S>>,
bundle: bool,
options: &Option<String>,
maybe_options: &Option<String>,
) -> Result<Value, OpError> {
let mut root_names = vec![];
let mut module_graph_loader = ModuleGraphLoader::new(
global_state.file_fetcher.clone(),
None,
permissions.clone(),
false,
false,
);
if let Some(s_map) = sources {
root_names.push(root_name.to_string());
module_graph_loader.build_local_graph(root_name, s_map)?;
} else {
let module_specifier =
ModuleSpecifier::resolve_import(root_name, "<unknown>")?;
root_names.push(module_specifier.to_string());
module_graph_loader.add_to_graph(&module_specifier).await?;
}
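// With inline `sources` the graph is built from the provided map; otherwise
// the root specifier is resolved and its dependencies are fetched
// recursively.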
// download all additional files from TSconfig and add them to root_names
if let Some(options) = maybe_options {
let options_json: serde_json::Value = serde_json::from_str(options)?;
if let Some(types_option) = options_json.get("types") {
let types_arr = types_option.as_array().expect("types is not an array");
for type_value in types_arr {
let type_str = type_value
.as_str()
.expect("type is not a string")
.to_string();
let type_specifier = ModuleSpecifier::resolve_url_or_path(&type_str)?;
module_graph_loader.add_to_graph(&type_specifier).await?;
root_names.push(type_specifier.to_string())
}
}
}
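// At this point `root_names` holds the entry module plus any "types" entries,
// and the graph carries the sources for all of them.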
let module_graph = module_graph_loader.get_graph();
let module_graph_json =
serde_json::to_value(module_graph).expect("Failed to serialize data");
let req_msg = json!({
"type": msg::CompilerRequestType::RuntimeCompile as i32,
"target": "runtime",
"rootName": root_name,
"sources": sources,
"options": options,
"rootNames": root_names,
"sourceFileMap": module_graph_json,
"options": maybe_options,
"bundle": bundle,
"unstable": global_state.flags.unstable,
})
@ -830,7 +1008,7 @@ pub async fn runtime_compile<S: BuildHasher>(
let compiler = global_state.ts_compiler.clone();
let msg = execute_in_thread(global_state, permissions, req_msg).await?;
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
let json_str = std::str::from_utf8(&msg).unwrap();
// TODO(bartlomieju): factor `bundle` path into separate function `runtime_bundle`
@ -867,7 +1045,7 @@ pub async fn runtime_transpile<S: BuildHasher>(
.into_boxed_str()
.into_boxed_bytes();
let msg = execute_in_thread(global_state, permissions, req_msg).await?;
let msg = execute_in_same_thread(global_state, permissions, req_msg).await?;
let json_str = std::str::from_utf8(&msg).unwrap();
let v = serde_json::from_str::<serde_json::Value>(json_str)
.expect("Error decoding JSON string.");
@ -896,6 +1074,7 @@ mod tests {
media_type: msg::MediaType::TypeScript,
source_code: include_bytes!("./tests/002_hello.ts").to_vec(),
types_url: None,
types_header: None,
};
let mock_state =
GlobalState::mock(vec![String::from("deno"), String::from("hello.ts")]);
@ -906,6 +1085,7 @@ mod tests {
&out,
TargetLib::Main,
Permissions::allow_all(),
false,
)
.await;
assert!(result.is_ok());
@ -949,9 +1129,8 @@ mod tests {
.unwrap()
.join("cli/tests/002_hello.ts");
use deno_core::ModuleSpecifier;
let module_name = ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap())
.unwrap()
.to_string();
let module_name =
ModuleSpecifier::resolve_url_or_path(p.to_str().unwrap()).unwrap();
let state = GlobalState::mock(vec![
String::from("deno"),