Refactor DenoDir (#2636)

* rename `ModuleMetaData` to `SourceFile` and remove TS-specific
  functionality

* add `TsCompiler` struct encapsulating processing of TypeScript files

* move `SourceMapGetter` trait implementation to `//cli/compiler.rs`

* add low-level `DiskCache` API for general purpose caches and use it in
  `DenoDir` and `TsCompiler` for filesystem access

* don't use hash-like filenames for compiled modules; instead use a
  metadata file to store the compilation hash

* add `SourceFileCache` for in-process caching of loaded files for fast
  subsequent access

* define `SourceFileFetcher` trait encapsulating loading of local and
  remote files and implement it for `DenoDir`

* define `use_cache` and `no_fetch` flags on `DenoDir` instead of passing
  them to fetch methods
Authored by Bartek Iwańczuk on 2019-07-18 00:15:30 +02:00; committed by Ryan Dahl
parent 481a82c983
commit 8214b686ce
19 changed files with 1707 additions and 1454 deletions
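For illustration, here is an editor's sketch (not part of the commit) of the cache-invalidation scheme the bullets above describe: compiled output is stored under a URL-derived name, and a sibling `.meta` file carries a `version_hash` computed by `source_code_version_hash` (see the cli/compiler.rs diff below) over the source bytes, the Deno version, and the TS config, so changing any of the three forces recompilation.

// Editor's sketch, assuming the `CompiledFileMetadata` and
// `source_code_version_hash` definitions from the diff below.
fn needs_recompile(
  source_code: &[u8],
  deno_version: &str,
  config_hash: &[u8],
  cached_meta: Option<&CompiledFileMetadata>,
) -> bool {
  let current_hash =
    source_code_version_hash(source_code, deno_version, config_hash);
  match cached_meta {
    // No `.meta` file in the cache yet: must compile.
    None => true,
    // Hash mismatch: source, Deno version, or TS config changed.
    Some(meta) => meta.version_hash != current_hash,
  }
}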

@@ -1,57 +1,80 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use crate::deno_dir::DenoDir;
use crate::deno_dir::SourceFile;
use crate::deno_dir::SourceFileFetcher;
use crate::diagnostics::Diagnostic;
use crate::disk_cache::DiskCache;
use crate::msg;
use crate::resources;
use crate::source_maps::SourceMapGetter;
use crate::startup_data;
use crate::state::*;
use crate::tokio_util;
use crate::version;
use crate::worker::Worker;
use deno::Buf;
use deno::ErrBox;
use deno::ModuleSpecifier;
use futures::future::Either;
use futures::Future;
use futures::Stream;
use ring;
use std::collections::HashSet;
use std::fmt::Write;
use std::fs;
use std::path::PathBuf;
use std::str;
use std::sync::atomic::Ordering;
use std::sync::Mutex;
use url::Url;
// This corresponds to JS ModuleMetaData.
// TODO Rename one or the other so they correspond.
// TODO(bartlomieju): change `*_name` to `*_url` and use Url type
#[derive(Debug, Clone)]
pub struct ModuleMetaData {
pub module_name: String,
pub module_redirect_source_name: Option<String>, // source of redirect
pub filename: PathBuf,
pub media_type: msg::MediaType,
pub source_code: Vec<u8>,
pub maybe_output_code_filename: Option<PathBuf>,
pub maybe_output_code: Option<Vec<u8>>,
pub maybe_source_map_filename: Option<PathBuf>,
pub maybe_source_map: Option<Vec<u8>>,
/// Optional tuple which represents the state of the compiler
/// configuration, where the first element is the canonical path to the
/// configuration file and the second is a vector of the bytes of its contents.
type CompilerConfig = Option<(PathBuf, Vec<u8>)>;
/// Information associated with a compiled file in the cache.
/// Includes the source code path and a state hash.
/// `version_hash` is used to validate the version of the file
/// and could be used to remove stale files from the cache.
pub struct CompiledFileMetadata {
pub source_path: PathBuf,
pub version_hash: String,
}
impl ModuleMetaData {
pub fn has_output_code_and_source_map(&self) -> bool {
self.maybe_output_code.is_some() && self.maybe_source_map.is_some()
static SOURCE_PATH: &'static str = "source_path";
static VERSION_HASH: &'static str = "version_hash";
impl CompiledFileMetadata {
pub fn from_json_string(metadata_string: String) -> Option<Self> {
// TODO: use serde for deserialization
let maybe_metadata_json: serde_json::Result<serde_json::Value> =
serde_json::from_str(&metadata_string);
if let Ok(metadata_json) = maybe_metadata_json {
let source_path = metadata_json[SOURCE_PATH].as_str().map(PathBuf::from);
let version_hash = metadata_json[VERSION_HASH].as_str().map(String::from);
if source_path.is_none() || version_hash.is_none() {
return None;
}
return Some(CompiledFileMetadata {
source_path: source_path.unwrap(),
version_hash: version_hash.unwrap(),
});
}
None
}
pub fn js_source(&self) -> String {
if self.media_type == msg::MediaType::Json {
return format!(
"export default {};",
str::from_utf8(&self.source_code).unwrap()
);
}
match self.maybe_output_code {
None => str::from_utf8(&self.source_code).unwrap().to_string(),
Some(ref output_code) => str::from_utf8(output_code).unwrap().to_string(),
}
pub fn to_json_string(self: &Self) -> Result<String, serde_json::Error> {
let mut value_map = serde_json::map::Map::new();
value_map.insert(SOURCE_PATH.to_owned(), json!(&self.source_path));
value_map.insert(VERSION_HASH.to_string(), json!(&self.version_hash));
serde_json::to_string(&value_map)
}
}
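An illustrative round-trip of the two methods above (editor's sketch; the path and hash values below are made up):

// Editor's sketch; values are hypothetical.
let json = r#"{"source_path":"/some/path/file_server.ts","version_hash":"08574f9cdeb94fd3fb9cdc7a20d086daeeb42bca"}"#;
let meta = CompiledFileMetadata::from_json_string(json.to_string())
  .expect("both keys are present, so parsing succeeds");
assert_eq!(meta.source_path, PathBuf::from("/some/path/file_server.ts"));
assert!(meta.to_json_string().unwrap().contains("version_hash"));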
type CompilerConfig = Option<(String, Vec<u8>)>;
/// Creates the JSON message sent to compiler.ts's onmessage.
fn req(
root_names: Vec<String>,
@@ -74,229 +97,565 @@ fn req(
j.to_string().into_boxed_str().into_boxed_bytes()
}
/// Returns an optional tuple which represents the state of the compiler
/// configuration, where the first element is the canonical name of the
/// configuration file and the second is a vector of the bytes of its contents.
pub fn get_compiler_config(
parent_state: &ThreadSafeState,
_compiler_type: &str,
) -> CompilerConfig {
// The compiler type is being passed to make it easier to implement custom
// compilers in the future.
match (&parent_state.config_path, &parent_state.config) {
(Some(config_path), Some(config)) => {
Some((config_path.to_string(), config.to_vec()))
fn gen_hash(v: Vec<&[u8]>) -> String {
let mut ctx = ring::digest::Context::new(&ring::digest::SHA1);
for src in v.iter() {
ctx.update(src);
}
let digest = ctx.finish();
let mut out = String::new();
// TODO There must be a better way to do this...
for byte in digest.as_ref() {
write!(&mut out, "{:02x}", byte).unwrap();
}
out
}
/// Emit a SHA1 hash based on the source code, Deno version and TS config.
/// Used to check whether the source code needs to be recompiled.
pub fn source_code_version_hash(
source_code: &[u8],
version: &str,
config_hash: &[u8],
) -> String {
gen_hash(vec![source_code, version.as_bytes(), config_hash])
}
fn load_config_file(
config_path: Option<String>,
) -> (Option<PathBuf>, Option<Vec<u8>>) {
// take the passed flag and resolve the file name relative to the cwd
let config_file = match &config_path {
Some(config_file_name) => {
debug!("Compiler config file: {}", config_file_name);
let cwd = std::env::current_dir().unwrap();
Some(cwd.join(config_file_name))
}
_ => None,
};
// Convert the PathBuf to a canonicalized string. This is needed by the
// compiler to properly deal with the configuration.
let config_path = match &config_file {
Some(config_file) => Some(config_file.canonicalize().unwrap().to_owned()),
_ => None,
};
// Load the contents of the configuration file
let config = match &config_file {
Some(config_file) => {
debug!("Attempt to load config: {}", config_file.to_str().unwrap());
match fs::read(&config_file) {
Ok(config_data) => Some(config_data.to_owned()),
_ => panic!(
"Error retrieving compiler config file at \"{}\"",
config_file.to_str().unwrap()
),
}
}
_ => None,
};
(config_path, config)
}
pub struct TsCompiler {
pub deno_dir: DenoDir,
pub config: CompilerConfig,
pub config_hash: Vec<u8>,
pub disk_cache: DiskCache,
/// Set of all URLs that have been compiled. This prevents double
/// compilation of a module.
pub compiled: Mutex<HashSet<Url>>,
/// This setting is controlled by the `--reload` flag. Unless the flag
/// is provided, the disk cache is used.
pub use_disk_cache: bool,
}
impl TsCompiler {
pub fn new(
deno_dir: DenoDir,
use_disk_cache: bool,
config_path: Option<String>,
) -> Self {
let compiler_config = match load_config_file(config_path) {
(Some(config_path), Some(config)) => Some((config_path, config.to_vec())),
_ => None,
};
let config_bytes = match &compiler_config {
Some((_, config)) => config.clone(),
_ => b"".to_vec(),
};
Self {
disk_cache: deno_dir.clone().gen_cache,
deno_dir,
config: compiler_config,
config_hash: config_bytes,
compiled: Mutex::new(HashSet::new()),
use_disk_cache,
}
}
/// Create a new V8 worker with the TS compiler snapshot and set up the compiler's runtime.
fn setup_worker(state: ThreadSafeState) -> Worker {
// Count how many times we start the compiler worker.
state.metrics.compiler_starts.fetch_add(1, Ordering::SeqCst);
let mut worker = Worker::new(
"TS".to_string(),
startup_data::compiler_isolate_init(),
// TODO(ry) Maybe we should use a separate state for the compiler.
// as was done previously.
state.clone(),
);
worker.execute("denoMain()").unwrap();
worker.execute("workerMain()").unwrap();
worker.execute("compilerMain()").unwrap();
worker
}
pub fn bundle_async(
self: &Self,
state: ThreadSafeState,
module_name: String,
out_file: String,
) -> impl Future<Item = (), Error = ErrBox> {
debug!(
"Invoking the compiler to bundle. module_name: {}",
module_name
);
let root_names = vec![module_name.clone()];
let req_msg = req(root_names, self.config.clone(), Some(out_file));
let worker = TsCompiler::setup_worker(state.clone());
let resource = worker.state.resource.clone();
let compiler_rid = resource.rid;
let first_msg_fut =
resources::post_message_to_worker(compiler_rid, req_msg)
.then(move |_| worker)
.then(move |result| {
if let Err(err) = result {
// TODO(ry) Need to forward the error instead of exiting.
eprintln!("{}", err.to_string());
std::process::exit(1);
}
debug!("Sent message to worker");
let stream_future =
resources::get_message_stream_from_worker(compiler_rid)
.into_future();
stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
});
first_msg_fut.map_err(|_| panic!("not handled")).and_then(
move |maybe_msg: Option<Buf>| {
debug!("Received message from worker");
if let Some(msg) = maybe_msg {
let json_str = std::str::from_utf8(&msg).unwrap();
debug!("Message: {}", json_str);
if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
return Err(ErrBox::from(diagnostics));
}
}
Ok(())
},
)
}
/// Mark the given module URL as compiled to avoid multiple compilations of the
/// same module in a single run.
fn mark_compiled(&self, url: &Url) {
let mut c = self.compiled.lock().unwrap();
c.insert(url.clone());
}
/// Check if the given module URL has already been compiled and can be fetched directly from disk.
fn has_compiled(&self, url: &Url) -> bool {
let c = self.compiled.lock().unwrap();
c.contains(url)
}
/// Asynchronously compile a module and all of its dependencies.
///
/// This method compiles every module at most once.
///
/// If the `--reload` flag was provided then the compiler will not use the
/// on-disk cache and will force recompilation.
///
/// If compilation is required, a new V8 worker is spawned with a fresh TS compiler.
pub fn compile_async(
self: &Self,
state: ThreadSafeState,
source_file: &SourceFile,
) -> impl Future<Item = SourceFile, Error = ErrBox> {
// TODO: maybe fetching of original SourceFile should be done here?
if source_file.media_type != msg::MediaType::TypeScript {
return Either::A(futures::future::ok(source_file.clone()));
}
if self.has_compiled(&source_file.url) {
match self.get_compiled_source_file(&source_file) {
Ok(compiled_module) => {
return Either::A(futures::future::ok(compiled_module));
}
Err(err) => {
return Either::A(futures::future::err(err));
}
}
}
if self.use_disk_cache {
// Try to load cached version:
// 1. check if there's 'meta' file
if let Some(metadata) = self.get_metadata(&source_file.url) {
// 2. compare version hashes
// TODO: it would probably be a good idea to make this a method on SourceFile
let version_hash_to_validate = source_code_version_hash(
&source_file.source_code,
version::DENO,
&self.config_hash,
);
if metadata.version_hash == version_hash_to_validate {
debug!("load_cache metadata version hash match");
if let Ok(compiled_module) =
self.get_compiled_source_file(&source_file)
{
debug!(
"found cached compiled module: {:?}",
compiled_module.clone().filename
);
// TODO: store in the in-process cache for subsequent access
return Either::A(futures::future::ok(compiled_module));
}
}
}
}
let source_file_ = source_file.clone();
debug!(">>>>> compile_sync START");
let module_url = source_file.url.clone();
debug!(
"Running rust part of compile_sync, module specifier: {}",
&source_file.url
);
let root_names = vec![module_url.to_string()];
let req_msg = req(root_names, self.config.clone(), None);
let worker = TsCompiler::setup_worker(state.clone());
let compiling_job = state.progress.add("Compile", &module_url.to_string());
let state_ = state.clone();
let resource = worker.state.resource.clone();
let compiler_rid = resource.rid;
let first_msg_fut =
resources::post_message_to_worker(compiler_rid, req_msg)
.then(move |_| worker)
.then(move |result| {
if let Err(err) = result {
// TODO(ry) Need to forward the error instead of exiting.
eprintln!("{}", err.to_string());
std::process::exit(1);
}
debug!("Sent message to worker");
let stream_future =
resources::get_message_stream_from_worker(compiler_rid)
.into_future();
stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
});
let fut = first_msg_fut
.map_err(|_| panic!("not handled"))
.and_then(move |maybe_msg: Option<Buf>| {
debug!("Received message from worker");
if let Some(msg) = maybe_msg {
let json_str = std::str::from_utf8(&msg).unwrap();
debug!("Message: {}", json_str);
if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
return Err(ErrBox::from(diagnostics));
}
}
Ok(())
}).and_then(move |_| {
// if we are this far it means compilation was successful and we can
// load the compiled file from disk
// TODO: can this be somehow called using `self.`?
state_
.ts_compiler
.get_compiled_source_file(&source_file_)
.map_err(|e| {
// TODO: this situation shouldn't happen
panic!("Expected to find compiled file: {}", e)
})
}).and_then(move |source_file_after_compile| {
// Explicit drop to keep reference alive until future completes.
drop(compiling_job);
Ok(source_file_after_compile)
}).then(move |r| {
debug!(">>>>> compile_sync END");
// TODO(ry) do this in worker's destructor.
// resource.close();
r
});
Either::B(fut)
}
/// Get the associated `CompiledFileMetadata` for the given module, if it exists.
pub fn get_metadata(self: &Self, url: &Url) -> Option<CompiledFileMetadata> {
// Try to load cached version:
// 1. check if there's 'meta' file
let cache_key = self
.disk_cache
.get_cache_filename_with_extension(url, "meta");
if let Ok(metadata_bytes) = self.disk_cache.get(&cache_key) {
if let Ok(metadata) = std::str::from_utf8(&metadata_bytes) {
if let Some(read_metadata) =
CompiledFileMetadata::from_json_string(metadata.to_string())
{
return Some(read_metadata);
}
}
}
None
}
/// Return the compiled JS file for the given TS module.
// TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
// SourceFileFetcher
pub fn get_compiled_source_file(
self: &Self,
source_file: &SourceFile,
) -> Result<SourceFile, ErrBox> {
let cache_key = self
.disk_cache
.get_cache_filename_with_extension(&source_file.url, "js");
let compiled_code = self.disk_cache.get(&cache_key)?;
let compiled_code_filename = self.disk_cache.location.join(cache_key);
debug!("compiled filename: {:?}", compiled_code_filename);
let compiled_module = SourceFile {
url: source_file.url.clone(),
redirect_source_url: None,
filename: compiled_code_filename,
media_type: msg::MediaType::JavaScript,
source_code: compiled_code,
};
Ok(compiled_module)
}
/// Save the compiled JS file for the given TS module to the on-disk cache.
///
/// Alongside the compiled file, a special metadata file is saved containing
/// a hash that can be validated to avoid unnecessary recompilation.
fn cache_compiled_file(
self: &Self,
module_specifier: &ModuleSpecifier,
contents: &str,
) -> std::io::Result<()> {
let js_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js");
self
.disk_cache
.set(&js_key, contents.as_bytes())
.and_then(|_| {
self.mark_compiled(module_specifier.as_url());
let source_file = self
.deno_dir
.fetch_source_file(&module_specifier)
.expect("Source file not found");
let version_hash = source_code_version_hash(
&source_file.source_code,
version::DENO,
&self.config_hash,
);
let compiled_file_metadata = CompiledFileMetadata {
source_path: source_file.filename.to_owned(),
version_hash,
};
let meta_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "meta");
self.disk_cache.set(
&meta_key,
compiled_file_metadata.to_json_string()?.as_bytes(),
)
})
}
/// Return the associated source map file for the given TS module.
// TODO: ideally we shouldn't construct SourceFile by hand, but it should be delegated to
// SourceFileFetcher
pub fn get_source_map_file(
self: &Self,
module_specifier: &ModuleSpecifier,
) -> Result<SourceFile, ErrBox> {
let cache_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
let source_code = self.disk_cache.get(&cache_key)?;
let source_map_filename = self.disk_cache.location.join(cache_key);
debug!("source map filename: {:?}", source_map_filename);
let source_map_file = SourceFile {
url: module_specifier.as_url().to_owned(),
redirect_source_url: None,
filename: source_map_filename,
media_type: msg::MediaType::JavaScript,
source_code,
};
Ok(source_map_file)
}
/// Save the source map file for the given TS module to the on-disk cache.
fn cache_source_map(
self: &Self,
module_specifier: &ModuleSpecifier,
contents: &str,
) -> std::io::Result<()> {
let source_map_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js.map");
self.disk_cache.set(&source_map_key, contents.as_bytes())
}
/// This method is called by the TS compiler via an "op".
pub fn cache_compiler_output(
self: &Self,
module_specifier: &ModuleSpecifier,
extension: &str,
contents: &str,
) -> std::io::Result<()> {
match extension {
".map" => self.cache_source_map(module_specifier, contents),
".js" => self.cache_compiled_file(module_specifier, contents),
_ => unreachable!(),
}
}
}
pub fn bundle_async(
state: ThreadSafeState,
module_name: String,
out_file: String,
) -> impl Future<Item = (), Error = ErrBox> {
debug!(
"Invoking the compiler to bundle. module_name: {}",
module_name
);
impl SourceMapGetter for TsCompiler {
fn get_source_map(&self, script_name: &str) -> Option<Vec<u8>> {
self
.try_to_resolve_and_get_source_map(script_name)
.and_then(|out| Some(out.source_code))
}
let root_names = vec![module_name.clone()];
let compiler_config = get_compiler_config(&state, "typescript");
let req_msg = req(root_names, compiler_config, Some(out_file));
// Count how many times we start the compiler worker.
state.metrics.compiler_starts.fetch_add(1, Ordering::SeqCst);
let mut worker = Worker::new(
"TS".to_string(),
startup_data::compiler_isolate_init(),
// TODO(ry) Maybe we should use a separate state for the compiler.
// as was done previously.
state.clone(),
);
worker.execute("denoMain()").unwrap();
worker.execute("workerMain()").unwrap();
worker.execute("compilerMain()").unwrap();
let resource = worker.state.resource.clone();
let compiler_rid = resource.rid;
let first_msg_fut = resources::post_message_to_worker(compiler_rid, req_msg)
.then(move |_| worker)
.then(move |result| {
if let Err(err) = result {
// TODO(ry) Need to forward the error instead of exiting.
eprintln!("{}", err.to_string());
std::process::exit(1);
}
debug!("Sent message to worker");
let stream_future =
resources::get_message_stream_from_worker(compiler_rid).into_future();
stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
});
first_msg_fut.map_err(|_| panic!("not handled")).and_then(
move |maybe_msg: Option<Buf>| {
debug!("Received message from worker");
if let Some(msg) = maybe_msg {
let json_str = std::str::from_utf8(&msg).unwrap();
debug!("Message: {}", json_str);
if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
return Err(ErrBox::from(diagnostics));
}
}
Ok(())
},
)
}
pub fn compile_async(
state: ThreadSafeState,
module_meta_data: &ModuleMetaData,
) -> impl Future<Item = ModuleMetaData, Error = ErrBox> {
let module_name = module_meta_data.module_name.clone();
debug!(
"Running rust part of compile_sync. module_name: {}",
&module_name
);
let root_names = vec![module_name.clone()];
let compiler_config = get_compiler_config(&state, "typescript");
let req_msg = req(root_names, compiler_config, None);
// Count how many times we start the compiler worker.
state.metrics.compiler_starts.fetch_add(1, Ordering::SeqCst);
let mut worker = Worker::new(
"TS".to_string(),
startup_data::compiler_isolate_init(),
// TODO(ry) Maybe we should use a separate state for the compiler.
// as was done previously.
state.clone(),
);
worker.execute("denoMain()").unwrap();
worker.execute("workerMain()").unwrap();
worker.execute("compilerMain()").unwrap();
let compiling_job = state.progress.add("Compile", &module_name);
let resource = worker.state.resource.clone();
let compiler_rid = resource.rid;
let first_msg_fut = resources::post_message_to_worker(compiler_rid, req_msg)
.then(move |_| worker)
.then(move |result| {
if let Err(err) = result {
// TODO(ry) Need to forward the error instead of exiting.
eprintln!("{}", err.to_string());
std::process::exit(1);
}
debug!("Sent message to worker");
let stream_future =
resources::get_message_stream_from_worker(compiler_rid).into_future();
stream_future.map(|(f, _rest)| f).map_err(|(f, _rest)| f)
});
first_msg_fut
.map_err(|_| panic!("not handled"))
.and_then(move |maybe_msg: Option<Buf>| {
debug!("Received message from worker");
// TODO: here TS compiler emitted the files to disc and we should signal ModuleMetaData
// cache that source code is available
if let Some(msg) = maybe_msg {
let json_str = std::str::from_utf8(&msg).unwrap();
debug!("Message: {}", json_str);
if let Some(diagnostics) = Diagnostic::from_emit_result(json_str) {
return Err(ErrBox::from(diagnostics));
}
}
Ok(())
}).and_then(move |_| {
let module_specifier = ModuleSpecifier::resolve_url(&module_name)
.expect("Should be valid module specifier");
state.dir.fetch_module_meta_data_async(
&module_specifier,
true,
true,
).map_err(|e| {
// TODO(95th) Instead of panicking, We could translate this error to Diagnostic.
panic!("{}", e)
fn get_source_line(&self, script_name: &str, line: usize) -> Option<String> {
self
.try_resolve_and_get_source_file(script_name)
.and_then(|out| {
str::from_utf8(&out.source_code).ok().and_then(|v| {
let lines: Vec<&str> = v.lines().collect();
assert!(lines.len() > line);
Some(lines[line].to_string())
})
})
}).and_then(move |module_meta_data_after_compile| {
// Explicit drop to keep reference alive until future completes.
drop(compiling_job);
Ok(module_meta_data_after_compile)
}).then(move |r| {
// TODO(ry) do this in worker's destructor.
// resource.close();
r
})
}
}
pub fn compile_sync(
state: ThreadSafeState,
module_meta_data: &ModuleMetaData,
) -> Result<ModuleMetaData, ErrBox> {
tokio_util::block_on(compile_async(state, module_meta_data))
// `SourceMapGetter` related methods
impl TsCompiler {
fn try_to_resolve(self: &Self, script_name: &str) -> Option<ModuleSpecifier> {
// if `script_name` can't be resolved to a ModuleSpecifier it's probably an
// internal script (like `gen/cli/bundle/compiler.js`) so we won't be
// able to get its source anyway
ModuleSpecifier::resolve_url(script_name).ok()
}
fn try_resolve_and_get_source_file(
&self,
script_name: &str,
) -> Option<SourceFile> {
if let Some(module_specifier) = self.try_to_resolve(script_name) {
return match self.deno_dir.fetch_source_file(&module_specifier) {
Ok(out) => Some(out),
Err(_) => None,
};
}
None
}
fn try_to_resolve_and_get_source_map(
&self,
script_name: &str,
) -> Option<SourceFile> {
if let Some(module_specifier) = self.try_to_resolve(script_name) {
return match self.get_source_map_file(&module_specifier) {
Ok(out) => Some(out),
Err(_) => None,
};
}
None
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tokio_util;
use deno::ModuleSpecifier;
use std::path::PathBuf;
impl TsCompiler {
fn compile_sync(
self: &Self,
state: ThreadSafeState,
source_file: &SourceFile,
) -> Result<SourceFile, ErrBox> {
tokio_util::block_on(self.compile_async(state, source_file))
}
}
#[test]
fn test_compile_sync() {
tokio_util::init(|| {
let specifier = "./tests/002_hello.ts";
use deno::ModuleSpecifier;
let module_name = ModuleSpecifier::resolve_url_or_path(specifier)
.unwrap()
.to_string();
let specifier =
ModuleSpecifier::resolve_url_or_path("./tests/002_hello.ts").unwrap();
let mut out = ModuleMetaData {
module_name,
module_redirect_source_name: None,
let mut out = SourceFile {
url: specifier.as_url().clone(),
redirect_source_url: None,
filename: PathBuf::from("/tests/002_hello.ts"),
media_type: msg::MediaType::TypeScript,
source_code: include_bytes!("../tests/002_hello.ts").to_vec(),
maybe_output_code_filename: None,
maybe_output_code: None,
maybe_source_map_filename: None,
maybe_source_map: None,
};
out = compile_sync(
ThreadSafeState::mock(vec![
String::from("./deno"),
String::from("hello.js"),
]),
&out,
).unwrap();
let mock_state = ThreadSafeState::mock(vec![
String::from("./deno"),
String::from("hello.js"),
]);
out = mock_state
.ts_compiler
.compile_sync(mock_state.clone(), &out)
.unwrap();
assert!(
out
.maybe_output_code
.unwrap()
.source_code
.starts_with("console.log(\"Hello World\");".as_bytes())
);
})
}
#[test]
fn test_get_compiler_config_no_flag() {
let compiler_type = "typescript";
let state = ThreadSafeState::mock(vec![
String::from("./deno"),
String::from("hello.js"),
]);
let out = get_compiler_config(&state, compiler_type);
assert_eq!(out, None);
}
#[test]
fn test_bundle_async() {
let specifier = "./tests/002_hello.ts";
@@ -310,8 +669,34 @@ mod tests {
String::from("./tests/002_hello.ts"),
String::from("$deno$/bundle.js"),
]);
let out =
bundle_async(state, module_name, String::from("$deno$/bundle.js"));
let out = state.ts_compiler.bundle_async(
state.clone(),
module_name,
String::from("$deno$/bundle.js"),
);
assert!(tokio_util::block_on(out).is_ok());
}
#[test]
fn test_source_code_version_hash() {
assert_eq!(
"08574f9cdeb94fd3fb9cdc7a20d086daeeb42bca",
source_code_version_hash(b"1+2", "0.4.0", b"{}")
);
// Different source_code should result in different hash.
assert_eq!(
"d8abe2ead44c3ff8650a2855bf1b18e559addd06",
source_code_version_hash(b"1", "0.4.0", b"{}")
);
// Different version should result in different hash.
assert_eq!(
"d6feffc5024d765d22c94977b4fe5975b59d6367",
source_code_version_hash(b"1", "0.1.0", b"{}")
);
// Different config should result in different hash.
assert_eq!(
"3b35db249b26a27decd68686f073a58266b2aec2",
source_code_version_hash(b"1", "0.4.0", b"{\"compilerOptions\": {}}")
);
}
}

(File diff suppressed because it is too large.)
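The suppressed diff contains the `SourceFileFetcher` trait named in the commit message. Since its definition isn't visible here, the following is an inferred sketch based only on the call sites shown in the other diffs (`fetch_source_file`, `fetch_source_file_async`); the exact signatures are assumptions, not the committed code:

// Inferred sketch; the real definition lives in the suppressed diff.
pub trait SourceFileFetcher {
  fn fetch_source_file(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<SourceFile, ErrBox>;

  fn fetch_source_file_async(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Box<dyn Future<Item = SourceFile, Error = ErrBox> + Send>;
}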

cli/disk_cache.rs (new file, 150 lines)
@@ -0,0 +1,150 @@
use crate::fs as deno_fs;
use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use url::Url;
#[derive(Clone)]
pub struct DiskCache {
pub location: PathBuf,
}
impl DiskCache {
pub fn new(location: &Path) -> Self {
// TODO: ensure that 'location' is a directory
Self {
location: location.to_owned(),
}
}
pub fn get_cache_filename(self: &Self, url: &Url) -> PathBuf {
let mut out = PathBuf::new();
let scheme = url.scheme();
out.push(scheme);
match scheme {
"http" | "https" => {
let host = url.host_str().unwrap();
let host_port = match url.port() {
// Windows doesn't support ":" in filenames, so we represent port using a
// special string.
Some(port) => format!("{}_PORT{}", host, port),
None => host.to_string(),
};
out.push(host_port);
}
_ => {}
};
for path_seg in url.path_segments().unwrap() {
out.push(path_seg);
}
out
}
pub fn get_cache_filename_with_extension(
self: &Self,
url: &Url,
extension: &str,
) -> PathBuf {
let base = self.get_cache_filename(url);
match base.extension() {
None => base.with_extension(extension),
Some(ext) => {
let original_extension = OsStr::to_str(ext).unwrap();
let final_extension = format!("{}.{}", original_extension, extension);
base.with_extension(final_extension)
}
}
}
pub fn get(self: &Self, filename: &Path) -> std::io::Result<Vec<u8>> {
let path = self.location.join(filename);
fs::read(&path)
}
pub fn set(self: &Self, filename: &Path, data: &[u8]) -> std::io::Result<()> {
let path = self.location.join(filename);
match path.parent() {
Some(ref parent) => fs::create_dir_all(parent),
None => Ok(()),
}?;
deno_fs::write_file(&path, data, 0o666)
}
pub fn remove(self: &Self, filename: &Path) -> std::io::Result<()> {
let path = self.location.join(filename);
fs::remove_file(path)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_cache_filename() {
let cache = DiskCache::new(&PathBuf::from("foo"));
let test_cases = [
(
"http://deno.land/std/http/file_server.ts",
"http/deno.land/std/http/file_server.ts",
),
(
"http://localhost:8000/std/http/file_server.ts",
"http/localhost_PORT8000/std/http/file_server.ts",
),
(
"https://deno.land/std/http/file_server.ts",
"https/deno.land/std/http/file_server.ts",
),
(
"file:///std/http/file_server.ts",
"file/std/http/file_server.ts",
),
];
for test_case in &test_cases {
assert_eq!(
cache.get_cache_filename(&Url::parse(test_case.0).unwrap()),
PathBuf::from(test_case.1)
)
}
}
#[test]
fn test_get_cache_filename_with_extension() {
let cache = DiskCache::new(&PathBuf::from("foo"));
let test_cases = [
(
"http://deno.land/std/http/file_server.ts",
"js",
"http/deno.land/std/http/file_server.ts.js",
),
(
"file:///std/http/file_server",
"js",
"file/std/http/file_server.js",
),
(
"http://deno.land/std/http/file_server.ts",
"js.map",
"http/deno.land/std/http/file_server.ts.js.map",
),
];
for test_case in &test_cases {
assert_eq!(
cache.get_cache_filename_with_extension(
&Url::parse(test_case.0).unwrap(),
test_case.1
),
PathBuf::from(test_case.2)
)
}
}
}
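A short usage sketch of the `DiskCache` API above (editor's example; the cache location is made up):

// Editor's sketch; "/tmp/deno_gen" is a hypothetical location.
fn demo() -> std::io::Result<()> {
  let cache = DiskCache::new(&PathBuf::from("/tmp/deno_gen"));
  let url = Url::parse("https://deno.land/std/http/file_server.ts").unwrap();
  // Relative key: "https/deno.land/std/http/file_server.ts.js".
  let key = cache.get_cache_filename_with_extension(&url, "js");
  cache.set(&key, b"console.log(\"hello\");")?;
  assert_eq!(cache.get(&key)?, b"console.log(\"hello\");".to_vec());
  Ok(())
}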

@@ -5,7 +5,7 @@ use clap::Arg;
use clap::ArgMatches;
use clap::Shell;
use clap::SubCommand;
use crate::deno_dir;
use crate::fs as deno_fs;
use deno::ModuleSpecifier;
use log::Level;
use std;
@@ -419,7 +419,7 @@ Example:
fn resolve_paths(paths: Vec<String>) -> Vec<String> {
let mut out: Vec<String> = vec![];
for pathstr in paths.iter() {
let result = deno_dir::resolve_from_cwd(pathstr);
let result = deno_fs::resolve_from_cwd(pathstr);
if result.is_err() {
eprintln!("Unrecognized path to whitelist: {}", pathstr);
continue;
@@ -1161,7 +1161,7 @@ mod tests {
use tempfile::TempDir;
let temp_dir = TempDir::new().expect("tempdir fail");
let (_, temp_dir_path) =
deno_dir::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
deno_fs::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
let (flags, subcommand, argv) = flags_from_vec(svec![
"deno",
@@ -1186,7 +1186,7 @@ mod tests {
use tempfile::TempDir;
let temp_dir = TempDir::new().expect("tempdir fail");
let (_, temp_dir_path) =
deno_dir::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
deno_fs::resolve_from_cwd(temp_dir.path().to_str().unwrap()).unwrap();
let (flags, subcommand, argv) = flags_from_vec(svec![
"deno",

@@ -8,6 +8,7 @@ use std::path::{Path, PathBuf};
use deno::ErrBox;
use rand;
use rand::Rng;
use url::Url;
#[cfg(unix)]
use nix::unistd::{chown as unix_chown, Gid, Uid};
@@ -126,3 +127,31 @@ pub fn chown(_path: &str, _uid: u32, _gid: u32) -> Result<(), ErrBox> {
// TODO: implement chown for Windows
Err(crate::deno_error::op_not_implemented())
}
pub fn resolve_from_cwd(path: &str) -> Result<(PathBuf, String), ErrBox> {
let candidate_path = Path::new(path);
let resolved_path = if candidate_path.is_absolute() {
candidate_path.to_owned()
} else {
let cwd = std::env::current_dir().unwrap();
cwd.join(path)
};
// HACK: `Url::from_file_path` is used here because it normalizes the path.
// Joining "/dev/deno" with "./tests" using `PathBuf` yields "/dev/deno/./tests",
// while joining them using `Url` yields "/dev/deno/tests" - and that's what
// we want.
// There is a similar method on `PathBuf` - `PathBuf::canonicalize` - but the
// problem is that `canonicalize` resolves symlinks, and we don't want that.
// We just want to normalize the path...
let resolved_url = Url::from_file_path(resolved_path)
.expect("PathBuf should be parseable URL");
let normalized_path = resolved_url
.to_file_path()
.expect("URL from PathBuf should be valid path");
let path_string = normalized_path.to_str().unwrap().to_string();
Ok((normalized_path, path_string))
}
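A quick example of the normalization described in the HACK comment above (editor's sketch; assumes the process cwd is "/dev/deno"):

// Editor's sketch; assumes std::env::current_dir() == "/dev/deno".
let (path, path_str) = resolve_from_cwd("./tests").unwrap();
// The "./" segment is normalized away; symlinks are left untouched.
assert_eq!(path_str, "/dev/deno/tests");
assert_eq!(path, PathBuf::from("/dev/deno/tests"));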

@@ -13,12 +13,14 @@ extern crate indexmap;
#[cfg(unix)]
extern crate nix;
extern crate rand;
extern crate url;
mod ansi;
pub mod compiler;
pub mod deno_dir;
pub mod deno_error;
pub mod diagnostics;
mod disk_cache;
mod dispatch_minimal;
pub mod flags;
pub mod fmt_errors;
@@ -45,7 +47,7 @@ mod tokio_write;
pub mod version;
pub mod worker;
use crate::compiler::bundle_async;
use crate::deno_dir::SourceFileFetcher;
use crate::progress::Progress;
use crate::state::ThreadSafeState;
use crate::worker::Worker;
@@ -101,55 +103,77 @@ pub fn print_file_info(
worker: Worker,
module_specifier: &ModuleSpecifier,
) -> impl Future<Item = Worker, Error = ()> {
state::fetch_module_meta_data_and_maybe_compile_async(
&worker.state,
module_specifier,
).and_then(move |out| {
println!(
"{} {}",
ansi::bold("local:".to_string()),
out.filename.to_str().unwrap()
);
let state_ = worker.state.clone();
let module_specifier_ = module_specifier.clone();
println!(
"{} {}",
ansi::bold("type:".to_string()),
msg::enum_name_media_type(out.media_type)
);
if out.maybe_output_code_filename.is_some() {
state_
.dir
.fetch_source_file_async(&module_specifier)
.map_err(|err| println!("{}", err))
.and_then(move |out| {
println!(
"{} {}",
ansi::bold("compiled:".to_string()),
out.maybe_output_code_filename.unwrap().to_str().unwrap(),
ansi::bold("local:".to_string()),
out.filename.to_str().unwrap()
);
}
if out.maybe_source_map_filename.is_some() {
println!(
"{} {}",
ansi::bold("map:".to_string()),
out.maybe_source_map_filename.unwrap().to_str().unwrap()
ansi::bold("type:".to_string()),
msg::enum_name_media_type(out.media_type)
);
}
if let Some(deps) =
worker.state.modules.lock().unwrap().deps(&out.module_name)
{
println!("{}{}", ansi::bold("deps:\n".to_string()), deps.name);
if let Some(ref depsdeps) = deps.deps {
for d in depsdeps {
println!("{}", d);
}
}
} else {
println!(
"{} cannot retrieve full dependency graph",
ansi::bold("deps:".to_string()),
);
}
Ok(worker)
}).map_err(|err| println!("{}", err))
state_
.clone()
.ts_compiler
.compile_async(state_.clone(), &out)
.map_err(|e| {
debug!("compiler error exiting!");
eprintln!("\n{}", e.to_string());
std::process::exit(1);
}).and_then(move |compiled| {
if out.media_type == msg::MediaType::TypeScript {
println!(
"{} {}",
ansi::bold("compiled:".to_string()),
compiled.filename.to_str().unwrap(),
);
}
if let Ok(source_map) = state_
.clone()
.ts_compiler
.get_source_map_file(&module_specifier_)
{
println!(
"{} {}",
ansi::bold("map:".to_string()),
source_map.filename.to_str().unwrap()
);
}
if let Some(deps) = worker
.state
.modules
.lock()
.unwrap()
.deps(&compiled.url.to_string())
{
println!("{}{}", ansi::bold("deps:\n".to_string()), deps.name);
if let Some(ref depsdeps) = deps.deps {
for d in depsdeps {
println!("{}", d);
}
}
} else {
println!(
"{} cannot retrieve full dependency graph",
ansi::bold("deps:".to_string()),
);
}
Ok(worker)
})
})
}
fn create_worker_and_state(
@ -273,7 +297,9 @@ fn bundle_command(flags: DenoFlags, argv: Vec<String>) {
assert!(state.argv.len() >= 3);
let out_file = state.argv[2].clone();
debug!(">>>>> bundle_async START");
let bundle_future = bundle_async(state, main_module.to_string(), out_file)
let bundle_future = state
.ts_compiler
.bundle_async(state.clone(), main_module.to_string(), out_file)
.map_err(|err| {
debug!("diagnostics returned, exiting!");
eprintln!("");

@@ -15,8 +15,8 @@ union Any {
EnvironRes,
Exit,
Fetch,
FetchModuleMetaData,
FetchModuleMetaDataRes,
FetchSourceFile,
FetchSourceFileRes,
FetchRes,
FormatError,
FormatErrorRes,
@@ -241,12 +241,12 @@ table WorkerPostMessage {
// data passed thru the zero-copy data parameter.
}
table FetchModuleMetaData {
table FetchSourceFile {
specifier: string;
referrer: string;
}
table FetchModuleMetaDataRes {
table FetchSourceFileRes {
// If it's a non-http module, moduleName and filename will be the same.
// For http modules, module_name is its resolved http URL, and filename
// is the location of the locally downloaded source code.

@@ -1,7 +1,7 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use atty;
use crate::ansi;
use crate::deno_dir::resolve_from_cwd;
use crate::deno_dir::SourceFileFetcher;
use crate::deno_error;
use crate::deno_error::DenoError;
use crate::deno_error::ErrorKind;
@@ -206,7 +206,7 @@ pub fn op_selector_std(inner_type: msg::Any) -> Option<CliDispatchFn> {
msg::Any::Environ => Some(op_env),
msg::Any::Exit => Some(op_exit),
msg::Any::Fetch => Some(op_fetch),
msg::Any::FetchModuleMetaData => Some(op_fetch_module_meta_data),
msg::Any::FetchSourceFile => Some(op_fetch_source_file),
msg::Any::FormatError => Some(op_format_error),
msg::Any::GetRandomValues => Some(op_get_random_values),
msg::Any::GlobalTimer => Some(op_global_timer),
@@ -411,7 +411,7 @@ fn op_format_error(
assert!(data.is_none());
let inner = base.inner_as_format_error().unwrap();
let json_str = inner.error().unwrap();
let error = JSError::from_json(json_str, &state.dir);
let error = JSError::from_json(json_str, &state.ts_compiler);
let error_string = error.to_string();
let mut builder = FlatBufferBuilder::new();
@@ -472,40 +472,20 @@ fn op_cache(
let module_id = inner.module_id().unwrap();
let contents = inner.contents().unwrap();
state.mark_compiled(&module_id);
// TODO It shouldn't be necessary to call fetch_module_meta_data() here.
// However, we need module_meta_data.source_code in order to calculate the
// cache path. In the future, checksums will not be used in the cache
// filenames and this requirement can be removed. See
// https://github.com/denoland/deno/issues/2057
let module_specifier = ModuleSpecifier::resolve_url(module_id)
.expect("Should be valid module specifier");
let module_meta_data =
state
.dir
.fetch_module_meta_data(&module_specifier, true, true)?;
let (js_cache_path, source_map_path) = state.dir.cache_path(
&PathBuf::from(&module_meta_data.filename),
&module_meta_data.source_code,
);
if extension == ".map" {
debug!("cache {:?}", source_map_path);
fs::write(source_map_path, contents).map_err(ErrBox::from)?;
} else if extension == ".js" {
debug!("cache {:?}", js_cache_path);
fs::write(js_cache_path, contents).map_err(ErrBox::from)?;
} else {
unreachable!();
}
state.ts_compiler.cache_compiler_output(
&module_specifier,
extension,
contents,
)?;
ok_buf(empty_buf())
}
// https://github.com/denoland/deno/blob/golang/os.go#L100-L154
fn op_fetch_module_meta_data(
fn op_fetch_source_file(
state: &ThreadSafeState,
base: &msg::Base<'_>,
data: Option<PinnedBuf>,
@@ -514,36 +494,32 @@ fn op_fetch_module_meta_data(
return Err(deno_error::no_async_support());
}
assert!(data.is_none());
let inner = base.inner_as_fetch_module_meta_data().unwrap();
let inner = base.inner_as_fetch_source_file().unwrap();
let cmd_id = base.cmd_id();
let specifier = inner.specifier().unwrap();
let referrer = inner.referrer().unwrap();
assert_eq!(state.dir.root.join("gen"), state.dir.gen, "Sanity check");
let use_cache = !state.flags.reload;
let no_fetch = state.flags.no_fetch;
let resolved_specifier = state.resolve(specifier, referrer, false)?;
let fut = state
.dir
.fetch_module_meta_data_async(&resolved_specifier, use_cache, no_fetch)
.fetch_source_file_async(&resolved_specifier)
.and_then(move |out| {
let builder = &mut FlatBufferBuilder::new();
let data_off = builder.create_vector(out.source_code.as_slice());
let msg_args = msg::FetchModuleMetaDataResArgs {
module_name: Some(builder.create_string(&out.module_name)),
let msg_args = msg::FetchSourceFileResArgs {
module_name: Some(builder.create_string(&out.url.to_string())),
filename: Some(builder.create_string(&out.filename.to_str().unwrap())),
media_type: out.media_type,
data: Some(data_off),
};
let inner = msg::FetchModuleMetaDataRes::create(builder, &msg_args);
let inner = msg::FetchSourceFileRes::create(builder, &msg_args);
Ok(serialize_response(
cmd_id,
builder,
msg::BaseArgs {
inner: Some(inner.as_union_value()),
inner_type: msg::Any::FetchModuleMetaDataRes,
inner_type: msg::Any::FetchSourceFileRes,
..Default::default()
},
))
@@ -857,7 +833,7 @@ fn op_mkdir(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_mkdir().unwrap();
let (path, path_) = resolve_from_cwd(inner.path().unwrap())?;
let (path, path_) = deno_fs::resolve_from_cwd(inner.path().unwrap())?;
let recursive = inner.recursive();
let mode = inner.mode();
@@ -878,7 +854,7 @@ fn op_chmod(
assert!(data.is_none());
let inner = base.inner_as_chmod().unwrap();
let _mode = inner.mode();
let (path, path_) = resolve_from_cwd(inner.path().unwrap())?;
let (path, path_) = deno_fs::resolve_from_cwd(inner.path().unwrap())?;
state.check_write(&path_)?;
@@ -926,7 +902,8 @@ fn op_open(
assert!(data.is_none());
let cmd_id = base.cmd_id();
let inner = base.inner_as_open().unwrap();
let (filename, filename_) = resolve_from_cwd(inner.filename().unwrap())?;
let (filename, filename_) =
deno_fs::resolve_from_cwd(inner.filename().unwrap())?;
let mode = inner.mode().unwrap();
let mut open_options = tokio::fs::OpenOptions::new();
@@ -1177,7 +1154,7 @@ fn op_remove(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_remove().unwrap();
let (path, path_) = resolve_from_cwd(inner.path().unwrap())?;
let (path, path_) = deno_fs::resolve_from_cwd(inner.path().unwrap())?;
let recursive = inner.recursive();
state.check_write(&path_)?;
@@ -1203,8 +1180,8 @@ fn op_copy_file(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_copy_file().unwrap();
let (from, from_) = resolve_from_cwd(inner.from().unwrap())?;
let (to, to_) = resolve_from_cwd(inner.to().unwrap())?;
let (from, from_) = deno_fs::resolve_from_cwd(inner.from().unwrap())?;
let (to, to_) = deno_fs::resolve_from_cwd(inner.to().unwrap())?;
state.check_read(&from_)?;
state.check_write(&to_)?;
@@ -1278,7 +1255,8 @@ fn op_stat(
assert!(data.is_none());
let inner = base.inner_as_stat().unwrap();
let cmd_id = base.cmd_id();
let (filename, filename_) = resolve_from_cwd(inner.filename().unwrap())?;
let (filename, filename_) =
deno_fs::resolve_from_cwd(inner.filename().unwrap())?;
let lstat = inner.lstat();
state.check_read(&filename_)?;
@@ -1327,7 +1305,7 @@ fn op_read_dir(
assert!(data.is_none());
let inner = base.inner_as_read_dir().unwrap();
let cmd_id = base.cmd_id();
let (path, path_) = resolve_from_cwd(inner.path().unwrap())?;
let (path, path_) = deno_fs::resolve_from_cwd(inner.path().unwrap())?;
state.check_read(&path_)?;
@@ -1383,8 +1361,9 @@ fn op_rename(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_rename().unwrap();
let (oldpath, _) = resolve_from_cwd(inner.oldpath().unwrap())?;
let (newpath, newpath_) = resolve_from_cwd(inner.newpath().unwrap())?;
let (oldpath, _) = deno_fs::resolve_from_cwd(inner.oldpath().unwrap())?;
let (newpath, newpath_) =
deno_fs::resolve_from_cwd(inner.newpath().unwrap())?;
state.check_write(&newpath_)?;
@@ -1402,8 +1381,9 @@ fn op_link(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_link().unwrap();
let (oldname, _) = resolve_from_cwd(inner.oldname().unwrap())?;
let (newname, newname_) = resolve_from_cwd(inner.newname().unwrap())?;
let (oldname, _) = deno_fs::resolve_from_cwd(inner.oldname().unwrap())?;
let (newname, newname_) =
deno_fs::resolve_from_cwd(inner.newname().unwrap())?;
state.check_write(&newname_)?;
@@ -1421,8 +1401,9 @@ fn op_symlink(
) -> CliOpResult {
assert!(data.is_none());
let inner = base.inner_as_symlink().unwrap();
let (oldname, _) = resolve_from_cwd(inner.oldname().unwrap())?;
let (newname, newname_) = resolve_from_cwd(inner.newname().unwrap())?;
let (oldname, _) = deno_fs::resolve_from_cwd(inner.oldname().unwrap())?;
let (newname, newname_) =
deno_fs::resolve_from_cwd(inner.newname().unwrap())?;
state.check_write(&newname_)?;
// TODO Use type for Windows.
@@ -1447,7 +1428,7 @@ fn op_read_link(
assert!(data.is_none());
let inner = base.inner_as_readlink().unwrap();
let cmd_id = base.cmd_id();
let (name, name_) = resolve_from_cwd(inner.name().unwrap())?;
let (name, name_) = deno_fs::resolve_from_cwd(inner.name().unwrap())?;
state.check_read(&name_)?;
@@ -1549,7 +1530,7 @@ fn op_truncate(
assert!(data.is_none());
let inner = base.inner_as_truncate().unwrap();
let (filename, filename_) = resolve_from_cwd(inner.name().unwrap())?;
let (filename, filename_) = deno_fs::resolve_from_cwd(inner.name().unwrap())?;
let len = inner.len();
state.check_write(&filename_)?;

@@ -1,11 +1,11 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
use crate::compiler::compile_async;
use crate::compiler::ModuleMetaData;
use crate::compiler::TsCompiler;
use crate::deno_dir;
use crate::deno_dir::SourceFile;
use crate::deno_dir::SourceFileFetcher;
use crate::flags;
use crate::global_timer::GlobalTimer;
use crate::import_map::ImportMap;
use crate::msg;
use crate::ops;
use crate::permissions::DenoPermissions;
use crate::progress::Progress;
@@ -18,16 +18,13 @@ use deno::ErrBox;
use deno::Loader;
use deno::ModuleSpecifier;
use deno::PinnedBuf;
use futures::future::Either;
use futures::future::Shared;
use futures::Future;
use rand::rngs::StdRng;
use rand::SeedableRng;
use std;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::fs;
use std::ops::Deref;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
@@ -64,12 +61,6 @@ pub struct State {
pub argv: Vec<String>,
pub permissions: DenoPermissions,
pub flags: flags::DenoFlags,
/// When flags contains a `.config_path` option, the content of the
/// configuration file will be resolved and set.
pub config: Option<Vec<u8>>,
/// When flags contains a `.config_path` option, the fully qualified path
/// name of the passed path will be resolved and set.
pub config_path: Option<String>,
/// When flags contains a `.import_map_path` option, the content of the
/// import map file will be resolved and set.
pub import_map: Option<ImportMap>,
@@ -85,10 +76,7 @@ pub struct State {
pub progress: Progress,
pub seeded_rng: Option<Mutex<StdRng>>,
/// Set of all URLs that have been compiled. This is a hacky way to work
/// around the fact that --reload will force multiple compilations of the same
/// module.
compiled: Mutex<HashSet<String>>,
pub ts_compiler: TsCompiler,
}
impl Clone for ThreadSafeState {
@@ -114,37 +102,25 @@ impl ThreadSafeState {
}
}
pub fn fetch_module_meta_data_and_maybe_compile_async(
pub fn fetch_source_file_and_maybe_compile_async(
state: &ThreadSafeState,
module_specifier: &ModuleSpecifier,
) -> impl Future<Item = ModuleMetaData, Error = ErrBox> {
) -> impl Future<Item = SourceFile, Error = ErrBox> {
let state_ = state.clone();
let use_cache =
!state_.flags.reload || state_.has_compiled(&module_specifier.to_string());
let no_fetch = state_.flags.no_fetch;
state_
.dir
.fetch_module_meta_data_async(&module_specifier, use_cache, no_fetch)
.fetch_source_file_async(&module_specifier)
.and_then(move |out| {
if out.media_type == msg::MediaType::TypeScript
&& !out.has_output_code_and_source_map()
{
debug!(">>>>> compile_sync START");
Either::A(
compile_async(state_.clone(), &out)
.map_err(|e| {
debug!("compiler error exiting!");
eprintln!("\n{}", e.to_string());
std::process::exit(1);
}).and_then(move |out| {
debug!(">>>>> compile_sync END");
Ok(out)
}),
)
} else {
Either::B(futures::future::ok(out))
}
state_
.clone()
.ts_compiler
.compile_async(state_.clone(), &out)
.map_err(|e| {
debug!("compiler error exiting!");
eprintln!("\n{}", e.to_string());
std::process::exit(1);
})
})
}
@@ -174,13 +150,14 @@ impl Loader for ThreadSafeState {
) -> Box<deno::SourceCodeInfoFuture> {
self.metrics.resolve_count.fetch_add(1, Ordering::SeqCst);
Box::new(
fetch_module_meta_data_and_maybe_compile_async(self, module_specifier)
.map(|module_meta_data| deno::SourceCodeInfo {
// Real module name, might be different from initial URL
fetch_source_file_and_maybe_compile_async(self, module_specifier).map(
|source_file| deno::SourceCodeInfo {
// Real module name, might be different from initial specifier
// due to redirections.
code: module_meta_data.js_source(),
module_name: module_meta_data.module_name,
}),
code: source_file.js_source(),
module_name: source_file.url.to_string(),
},
),
)
}
}
@@ -200,47 +177,12 @@ impl ThreadSafeState {
let external_channels = (worker_in_tx, worker_out_rx);
let resource = resources::add_worker(external_channels);
// take the passed flag and resolve the file name relative to the cwd
let config_file = match &flags.config_path {
Some(config_file_name) => {
debug!("Compiler config file: {}", config_file_name);
let cwd = std::env::current_dir().unwrap();
Some(cwd.join(config_file_name))
}
_ => None,
};
// Convert the PathBuf to a canonicalized string. This is needed by the
// compiler to properly deal with the configuration.
let config_path = match &config_file {
Some(config_file) => Some(
config_file
.canonicalize()
.unwrap()
.to_str()
.unwrap()
.to_owned(),
),
_ => None,
};
// Load the contents of the configuration file
let config = match &config_file {
Some(config_file) => {
debug!("Attempt to load config: {}", config_file.to_str().unwrap());
match fs::read(&config_file) {
Ok(config_data) => Some(config_data.to_owned()),
_ => panic!(
"Error retrieving compiler config file at \"{}\"",
config_file.to_str().unwrap()
),
}
}
_ => None,
};
let dir =
deno_dir::DenoDir::new(custom_root, &config, progress.clone()).unwrap();
let dir = deno_dir::DenoDir::new(
custom_root,
progress.clone(),
!flags.reload,
flags.no_fetch,
).unwrap();
let main_module: Option<ModuleSpecifier> = if argv_rest.len() <= 1 {
None
@@ -278,6 +220,9 @@ impl ThreadSafeState {
let modules = Arc::new(Mutex::new(deno::Modules::new()));
let ts_compiler =
TsCompiler::new(dir.clone(), !flags.reload, flags.config_path.clone());
ThreadSafeState(Arc::new(State {
main_module,
modules,
@@ -285,8 +230,6 @@ impl ThreadSafeState {
argv: argv_rest,
permissions: DenoPermissions::from_flags(&flags),
flags,
config,
config_path,
import_map,
metrics: Metrics::default(),
worker_channels: Mutex::new(internal_channels),
@@ -297,7 +240,7 @@ impl ThreadSafeState {
dispatch_selector,
progress,
seeded_rng,
compiled: Mutex::new(HashSet::new()),
ts_compiler,
}))
}
@@ -309,16 +252,6 @@ impl ThreadSafeState {
}
}
pub fn mark_compiled(&self, module_id: &str) {
let mut c = self.compiled.lock().unwrap();
c.insert(module_id.to_string());
}
pub fn has_compiled(&self, module_id: &str) -> bool {
let c = self.compiled.lock().unwrap();
c.contains(module_id)
}
#[inline]
pub fn check_read(&self, filename: &str) -> Result<(), ErrBox> {
self.permissions.check_read(filename)

@@ -34,7 +34,7 @@ impl Worker {
});
let state_ = state.clone();
i.set_js_error_create(move |v8_exception| {
JSError::from_v8_exception(v8_exception, &state_.dir)
JSError::from_v8_exception(v8_exception, &state_.ts_compiler)
})
}
Self { isolate, state }

@@ -107,7 +107,7 @@ const ignoredCompilerOptions: ReadonlyArray<string> = [
"watch"
];
interface ModuleMetaData {
interface SourceFile {
moduleName: string | undefined;
filename: string | undefined;
mediaType: msg.MediaType;
@@ -120,37 +120,34 @@ interface EmitResult {
}
/** Ops to Rust to resolve and fetch a module's meta data. */
function fetchModuleMetaData(
specifier: string,
referrer: string
): ModuleMetaData {
util.log("compiler.fetchModuleMetaData", { specifier, referrer });
// Send FetchModuleMetaData message
function fetchSourceFile(specifier: string, referrer: string): SourceFile {
util.log("compiler.fetchSourceFile", { specifier, referrer });
// Send FetchSourceFile message
const builder = flatbuffers.createBuilder();
const specifier_ = builder.createString(specifier);
const referrer_ = builder.createString(referrer);
const inner = msg.FetchModuleMetaData.createFetchModuleMetaData(
const inner = msg.FetchSourceFile.createFetchSourceFile(
builder,
specifier_,
referrer_
);
const baseRes = sendSync(builder, msg.Any.FetchModuleMetaData, inner);
const baseRes = sendSync(builder, msg.Any.FetchSourceFile, inner);
assert(baseRes != null);
assert(
msg.Any.FetchModuleMetaDataRes === baseRes!.innerType(),
msg.Any.FetchSourceFileRes === baseRes!.innerType(),
`base.innerType() unexpectedly is ${baseRes!.innerType()}`
);
const fetchModuleMetaDataRes = new msg.FetchModuleMetaDataRes();
assert(baseRes!.inner(fetchModuleMetaDataRes) != null);
const dataArray = fetchModuleMetaDataRes.dataArray();
const fetchSourceFileRes = new msg.FetchSourceFileRes();
assert(baseRes!.inner(fetchSourceFileRes) != null);
const dataArray = fetchSourceFileRes.dataArray();
const decoder = new TextDecoder();
const sourceCode = dataArray ? decoder.decode(dataArray) : undefined;
// flatbuffers returns `null` for an empty value; this does not fit well with
// idiomatic TypeScript under strict null checks, so we convert to `undefined`
return {
moduleName: fetchModuleMetaDataRes.moduleName() || undefined,
filename: fetchModuleMetaDataRes.filename() || undefined,
mediaType: fetchModuleMetaDataRes.mediaType(),
moduleName: fetchSourceFileRes.moduleName() || undefined,
filename: fetchSourceFileRes.filename() || undefined,
mediaType: fetchSourceFileRes.mediaType(),
sourceCode
};
}
@@ -235,7 +232,7 @@ class Host implements ts.CompilerHost {
target: ts.ScriptTarget.ESNext
};
private _resolveModule(specifier: string, referrer: string): ModuleMetaData {
private _resolveModule(specifier: string, referrer: string): SourceFile {
// Handle built-in assets specially.
if (specifier.startsWith(ASSETS)) {
const moduleName = specifier.split("/").pop()!;
@ -251,7 +248,7 @@ class Host implements ts.CompilerHost {
sourceCode
};
}
return fetchModuleMetaData(specifier, referrer);
return fetchSourceFile(specifier, referrer);
}
/* Deno specific APIs */
@@ -345,13 +342,13 @@ class Host implements ts.CompilerHost {
): ts.SourceFile | undefined {
assert(!shouldCreateNewSourceFile);
util.log("getSourceFile", fileName);
const moduleMetaData = this._resolveModule(fileName, ".");
if (!moduleMetaData || !moduleMetaData.sourceCode) {
const SourceFile = this._resolveModule(fileName, ".");
if (!SourceFile || !SourceFile.sourceCode) {
return undefined;
}
return ts.createSourceFile(
fileName,
moduleMetaData.sourceCode,
SourceFile.sourceCode,
languageVersion
);
}
@@ -367,16 +364,16 @@ class Host implements ts.CompilerHost {
util.log("resolveModuleNames()", { moduleNames, containingFile });
return moduleNames.map(
(moduleName): ts.ResolvedModuleFull | undefined => {
const moduleMetaData = this._resolveModule(moduleName, containingFile);
if (moduleMetaData.moduleName) {
const resolvedFileName = moduleMetaData.moduleName;
const SourceFile = this._resolveModule(moduleName, containingFile);
if (SourceFile.moduleName) {
const resolvedFileName = SourceFile.moduleName;
// This flags to the compiler not to go looking to transpile functional
// code; anything that is in `/$asset$/` is just library code
const isExternalLibraryImport = moduleName.startsWith(ASSETS);
const r = {
resolvedFileName,
isExternalLibraryImport,
extension: getExtension(resolvedFileName, moduleMetaData.mediaType)
extension: getExtension(resolvedFileName, SourceFile.mediaType)
};
return r;
} else {

@@ -1,2 +1,3 @@
args: run --reload tests/024_import_no_ext_with_headers.ts
output: tests/024_import_no_ext_with_headers.ts.out
# FIXME(bartlomieju): this test should use remote file
# args: run --reload tests/024_import_no_ext_with_headers.ts
# output: tests/024_import_no_ext_with_headers.ts.out

@@ -4,7 +4,7 @@
at maybeError (js/errors.ts:[WILDCARD])
at maybeThrowError (js/errors.ts:[WILDCARD])
at sendSync (js/dispatch.ts:[WILDCARD])
at fetchModuleMetaData (js/compiler.ts:[WILDCARD])
at fetchSourceFile (js/compiler.ts:[WILDCARD])
at _resolveModule (js/compiler.ts:[WILDCARD])
at js/compiler.ts:[WILDCARD]
at resolveModuleNames (js/compiler.ts:[WILDCARD])

@@ -4,7 +4,7 @@
at maybeError (js/errors.ts:[WILDCARD])
at maybeThrowError (js/errors.ts:[WILDCARD])
at sendSync (js/dispatch.ts:[WILDCARD])
at fetchModuleMetaData (js/compiler.ts:[WILDCARD])
at fetchSourceFile (js/compiler.ts:[WILDCARD])
at _resolveModule (js/compiler.ts:[WILDCARD])
at js/compiler.ts:[WILDCARD]
at resolveModuleNamesWorker ([WILDCARD])

@@ -4,7 +4,7 @@
at maybeError (js/errors.ts:[WILDCARD])
at maybeThrowError (js/errors.ts:[WILDCARD])
at sendSync (js/dispatch.ts:[WILDCARD])
at fetchModuleMetaData (js/compiler.ts:[WILDCARD])
at fetchSourceFile (js/compiler.ts:[WILDCARD])
at _resolveModule (js/compiler.ts:[WILDCARD])
at js/compiler.ts:[WILDCARD]
at resolveModuleNamesWorker ([WILDCARD])

@@ -4,7 +4,7 @@
at maybeError (js/errors.ts:[WILDCARD])
at maybeThrowError (js/errors.ts:[WILDCARD])
at sendSync (js/dispatch.ts:[WILDCARD])
at fetchModuleMetaData (js/compiler.ts:[WILDCARD])
at fetchSourceFile (js/compiler.ts:[WILDCARD])
at _resolveModule (js/compiler.ts:[WILDCARD])
at js/compiler.ts:[WILDCARD]
at resolveModuleNames (js/compiler.ts:[WILDCARD])

@@ -4,7 +4,7 @@
at maybeError (js/errors.ts:[WILDCARD])
at maybeThrowError (js/errors.ts:[WILDCARD])
at sendSync (js/dispatch.ts:[WILDCARD])
at fetchModuleMetaData (js/compiler.ts:[WILDCARD])
at fetchSourceFile (js/compiler.ts:[WILDCARD])
at _resolveModule (js/compiler.ts:[WILDCARD])
at js/compiler.ts:[WILDCARD]
at resolveModuleNames (js/compiler.ts:[WILDCARD])

@@ -28,17 +28,23 @@ class TestDenoDir(DenoTestCase):
self.run_deno()
assert not os.path.isdir(deno_dir)
# TODO(bartlomieju): re-enable or rewrite these tests
# now that all cache directories are lazily created
# Run deno with DENO_DIR env flag
self.run_deno(deno_dir)
assert os.path.isdir(deno_dir)
assert os.path.isdir(os.path.join(deno_dir, "deps"))
assert os.path.isdir(os.path.join(deno_dir, "gen"))
rmtree(deno_dir)
# self.run_deno(deno_dir)
# assert os.path.isdir(deno_dir)
# assert os.path.isdir(os.path.join(deno_dir, "deps"))
# assert os.path.isdir(os.path.join(deno_dir, "gen"))
# rmtree(deno_dir)
def run_deno(self, deno_dir=None):
cmd = [self.deno_exe, "run", "tests/002_hello.ts"]
cmd = [
self.deno_exe, "run",
"http://localhost:4545/tests/subdir/print_hello.ts"
]
deno_dir_env = {"DENO_DIR": deno_dir} if deno_dir is not None else None
res = run_output(cmd, quiet=True, env=deno_dir_env)
print res.code, res.out, res.err
self.assertEqual(res.code, 0)

@@ -55,7 +55,12 @@ class TestIntegrations(DenoTestCase):
test_abs = os.path.join(tests_path, test_filename)
test = read_test(test_abs)
exit_code = int(test.get("exit_code", 0))
args = test.get("args", "").split(" ")
args = test.get("args", None)
if not args:
return
args = args.split(" ")
check_stderr = str2bool(test.get("check_stderr", "false"))
stderr = subprocess.STDOUT if check_stderr else open(os.devnull, 'w')
stdin_input = (test.get("input",
@@ -87,13 +92,13 @@ class TestIntegrations(DenoTestCase):
actual_code = e.returncode
actual_out = e.output
self.assertEqual(exit_code, actual_code)
actual_out = strip_ansi_codes(actual_out)
if not pattern_match(expected_out, actual_out):
# This will always throw since pattern_match failed.
self.assertEqual(expected_out, actual_out)
self.assertEqual(exit_code, actual_code)
# Add a methods for each test file in tests_path.
for fn in sorted(