@@ -1,6 +1,7 @@
 // SPDX-License-Identifier: Apache-2.0

 use crate::sema::ast;
+use normalize_path::NormalizePath;
 use solang_parser::pt::Loc;
 use std::collections::HashMap;
 use std::ffi::{OsStr, OsString};
@@ -10,13 +11,14 @@ use std::io::{prelude::*, Error, ErrorKind};
 use std::path::{Path, PathBuf};
 use std::sync::Arc;

+#[derive(Default)]
 pub struct FileResolver {
     /// Set of import paths search for imports
     import_paths: Vec<(Option<OsString>, PathBuf)>,
-    /// List file by import path
+    /// List file by path
     cached_paths: HashMap<PathBuf, usize>,
     /// The actual file contents
-    files: Vec<Arc<str>>,
+    files: Vec<ResolvedFile>,
 }

 /// When we resolve a file, we need to know its base compared to the import so
@@ -25,37 +27,17 @@ pub struct FileResolver {
 /// user exactly which file has errors/warnings.
 #[derive(Clone, Debug)]
 pub struct ResolvedFile {
+    /// Original name used on cli or import statement
+    pub path: OsString,
     /// Full path on the filesystem
     pub full_path: PathBuf,
     /// Which import path was used, if any
-    import_no: usize,
-    // Base part relative to import
-    base: PathBuf,
-}
-
-impl ResolvedFile {
-    /// Get the import number associated with this `ResolvedFile`
-    pub fn get_import_no(&self) -> usize {
-        self.import_no
-    }
-}
-
-impl Default for FileResolver {
-    fn default() -> Self {
-        FileResolver::new()
-    }
+    pub import_no: Option<usize>,
+    /// The actual file contents
+    pub contents: Arc<str>,
 }

 impl FileResolver {
-    /// Create a new file resolver
-    pub fn new() -> Self {
-        FileResolver {
-            import_paths: Vec::new(),
-            cached_paths: HashMap::new(),
-            files: Vec::new(),
-        }
-    }
-
     /// Add import path
     pub fn add_import_path(&mut self, path: &Path) -> io::Result<()> {
         self.import_paths.push((None, path.canonicalize()?));
@@ -96,14 +78,21 @@ impl FileResolver {
     pub fn set_file_contents(&mut self, path: &str, contents: String) {
         let pos = self.files.len();

-        self.files.push(Arc::from(contents));
+        let pathbuf = PathBuf::from(path);
+
+        self.files.push(ResolvedFile {
+            path: path.into(),
+            full_path: pathbuf.clone(),
+            contents: Arc::from(contents),
+            import_no: None,
+        });

-        self.cached_paths.insert(PathBuf::from(path), pos);
+        self.cached_paths.insert(pathbuf, pos);
     }

     /// Get the file contents of `file_no`th file if it exists
     pub fn get_contents_of_file_no(&self, file_no: usize) -> Option<Arc<str>> {
-        self.files.get(file_no).cloned()
+        self.files.get(file_no).map(|f| f.contents.clone())
     }

     /// Get file with contents. This must be a file which was previously
@@ -111,13 +100,47 @@ impl FileResolver {
     pub fn get_file_contents_and_number(&self, file: &Path) -> (Arc<str>, usize) {
         let file_no = self.cached_paths[file];

-        (self.files[file_no].clone(), file_no)
+        (self.files[file_no].contents.clone(), file_no)
+    }
+
+    /// Attempt to resolve a file, either from the cache or from the filesystem.
+    /// Returns Ok(Some(..)) if the file is found and loaded.
+    /// Returns Ok(None) if no file by this path can be found.
+    /// Returns Err(..) if a file was found but could not be read.
+    fn try_file(
+        &mut self,
+        filename: &OsStr,
+        path: &Path,
+        import_no: Option<usize>,
+    ) -> Result<Option<ResolvedFile>, String> {
+        // For accessing the cache, remove "." and ".." path components
+        let cache_path = path.normalize();
+
+        if let Some(cache) = self.cached_paths.get(&cache_path) {
+            let mut file = self.files[*cache].clone();
+            file.import_no = import_no;
+            return Ok(Some(file));
+        }
+
+        if let Ok(full_path) = path.canonicalize() {
+            let file = self.load_file(filename, &full_path, import_no)?;
+            return Ok(Some(file.clone()));
+        }
+
+        Ok(None)
     }

     /// Populate the cache with absolute file path
-    fn load_file(&mut self, path: &Path) -> Result<(), String> {
-        if self.cached_paths.get(path).is_some() {
-            return Ok(());
+    fn load_file(
+        &mut self,
+        filename: &OsStr,
+        path: &Path,
+        import_no: Option<usize>,
+    ) -> Result<&ResolvedFile, String> {
+        if let Some(cache) = self.cached_paths.get(path) {
+            if self.files[*cache].import_no == import_no {
+                return Ok(&self.files[*cache]);
+            }
         }

         // read the file
@@ -139,11 +162,16 @@ impl FileResolver {

         let pos = self.files.len();

-        self.files.push(Arc::from(contents));
+        self.files.push(ResolvedFile {
+            path: filename.into(),
+            full_path: path.to_path_buf(),
+            import_no,
+            contents: Arc::from(contents),
+        });

         self.cached_paths.insert(path.to_path_buf(), pos);

-        Ok(())
+        Ok(&self.files[pos])
     }

     /// Walk the import path to search for a file. If no import path is set up,
@@ -156,107 +184,59 @@ impl FileResolver {
     ) -> Result<ResolvedFile, String> {
         let path = PathBuf::from(filename);

-        let path = if let Ok(m) = path.strip_prefix("./") {
-            m.to_path_buf()
-        } else {
-            path
-        };
+        // Only when the path starts with ./ or ../ are relative paths considered; this means
+        // that `import "b.sol";` will check the import paths for b.sol, while `import "./b.sol";`
+        // will only check the path relative to the current file.
+        if path.starts_with("./") || path.starts_with("../") {
+            if let Some(ResolvedFile {
+                import_no,
+                full_path,
+                ..
+            }) = parent
+            {
+                let curdir = PathBuf::from(".");
+                let base = full_path.parent().unwrap_or(&curdir);
+                let path = base.join(&path);
+
+                if let Some(file) = self.try_file(filename, &path, *import_no)? {
+                    return Ok(file);
+                }
+            }
+
+            return Err(format!("file not found '{}'", filename.to_string_lossy()));
+        }
+
+        if let Some(file) = self.try_file(filename, &path, None)? {
+            return Ok(file);
+        } else if path.is_absolute() {
+            return Err(format!("file not found '{}'", filename.to_string_lossy()));
+        }

         // first check maps
         let mut iter = path.iter();
         if let Some(first_part) = iter.next() {
             let relpath: &PathBuf = &iter.collect();

-            for (import_no, import) in self.import_paths.iter().enumerate() {
-                if let (Some(mapping), import_path) = import {
+            for import_no in 0..self.import_paths.len() {
+                if let (Some(mapping), import_path) = &self.import_paths[import_no] {
                     if first_part == mapping {
-                        if let Ok(full_path) = import_path.join(relpath).canonicalize() {
-                            self.load_file(&full_path)?;
-                            let base = full_path
-                                .parent()
-                                .expect("path should include filename")
-                                .to_path_buf();
-
-                            return Ok(ResolvedFile {
-                                full_path,
-                                import_no,
-                                base,
-                            });
+                        let path = import_path.join(relpath);
+
+                        if let Some(file) = self.try_file(filename, &path, Some(import_no))? {
+                            return Ok(file);
                         }
                     }
                 }
             }
         }

-        let mut start_import_no = 0;
-
-        // first try relative to the parent
-        if let Some(ResolvedFile {
-            import_no, base, ..
-        }) = parent
-        {
-            if self.import_paths.is_empty() {
-                // we have no import paths, resolve by what's in the cache
-                let full_path = base.join(&path);
-                let base = full_path
-                    .parent()
-                    .expect("path should include filename")
-                    .to_path_buf();
-
-                return Ok(ResolvedFile {
-                    full_path,
-                    base,
-                    import_no: 0,
-                });
-            } else if let Ok(full_path) = base.join(&path).canonicalize() {
-                self.load_file(&full_path)?;
-                let base = full_path
-                    .parent()
-                    .expect("path should include filename")
-                    .to_path_buf();
-
-                return Ok(ResolvedFile {
-                    full_path,
-                    base,
-                    import_no: 0,
-                });
-            }
-
-            // start with this import
-            start_import_no = *import_no;
-        }
-
-        if self.cached_paths.contains_key(&path) {
-            let full_path = path;
-            let base = full_path
-                .parent()
-                .expect("path should include filename")
-                .to_path_buf();
-
-            return Ok(ResolvedFile {
-                full_path,
-                base,
-                import_no: 0,
-            });
-        }
-
         // walk over the import paths until we find one that resolves
-        for i in 0..self.import_paths.len() {
-            let import_no = (i + start_import_no) % self.import_paths.len();
-
+        for import_no in 0..self.import_paths.len() {
             if let (None, import_path) = &self.import_paths[import_no] {
-                if let Ok(full_path) = import_path.join(&path).canonicalize() {
-                    let base = full_path
-                        .parent()
-                        .expect("path should include filename")
-                        .to_path_buf();
-                    self.load_file(&full_path)?;
-
-                    return Ok(ResolvedFile {
-                        full_path,
-                        import_no,
-                        base,
-                    });
+                let path = import_path.join(&path);
+
+                if let Some(file) = self.try_file(filename, &path, Some(import_no))? {
+                    return Ok(file);
                 }
             }
         }
@@ -280,6 +260,7 @@ impl FileResolver {
         let (end_line, mut end_column) = file.offset_to_line_column(*end);

         let mut full_line = self.files[cache_no]
+            .contents
             .lines()
             .nth(begin_line)
             .unwrap()
@@ -288,7 +269,7 @@ impl FileResolver {
         // If the loc spans across multiple lines, we concatenate them
         if begin_line != end_line {
             for i in begin_line + 1..=end_line {
-                let line = self.files[cache_no].lines().nth(i).unwrap();
+                let line = self.files[cache_no].contents.lines().nth(i).unwrap();
                 if i == end_line {
                     end_column += full_line.len();
                 }
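
Usage sketch (illustrative only, not part of the patch above): how the reworked resolver is expected to behave, assuming `FileResolver` is exposed as `solang::file_resolver::FileResolver`, that `resolve_file` is public with roughly the signature the hunks imply (`fn resolve_file(&mut self, parent: Option<&ResolvedFile>, filename: &OsStr) -> Result<ResolvedFile, String>`), and that `NormalizePath::normalize` strips "." components. The file names are hypothetical and everything is served from the in-memory cache, so no filesystem access is needed.

    use std::ffi::OsStr;
    use std::path::PathBuf;

    use solang::file_resolver::FileResolver; // module path assumed

    fn main() -> Result<(), String> {
        // #[derive(Default)] replaces the removed FileResolver::new()
        let mut resolver = FileResolver::default();

        // Seed the cache with in-memory sources; set_file_contents() stores a
        // ResolvedFile with import_no: None and full_path equal to the given path.
        resolver.set_file_contents("contract.sol", "import \"./lib.sol\";".to_string());
        resolver.set_file_contents("lib.sol", "contract Lib {}".to_string());

        // Top-level file: no parent and no "./" prefix, found directly in the cache.
        let parent = resolver.resolve_file(None, OsStr::new("contract.sol"))?;

        // "./lib.sol" starts with "./", so it is joined to the importing file's
        // directory, normalized to "lib.sol", and looked up in the cache.
        let imported = resolver.resolve_file(Some(&parent), OsStr::new("./lib.sol"))?;
        assert_eq!(imported.full_path, PathBuf::from("lib.sol"));

        Ok(())
    }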