Remove exact text position and fix recursive import error (#10)
cakevm authored Jan 9, 2025
1 parent 10c447e commit d119973
Showing 13 changed files with 81 additions and 44 deletions.
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,17 @@
# Node cache changelog

## [Unreleased]
- First stable release of `huff-neo`
- Update all dependencies to the latest version
- Restructure the project into a modern crate layout
- Replace `ethers` with `alloy`
- Remove spinner animation [#4](https://github.com/cakevm/huff-neo/pull/4)
- Fix relative position error [#6](https://github.com/cakevm/huff-neo/pull/6)
- Currently, this still fails for nested imports
- Replace `tiny-keccak` with the alloy version
- Add error for duplicate labels in the same macro [#7](https://github.com/cakevm/huff-neo/pull/7)
- Fix recursion error for an empty import, e.g. `#include ""`
- Fix error for nested imports with recurring imports

## [0.0.4] - 2025-01-09
- Improve error handling
16 changes: 8 additions & 8 deletions Cargo.lock

Some generated files are not rendered by default.

16 changes: 11 additions & 5 deletions crates/core/benches/huff_benchmark.rs
@@ -8,6 +8,7 @@ use huff_neo_utils::file::file_source::FileSource;
use huff_neo_utils::file::full_file_source::FullFileSource;
use huff_neo_utils::file::remapper;
use huff_neo_utils::prelude::*;
use std::collections::HashSet;
use std::{path::PathBuf, sync::Arc};

fn lex_erc20_from_source_benchmark(c: &mut Criterion) {
@@ -20,7 +21,8 @@ fn lex_erc20_from_source_benchmark(c: &mut Criterion) {

// Recurse file deps + generate flattened source
let file_source = file_sources.first().unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new()).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };

@@ -43,7 +45,8 @@ fn parse_erc20_benchmark(c: &mut Criterion) {

// Recurse file deps + generate flattened source
let file_source = file_sources.first().unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new()).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };

@@ -70,7 +73,8 @@ fn codegen_erc20_benchmark(c: &mut Criterion) {

// Recurse file deps + generate flattened source
let file_source = file_sources.first().unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new()).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };

@@ -116,7 +120,8 @@ fn erc20_compilation_benchmark(c: &mut Criterion) {
let recursed_file_source = Compiler::recurse_deps(
Arc::clone(file_source),
&remapper::Remapper::new("./"),
file_provider
file_provider,
HashSet::new()
).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
@@ -163,7 +168,8 @@ fn erc721_compilation_benchmark(c: &mut Criterion) {
let recursed_file_source = Compiler::recurse_deps(
Arc::clone(file_source),
&remapper::Remapper::new("./"),
file_provider
file_provider,
HashSet::new()
).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource {
24 changes: 20 additions & 4 deletions crates/core/src/lib.rs
@@ -16,6 +16,7 @@ use huff_neo_utils::wasm::IntoParallelIterator;
use huff_neo_utils::{prelude::*, time};
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
use rayon::prelude::*;
use std::collections::HashSet;
use std::{
collections::{BTreeMap, HashMap},
ffi::OsString,
@@ -215,8 +216,10 @@ impl<'a, 'l> Compiler<'a, 'l> {
None => {
tracing::debug!(target: "core", "FINISHED RECURSING DEPENDENCIES!");
// Parallel Dependency Resolution
let recursed_file_sources: Vec<Result<Arc<FileSource>, Arc<CompilerError>>> =
files.into_par_iter().map(|v| Self::recurse_deps(v, &Remapper::new("./"), self.file_provider.clone())).collect();
let recursed_file_sources: Vec<Result<Arc<FileSource>, Arc<CompilerError>>> = files
.into_par_iter()
.map(|v| Self::recurse_deps(v, &Remapper::new("./"), self.file_provider.clone(), HashSet::new()))
.collect();

// Collect Recurse Deps errors and try to resolve to the first one
let mut errors = recursed_file_sources.iter().filter_map(|rfs| rfs.as_ref().err()).collect::<Vec<&Arc<CompilerError>>>();
@@ -284,7 +287,9 @@ impl<'a, 'l> Compiler<'a, 'l> {

let recursed_file_sources: Vec<Result<Arc<FileSource>, Arc<CompilerError>>> = files
.into_par_iter()
.map(|f| Self::recurse_deps(f, &huff_neo_utils::file::remapper::Remapper::new("./"), self.file_provider.clone()))
.map(|f| {
Self::recurse_deps(f, &huff_neo_utils::file::remapper::Remapper::new("./"), self.file_provider.clone(), HashSet::new())
})
.collect();

// Collect Recurse Deps errors and try to resolve to the first one
@@ -463,8 +468,14 @@ impl<'a, 'l> Compiler<'a, 'l> {
fs: Arc<FileSource>,
remapper: &Remapper,
reader: Arc<dyn FileProvider>,
mut walk_level: HashSet<String>,
) -> Result<Arc<FileSource>, Arc<CompilerError>> {
tracing::debug!(target: "core", "RECURSING DEPENDENCIES FOR {}", fs.path);
// Check if we've already walked this path
if walk_level.contains(&fs.path) {
return Ok(Arc::new(FileSource::default()));
}
walk_level.insert(fs.path.clone());
let mut new_fs = FileSource { path: fs.path.clone(), ..Default::default() };
let file_source = if let Some(s) = &fs.source {
s.clone()
@@ -486,6 +497,11 @@ impl<'a, 'l> Compiler<'a, 'l> {
tracing::info!(target: "core", "IMPORT LEXICAL ANALYSIS COMPLETE ON {:?}", imports);
}

// Check that no empty import paths are present
if imports.iter().any(|i| i.is_empty()) {
return Err(Arc::new(CompilerError::EmptyImportPath(OsString::from(&fs.path))));
}

let localized_imports: Vec<String> = imports
.into_iter()
.map(|mut import| {
@@ -523,7 +539,7 @@ impl<'a, 'l> Compiler<'a, 'l> {
// Now that we have all the file sources, we have to recurse and get their source
file_sources = file_sources
.into_par_iter()
.map(|inner_fs| match Self::recurse_deps(Arc::clone(&inner_fs), remapper, reader.clone()) {
.map(|inner_fs| match Self::recurse_deps(Arc::clone(&inner_fs), remapper, reader.clone(), walk_level.clone()) {
Ok(new_fs) => new_fs,
Err(e) => {
tracing::error!(target: "core", "NESTED DEPENDENCY RESOLUTION FAILED: \"{:?}\"", e);
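
The hunk above is the core of the recursive-import fix: `recurse_deps` now receives a `HashSet<String>` of already-visited paths and returns early when a path reappears, so circular `#include` chains terminate. Below is a minimal, self-contained sketch of that guard pattern; the types and names are simplified stand-ins, not the real `FileSource`/`FileProvider` API.

```rust
use std::collections::{HashMap, HashSet};

// Hypothetical, trimmed-down stand-in for a source file and its imports.
struct Source {
    imports: Vec<String>,
}

// Resolve imports depth-first, skipping any path already visited on this
// branch -- the same role the `walk_level` HashSet plays in recurse_deps.
fn recurse(path: &str, sources: &HashMap<String, Source>, mut seen: HashSet<String>) -> Vec<String> {
    if seen.contains(path) {
        // Already walked: break the cycle instead of recursing forever.
        return Vec::new();
    }
    seen.insert(path.to_string());
    let mut resolved = vec![path.to_string()];
    if let Some(src) = sources.get(path) {
        for import in &src.imports {
            // Clone per branch, mirroring `walk_level.clone()` in the diff:
            // sibling imports may share a dependency, but one branch never loops.
            resolved.extend(recurse(import, sources, seen.clone()));
        }
    }
    resolved
}

fn main() {
    let mut sources = HashMap::new();
    sources.insert("a.huff".to_string(), Source { imports: vec!["b.huff".to_string()] });
    sources.insert("b.huff".to_string(), Source { imports: vec!["a.huff".to_string()] }); // circular import
    println!("{:?}", recurse("a.huff", &sources, HashSet::new())); // ["a.huff", "b.huff"]
}
```
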
7 changes: 4 additions & 3 deletions crates/core/tests/erc20.rs
@@ -1,5 +1,3 @@
use std::{path::PathBuf, sync::Arc};

use huff_neo_codegen::Codegen;
use huff_neo_core::*;
use huff_neo_lexer::*;
@@ -9,6 +7,8 @@ use huff_neo_utils::file::file_source::FileSource;
use huff_neo_utils::file::full_file_source::FullFileSource;
use huff_neo_utils::file::remapper;
use huff_neo_utils::prelude::*;
use std::collections::HashSet;
use std::{path::PathBuf, sync::Arc};

#[test]
fn test_erc20_compile() {
@@ -21,7 +21,8 @@ fn test_erc20_compile() {

// Recurse file deps + generate flattened source
let file_source = file_sources.first().unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new()).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };
let lexer = Lexer::new(full_source);
7 changes: 4 additions & 3 deletions crates/core/tests/erc721.rs
@@ -1,5 +1,3 @@
use std::{path::PathBuf, sync::Arc};

use huff_neo_codegen::Codegen;
use huff_neo_core::*;
use huff_neo_lexer::*;
@@ -9,6 +7,8 @@ use huff_neo_utils::file::file_source::FileSource;
use huff_neo_utils::file::full_file_source::FullFileSource;
use huff_neo_utils::file::remapper;
use huff_neo_utils::prelude::*;
use std::collections::HashSet;
use std::{path::PathBuf, sync::Arc};

#[test]
fn test_erc721_compile() {
@@ -21,7 +21,8 @@ fn test_erc721_compile() {

// Recurse file deps + generate flattened source
let file_source = file_sources.first().unwrap();
let recursed_file_source = Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider).unwrap();
let recursed_file_source =
Compiler::recurse_deps(Arc::clone(file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new()).unwrap();
let flattened = FileSource::fully_flatten(Arc::clone(&recursed_file_source));
let full_source = FullFileSource { source: &flattened.0, file: Some(Arc::clone(file_source)), spans: flattened.1 };
let lexer = Lexer::new(full_source);
8 changes: 4 additions & 4 deletions crates/core/tests/recurse_deps.rs
@@ -1,8 +1,8 @@
use std::{path::PathBuf, sync::Arc};

use huff_neo_core::Compiler;
use huff_neo_utils::file::file_provider::FileSystemFileProvider;
use huff_neo_utils::file::{file_source, remapper};
use std::collections::HashSet;
use std::{path::PathBuf, sync::Arc};

#[test]
fn test_recursing_fs_dependencies() {
@@ -14,7 +14,7 @@ fn test_recursing_fs_dependencies() {
.collect();
assert_eq!(file_sources.len(), 1);
let erc20_file_source = file_sources[0].clone();
let res = Compiler::recurse_deps(Arc::clone(&erc20_file_source), &remapper::Remapper::new("./"), file_provider);
let res = Compiler::recurse_deps(Arc::clone(&erc20_file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new());
let full_erc20_file_source = res.unwrap();
let dependencies = full_erc20_file_source.dependencies.as_ref().unwrap();
assert_eq!(dependencies.len(), 4);
@@ -34,7 +34,7 @@ fn test_recursing_external_dependencies() {
.collect();
assert_eq!(file_sources.len(), 1);
let erc20_file_source = file_sources[0].clone();
let res = Compiler::recurse_deps(Arc::clone(&erc20_file_source), &remapper::Remapper::new("./"), file_provider);
let res = Compiler::recurse_deps(Arc::clone(&erc20_file_source), &remapper::Remapper::new("./"), file_provider, HashSet::new());
let full_erc20_file_source = res.unwrap();
let dependencies = full_erc20_file_source.dependencies.as_ref().unwrap();
assert_eq!(dependencies.len(), 4);
2 changes: 1 addition & 1 deletion crates/parser/src/lib.rs
@@ -131,7 +131,7 @@ impl Parser {
return Err(ParserError {
kind: ParserErrorKind::UnexpectedType(self.current_token.kind.clone()),
hint: Some(format!("Expected either \"{}\" or \"{}\"", TokenKind::Define, TokenKind::Include)),
spans: AstSpan(self.spans.clone()),
spans: AstSpan(vec![self.current_token.span.clone()]),
cursor: self.cursor,
});
}
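
The parser change above swaps the accumulated `self.spans` for just the current token's span when reporting an unexpected top-level token, so the error points at the offending token rather than the whole parse history. A rough illustration with placeholder types, not the real parser structs:

```rust
// Placeholder span/parser types purely for illustration.
#[derive(Clone, Debug)]
struct Span {
    start: usize,
    end: usize,
}

struct Parser {
    spans: Vec<Span>, // every span collected so far
    current: Span,    // span of the token being inspected
}

impl Parser {
    fn unexpected_type_spans(&self) -> Vec<Span> {
        // Before: self.spans.clone() -- pointed at the entire parse history.
        // After: only the current token's span, which is where the problem is.
        vec![self.current.clone()]
    }
}

fn main() {
    let p = Parser { spans: vec![Span { start: 0, end: 10 }], current: Span { start: 42, end: 49 } };
    println!("collected {} spans, reporting 1", p.spans.len());
    println!("{:?}", p.unexpected_type_spans()); // [Span { start: 42, end: 49 }]
}
```
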
7 changes: 3 additions & 4 deletions crates/utils/src/ast.rs
@@ -45,14 +45,12 @@ impl AstSpan {
let end = fs.1.iter().map(|fs2| fs2.end).max().unwrap_or(0);
let newline_s = if s.is_empty() { "".to_string() } else { format!("{s}\n") };
if start.eq(&0) && end.eq(&0) {
format!("{newline_s}-> {}:{start}\n > 0|", fs.0)
format!("{newline_s}-> {}\n > 0|", fs.0)
} else {
format!(
"{}-> {}:{}-{}{}",
"{}-> {}:{}",
newline_s,
fs.0,
start,
end,
fs.1.iter()
.map(|sp| sp.source_seg())
.filter(|ss| !ss.is_empty())
@@ -63,6 +61,7 @@
)
}
});

// Add in optional hint message
format!("{}{source_str}", hint.map(|msg| format!("{msg}\n")).unwrap_or_default())
}
5 changes: 5 additions & 0 deletions crates/utils/src/error.rs
@@ -264,6 +264,8 @@ pub enum CompilerError {
ParserError(ParserError),
/// Reading PathBuf Failed
PathBufRead(OsString),
/// Empty Import Path
EmptyImportPath(OsString),
/// Bytecode Generation Error
CodegenError(CodegenError),
/// Multiple Failed Compiles
@@ -496,6 +498,9 @@ impl fmt::Display for CompilerError {
});
Ok(())
}
CompilerError::EmptyImportPath(os_str) => {
write!(f, "\nError: Import path with empty string: \"{}\"", os_str.as_os_str().to_str().unwrap_or("<unknown import>"))
}
}
}
}
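
With the new `EmptyImportPath` variant in place, dependency resolution can fail fast on `#include ""` instead of recursing on the current file. A hedged sketch of that guard, using a trimmed stand-in enum rather than the full `CompilerError`:

```rust
use std::ffi::OsString;

// Trimmed-down stand-in for CompilerError; only the variant added in this
// commit is modelled here, purely to show how the empty-import check behaves.
#[derive(Debug)]
enum CompilerError {
    EmptyImportPath(OsString),
}

// `#include ""` lexes to an empty import string; reject it before recursing
// so the resolver never re-reads the same file indefinitely.
fn validate_imports(file_path: &str, imports: &[String]) -> Result<(), CompilerError> {
    if imports.iter().any(|i| i.is_empty()) {
        return Err(CompilerError::EmptyImportPath(OsString::from(file_path)));
    }
    Ok(())
}

fn main() {
    let result = validate_imports("contracts/Main.huff", &[String::new()]);
    println!("{:?}", result); // Err(EmptyImportPath("contracts/Main.huff"))
}
```
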
8 changes: 3 additions & 5 deletions crates/utils/src/file/span.rs
@@ -30,7 +30,7 @@ impl Span {

/// Produces a file identifier string for errors
pub fn identifier(&self) -> String {
self.file.as_ref().map(|f| format!("\n-> {}:{}-{}", f.path, self.start, self.end)).unwrap_or_default()
self.file.as_ref().map(|f| format!("\n-> {}", f.path)).unwrap_or_default()
}

/// Produces a source segment string
@@ -42,10 +42,8 @@
.as_ref()
.map(|s| {
if self.start >= s.len() {
return "\nInternal compiler error: Start index out of range".to_string();
}
if self.end >= s.len() {
return "\nInternal compiler error: End index out of range: file".to_string();
// This should never happen, but currently does when the mapping from the flattened source is incorrect.
return format!("\nInternal compiler error: Start index out of range start={} len={}.", self.start, s.len());
}
let line_num = &s[0..self.start].as_bytes().iter().filter(|&&c| c == b'\n').count() + 1;
let line_start = &s[0..self.start].rfind('\n').unwrap_or(0);
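
Dropping the exact start/end offsets from `identifier()` still leaves errors pointing at the right place, because `source_seg()` derives the line number from the source text itself. A small, illustrative sketch of that derivation (not the actual implementation):

```rust
// Count the newline bytes before the span start and add one -- the same idea
// as the `line_num` computation kept in source_seg(). Illustrative only.
fn line_of(source: &str, start: usize) -> usize {
    source[..start].bytes().filter(|&c| c == b'\n').count() + 1
}

fn main() {
    let src = "#define macro MAIN() = {}\n#include \"\"\n";
    // Byte 26 is the first byte of the second line.
    assert_eq!(line_of(src, 26), 2);
    println!("line {}", line_of(src, 26));
}
```
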
10 changes: 5 additions & 5 deletions hnc-up/README.md
@@ -7,23 +7,23 @@ _Forked from [foundry](https://github.com/foundry-rs/foundry/tree/master/foundry

## Installing

`curl -L get.huff.sh | bash` (installs the `hnc-up` installer)
`curl -L https://raw.githubusercontent.com/cakevm/huff-neo/main/hnc-up/hnc-up | bash` (installs the `hnc-up` installer)

Run `hnc-up` to install the latest version of `huff-neo`.
Run `hnc-up` to install the latest version of the Huff Neo Compiler.


## Usage

To install the **nightly** version:
To install the **latest** version:

```sh
hnc-up
```

To install a specific **version** (in this case the `nightly` version):
To install a specific **version** (in this case the `1.0.0` version):

```sh
hnc-up --version nightly
hnc-up --version 1.0.0
```

To install a specific **branch** (in this case the `release/0.3.1` branch's latest commit):