Compare commits

...

4 Commits

7 changed files with 73 additions and 33 deletions

View File

@@ -1,6 +1,6 @@
 use std::{collections::VecDeque};
-use crate::{token::{Token}, builtin::modules::Module};
+use crate::{token::{Token, MessageType}, builtin::modules::Module};
 #[derive(Default)]
 pub struct LangSpecs {
@@ -8,7 +8,7 @@ pub struct LangSpecs {
     builtin_features: Vec<crate::builtin::modules::Module>,
     lang_version: u32,
     authors: Vec<String>,
-    embedded_files: Vec<String>,
+    embedded_files: Vec<(usize, String)>,
 }
 impl LangSpecs {
@@ -16,24 +16,22 @@ impl LangSpecs {
     pub fn features(&self) -> &[crate::builtin::modules::Module] {
         &self.builtin_features
     }
+    pub fn embedded_files(&self) -> &[(usize, String)] {
+        &self.embedded_files
+    }
 }
 pub fn resolve_directives(tokens: &mut VecDeque<Token>) -> LangSpecs {
     let mut specs = LangSpecs::default();
-    for token in tokens.iter() {
+    for (idx, token) in tokens.iter().enumerate() {
         match token {
-            Token::CompilerDirective(text, _) => parse_directive(text, &mut specs),
+            Token::CompilerDirective(text, _) => parse_directive(text, idx, &mut specs),
             _ => ()
         }
     }
-    // remove compiler directives from source
-    tokens.retain(|token| match token {
-        Token::CompilerDirective(_, _) => false,
-        _ => true
-    });
     specs
 }
@@ -41,7 +39,7 @@ static DIRECTIVE_REGEX_SRC: &'static str = concat!(
     r"@feature\(((?:\s*[\w]+\s*,?)*)\)",
     r"|@version\(\s*([0-9]{3})\s*\)",
     r"|@author\((.*)\)",
-    r"|@embed\((.*)\)"
+    r"|@include\((.*)\)"
 );
 lazy_static::lazy_static! {
@@ -58,7 +56,7 @@ pub fn from_list(text: &str) -> Vec<String> {
     vec
 }
-fn parse_directive(text: &str, specs: &mut LangSpecs) {
+fn parse_directive(text: &str, token_idx: usize, specs: &mut LangSpecs) {
     for cap in DIRECTIVE_REGEX.captures_iter(text) {
         let mut enumerator = cap.iter().enumerate();
@@ -91,8 +89,13 @@ fn parse_directive(text: &str, specs: &mut LangSpecs) {
                 return;
             },
             4 => {
-                specs.embedded_files.append(&mut from_list(mat.as_str()));
-                crate::message(crate::token::MessageType::Warning, "Embed directive not working at current state");
+                for path in from_list(mat.as_str()).iter() {
+                    if let Ok(str) = std::fs::read_to_string(path) {
+                        specs.embedded_files.push((token_idx, str));
+                    } else {
+                        crate::message(MessageType::Warning, format!("Unable to read embedded file: {path}"));
+                    }
+                }
                 return;
             },
             _ => crate::message(crate::token::MessageType::Warning, format!("unknown directive: `{}`", text)),
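
A note on the hunks above: `@embed` becomes `@include`, and `parse_directive` now receives the index of the directive token so that the contents of every included file can be recorded as an `(index, contents)` pair in `LangSpecs`. Since the `retain` call that used to strip directives from the token stream is removed, those indices stay valid until the embedded tokens are spliced in, and the parser instead learns to skip directive tokens (see the parser hunk further down). Below is a minimal standalone sketch of the include-resolution step; `resolve_includes`, the `paths` slice and the `eprintln!` warning are stand-ins for the `from_list` output, `specs.embedded_files` and `crate::message`, not the project's actual API.

use std::fs;

// Sketch: turn one `@include(...)` path list into
// (directive token index, file contents) pairs, warning on unreadable paths.
fn resolve_includes(token_idx: usize, paths: &[String]) -> Vec<(usize, String)> {
    let mut embedded = Vec::new();
    for path in paths {
        match fs::read_to_string(path) {
            Ok(text) => embedded.push((token_idx, text)),
            Err(_) => eprintln!("Unable to read embedded file: {path}"),
        }
    }
    embedded
}

fn main() {
    // an unreadable path only produces the warning; nothing is recorded
    let embedded = resolve_includes(3, &["does_not_exist.yard".to_string()]);
    assert!(embedded.is_empty());
}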

View File

@@ -31,11 +31,22 @@ fn compile(settings: &Settings) -> Option<(Vec<Func>, Vec<Declr>, Vec<BuiltinFun
     for src in settings.get_source().iter() {
         let code = src.code();
-        let mut diagnostics = Diagnostics::new(&settings, code);
+        let mut diagnostics = Diagnostics::new(&settings);
+        diagnostics.add_source_origin(code);
         if let Ok(mut tokens) = tokenize(code, &mut diagnostics) {
             let specs = crate::direct::resolve_directives(&mut tokens);
+            // read source of every embedded file and tokenize
+            for (idx, src) in specs.embedded_files() {
+                diagnostics.add_source_origin(src);
+                if let Ok(em_tokens) = tokenize(src, &mut diagnostics) {
+                    for (idy, em_token) in em_tokens.into_iter().enumerate() {
+                        tokens.insert(idx + idy, em_token.to_owned());
+                    }
+                }
+            }
             let mut parser = Parser::new(&specs);
             if let Ok((funcs, declrs, builtin)) = parser.parse(&mut tokens, &mut diagnostics, &settings) {
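
The loop added above splices the tokens of every embedded file into the main token stream, starting at the index where its `@include` directive sits, so the included code is parsed as if it had been written at that point. A small self-contained sketch of that insertion pattern, with `String` standing in for the compiler's `Token` type:

use std::collections::VecDeque;

// Sketch: insert embedded tokens at the directive's position, preserving their order.
fn splice_embedded(tokens: &mut VecDeque<String>, at: usize, embedded: Vec<String>) {
    for (offset, tok) in embedded.into_iter().enumerate() {
        // every insert shifts the remainder of the stream one slot to the right
        tokens.insert(at + offset, tok);
    }
}

fn main() {
    let mut tokens: VecDeque<String> =
        ["@include", "main", "{", "}"].iter().map(|s| s.to_string()).collect();
    splice_embedded(&mut tokens, 1, vec!["foo".into(), "{".into(), "}".into()]);
    // prints ["@include", "foo", "{", "}", "main", "{", "}"]
    println!("{tokens:?}");
}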

View File

@@ -4,7 +4,7 @@ use crate::conf::Settings;
 use crate::token::{DebugInfo, DebugNotice, Token, MessageType};
 use crate::Prim;
 use core::panic;
-use std::collections::VecDeque;
+use std::collections::{VecDeque, HashMap};
 use std::collections::hash_map::DefaultHasher;
 use std::hash::{Hash, Hasher};
@@ -24,28 +24,36 @@ impl Default for LogLvl {
     }
 }
-pub struct Diagnostics<'a> {
+pub struct Diagnostics {
     /// terminating factor on error
-    err: Option<DebugNotice<'a>>,
+    err: Option<DebugNotice>,
     /// additional warning and informations
     /// all non critical
-    hints: Vec<DebugNotice<'a>>,
-    /// source string
-    source: &'a str,
+    hints: Vec<DebugNotice>,
+    /// source hash and source string
+    source: HashMap<u64, String>,
     /// flags
     loglvl: LogLvl,
 }
-impl<'a> Diagnostics<'a> {
-    pub fn new(settings: &Settings, source: &'a str) -> Diagnostics<'a> {
+impl Diagnostics {
+    pub fn new(settings: &Settings) -> Diagnostics {
         Self {
             err: None,
             hints: vec![],
-            source,
+            source: HashMap::new(),
             loglvl: settings.loglvl()
         }
     }
+    pub fn add_source_origin<A>(&mut self, source: A) where A: Into<String> + Hash {
+        let mut hasher = DefaultHasher::new();
+        source.hash(&mut hasher);
+        let origin = hasher.finish();
+        self.source.insert(origin, source.into());
+    }
     pub fn set_err<T, S>(&mut self, source: &S, message: &'static crate::token::DebugMsg, ext: T)
     where
         T: Into<String>,
@@ -62,7 +70,7 @@ impl<'a> Diagnostics<'a> {
             info,
             msg: message,
             ext: ext.into(),
-            source: self.source,
+            source: self.source.get(&info.origin).unwrap().clone(),
         });
     }
@@ -77,12 +85,12 @@ impl<'a> Diagnostics<'a> {
             info,
             msg: message,
             ext: ext.into(),
-            source: self.source,
+            source: self.source.get(&info.origin).unwrap().clone(),
         });
     }
 }
-impl<'a> std::fmt::Display for Diagnostics<'a> {
+impl std::fmt::Display for Diagnostics {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for hint in self.hints.iter() {
             match hint.msg.typ {
@@ -214,6 +222,8 @@ impl<'a> Declr<'a> {
             }
         }
         self.result_typ.hash(&mut hasher);
         self.uuid = hasher.finish()
     }
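
The hunks above remove the `'a` lifetime from `Diagnostics`: instead of borrowing a single source string, it now owns a `HashMap<u64, String>` of every registered source, keyed by a `DefaultHasher` hash of the source text, and each `DebugInfo` carries that hash as `origin` so the matching source can be looked up when a notice is printed. A minimal standalone sketch of that round trip (the names `source_key` and `sources` are illustrative, not part of the codebase):

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Sketch: derive the same u64 key for a source string that
// `add_source_origin` computes when registering it.
fn source_key<S: Hash + ?Sized>(source: &S) -> u64 {
    let mut hasher = DefaultHasher::new();
    source.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut sources: HashMap<u64, String> = HashMap::new();

    let main_src = "main() = int { 0 }";
    let lib_src = "println(\"say Hello!\")";

    // register every source origin once, keyed by its hash
    sources.insert(source_key(main_src), main_src.to_string());
    sources.insert(source_key(lib_src), lib_src.to_string());

    // a token's DebugInfo would store this u64 as its `origin`
    let origin = source_key(lib_src);
    println!("{}", sources.get(&origin).unwrap());
}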

View File

@@ -285,7 +285,7 @@ fn discover_functions(
     // if we have anything left it might be an error
     match &top {
-        Token::LineBreak(_) | Token::Terminator(_) => (), // valid whitespace
+        Token::LineBreak(_) | Token::Terminator(_) | Token::CompilerDirective(_,_) => (), // valid whitespace
         _ => {
             diagnostics.set_err(&top, crate::msg::ERR22, "");
             return Err(()) ;

View File

@@ -1,5 +1,6 @@
 use colored::{ColoredString, Colorize};
-use std::{collections::VecDeque};
+use std::{collections::{VecDeque, hash_map::DefaultHasher}, hash::Hasher};
 use std::hash::Hash;
 use crate::parser::data::Diagnostics;
@@ -420,16 +421,16 @@ pub struct DebugMsg {
     pub msg: &'static str,
 }
-pub struct DebugNotice<'a> {
+pub struct DebugNotice {
     pub info: DebugInfo,
     /// generic error description
     pub msg: &'static DebugMsg,
     /// extra message which is case specific
     pub ext: String,
-    pub source: &'a str,
+    pub source: String,
 }
-impl<'a> std::fmt::Display for DebugNotice<'a> {
+impl std::fmt::Display for DebugNotice {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         // write header as:
         // `Error (56) some syntax error message in line 5:`
@@ -473,6 +474,8 @@ pub struct DebugInfo {
     pub end: usize,
     /// line number where the line in which the token is begins
     pub line: usize,
+    /// string url of the source origin
+    pub origin: u64
 }
 #[derive(Debug)]
@@ -623,6 +626,9 @@ pub fn tokenize<'a>(source: &'a str, diagnostics: &mut Diagnostics) -> Result<Ve
     for cap in TOKEN_REGEX.captures_iter(source.as_ref()) {
         let mut enumerator = cap.iter().enumerate();
+        let mut hasher = DefaultHasher::new();
+        source.hash(&mut hasher);
+        let origin = hasher.finish();
         loop {
             let next = enumerator.next();
             if next.is_none() {
@@ -648,6 +654,7 @@ pub fn tokenize<'a>(source: &'a str, diagnostics: &mut Diagnostics) -> Result<Ve
                 start: mat.start() - line_start,
                 end: mat.end() - line_start,
                 line: line_count,
+                origin
             };
             tokens.push_back(match i {

tests/lib.yard Normal file
View File

@@ -0,0 +1,5 @@
+foo( {
+    println("say Hello!");
+}

View File

@@ -2,6 +2,8 @@
 @version(100)
 @author(Sven Vogel)
+@include(/home/servostar/Projects/Yard/tests/lib.yard)
 max(a:int, b:int)=int {
     despite a < b {
         yield a
@@ -10,9 +12,11 @@ max(a:int, b:int)=int {
 }
 # main function
-main = int {
+main() = int {
     println("" .. max(3,4));
+    foo()
     0
 }