Compare commits
No commits in common. "1e029bd604edfda363a17353a0baa5ad33cb791a" and "3f997fdb8732da824eed6ae2c59c6cd150b99333" have entirely different histories.
1e029bd604 ... 3f997fdb87
@@ -1,6 +1,6 @@
 use std::{collections::VecDeque};
 
-use crate::{token::{Token, MessageType}, builtin::modules::Module};
+use crate::{token::{Token}, builtin::modules::Module};
 
 #[derive(Default)]
 pub struct LangSpecs {
@@ -8,7 +8,7 @@ pub struct LangSpecs {
     builtin_features: Vec<crate::builtin::modules::Module>,
     lang_version: u32,
     authors: Vec<String>,
-    embedded_files: Vec<(usize, String)>,
+    embedded_files: Vec<String>,
 }
 
 impl LangSpecs {
@@ -16,22 +16,24 @@ impl LangSpecs {
     pub fn features(&self) -> &[crate::builtin::modules::Module] {
         &self.builtin_features
     }
 
-    pub fn embedded_files(&self) -> &[(usize, String)] {
-        &self.embedded_files
-    }
 }
 
 pub fn resolve_directives(tokens: &mut VecDeque<Token>) -> LangSpecs {
     let mut specs = LangSpecs::default();
 
-    for (idx, token) in tokens.iter().enumerate() {
+    for token in tokens.iter() {
         match token {
-            Token::CompilerDirective(text, _) => parse_directive(text, idx, &mut specs),
+            Token::CompilerDirective(text, _) => parse_directive(text, &mut specs),
             _ => ()
         }
     }
 
+    // remove compiler directives from source
+    tokens.retain(|token| match token {
+        Token::CompilerDirective(_, _) => false,
+        _ => true
+    });
 
     specs
 }
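The reworked `resolve_directives` drops the token-index bookkeeping: it scans the stream once, parses every directive it finds, and then strips the directive tokens with `retain` so the parser never sees them. A minimal, self-contained sketch of that scan-then-retain pattern (the `Token`, `LangSpecs`, and `parse_directive` stand-ins below are illustrative, not the crate's actual definitions):

```rust
use std::collections::VecDeque;

// Toy stand-ins for the crate's Token and LangSpecs types (illustrative only).
#[derive(Debug)]
enum Token {
    CompilerDirective(String),
    Word(String),
}

#[derive(Default, Debug)]
struct LangSpecs {
    authors: Vec<String>,
}

// Stubbed directive parser: here it only recognizes `@author(...)`.
fn parse_directive(text: &str, specs: &mut LangSpecs) {
    if let Some(rest) = text.strip_prefix("@author(") {
        if let Some(name) = rest.strip_suffix(')') {
            specs.authors.push(name.to_string());
        }
    }
}

fn resolve_directives(tokens: &mut VecDeque<Token>) -> LangSpecs {
    let mut specs = LangSpecs::default();

    // First pass: parse every directive into the spec...
    for token in tokens.iter() {
        if let Token::CompilerDirective(text) = token {
            parse_directive(text, &mut specs);
        }
    }

    // ...then drop the directives so the parser never sees them.
    tokens.retain(|token| !matches!(token, Token::CompilerDirective(_)));

    specs
}

fn main() {
    let mut tokens = VecDeque::from(vec![
        Token::CompilerDirective("@author(Sven Vogel)".to_string()),
        Token::Word("max".to_string()),
    ]);
    let specs = resolve_directives(&mut tokens);
    println!("{:?}, remaining tokens: {:?}", specs, tokens);
}
```

Filtering after the scan keeps the borrow on `tokens` immutable while the directives are read and mutates the deque only once.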
@@ -39,7 +41,7 @@ static DIRECTIVE_REGEX_SRC: &'static str = concat!(
     r"@feature\(((?:\s*[\w]+\s*,?)*)\)",
     r"|@version\(\s*([0-9]{3})\s*\)",
     r"|@author\((.*)\)",
-    r"|@include\((.*)\)"
+    r"|@embed\((.*)\)"
 );
 
 lazy_static::lazy_static! {
@@ -56,7 +58,7 @@ pub fn from_list(text: &str) -> Vec<String> {
     vec
 }
 
-fn parse_directive(text: &str, token_idx: usize, specs: &mut LangSpecs) {
+fn parse_directive(text: &str, specs: &mut LangSpecs) {
 
     for cap in DIRECTIVE_REGEX.captures_iter(text) {
         let mut enumerator = cap.iter().enumerate();
@@ -89,13 +91,8 @@ fn parse_directive(text: &str, token_idx: usize, specs: &mut LangSpecs) {
                 return;
             },
             4 => {
-                for path in from_list(mat.as_str()).iter() {
-                    if let Ok(str) = std::fs::read_to_string(path) {
-                        specs.embedded_files.push((token_idx, str));
-                    } else {
-                        crate::message(MessageType::Warning, format!("Unable to read embedded file: {path}"));
-                    }
-                }
+                specs.embedded_files.append(&mut from_list(mat.as_str()));
+                crate::message(crate::token::MessageType::Warning, "Embed directive not working at current state");
                 return;
             },
             _ => crate::message(crate::token::MessageType::Warning, format!("unknown directive: `{}`", text)),
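`DIRECTIVE_REGEX_SRC` concatenates one alternative per directive, so each directive owns exactly one capture group and `parse_directive` can dispatch on the index of the group that matched. A hedged sketch of that dispatch, assuming the `regex` and `lazy_static` crates the diff already depends on, and assuming group numbers follow the order of the alternatives:

```rust
use regex::Regex;

lazy_static::lazy_static! {
    // One alternative per directive; group 1..=4 map to the alternatives in order.
    static ref DIRECTIVE_REGEX: Regex = Regex::new(concat!(
        r"@feature\(((?:\s*[\w]+\s*,?)*)\)",
        r"|@version\(\s*([0-9]{3})\s*\)",
        r"|@author\((.*)\)",
        r"|@embed\((.*)\)"
    )).unwrap();
}

fn parse_directive(text: &str) {
    for cap in DIRECTIVE_REGEX.captures_iter(text) {
        // Group 0 is the whole match; skip it and look for the group that participated.
        for (i, mat) in cap.iter().enumerate().skip(1) {
            if let Some(mat) = mat {
                match i {
                    1 => println!("features: {}", mat.as_str()),
                    2 => println!("language version: {}", mat.as_str()),
                    3 => println!("author: {}", mat.as_str()),
                    4 => println!("embedded file: {}", mat.as_str()),
                    _ => println!("unknown directive: `{}`", text),
                }
                return;
            }
        }
    }
}

fn main() {
    parse_directive("@version(100)");
    parse_directive("@embed(lib.yard)");
}
```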
src/main.rs
@@ -31,22 +31,11 @@ fn compile(settings: &Settings) -> Option<(Vec<Func>, Vec<Declr>, Vec<BuiltinFun
     for src in settings.get_source().iter() {
         let code = src.code();
 
-        let mut diagnostics = Diagnostics::new(&settings);
-        diagnostics.add_source_origin(code);
+        let mut diagnostics = Diagnostics::new(&settings, code);
 
         if let Ok(mut tokens) = tokenize(code, &mut diagnostics) {
             let specs = crate::direct::resolve_directives(&mut tokens);
 
-            // read source of every embedded file and tokenize
-            for (idx, src) in specs.embedded_files() {
-                diagnostics.add_source_origin(src);
-                if let Ok(em_tokens) = tokenize(src, &mut diagnostics) {
-                    for (idy, em_token) in em_tokens.into_iter().enumerate() {
-                        tokens.insert(idx + idy, em_token.to_owned());
-                    }
-                }
-            }
-
             let mut parser = Parser::new(&specs);
 
             if let Ok((funcs, declrs, builtin)) = parser.parse(&mut tokens, &mut diagnostics, &settings) {
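For context on the block removed here: the old code tokenized each embedded file and spliced its tokens into the main stream at the index where the directive sat, which is why `resolve_directives` previously had to record a token index per embedded file. A minimal sketch of that splice, using plain strings in place of the crate's `Token` type (illustrative only):

```rust
use std::collections::VecDeque;

// Insert the tokens of an embedded source into the main stream, starting at
// the index where the `@include`/`@embed` directive used to sit.
fn splice_embedded(tokens: &mut VecDeque<String>, idx: usize, embedded: &[String]) {
    for (offset, tok) in embedded.iter().enumerate() {
        tokens.insert(idx + offset, tok.clone());
    }
}

fn main() {
    let mut tokens: VecDeque<String> =
        ["main", "{", "}"].iter().map(|s| s.to_string()).collect();
    let embedded: Vec<String> =
        ["foo", "{", "}"].iter().map(|s| s.to_string()).collect();

    // Splice the embedded tokens in front of `main`, i.e. at index 0.
    splice_embedded(&mut tokens, 0, &embedded);
    println!("{:?}", tokens);
}
```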
@@ -4,7 +4,7 @@ use crate::conf::Settings;
 use crate::token::{DebugInfo, DebugNotice, Token, MessageType};
 use crate::Prim;
 use core::panic;
-use std::collections::{VecDeque, HashMap};
+use std::collections::VecDeque;
 use std::collections::hash_map::DefaultHasher;
 use std::hash::{Hash, Hasher};
 
@@ -24,36 +24,28 @@ impl Default for LogLvl {
     }
 }
 
-pub struct Diagnostics {
+pub struct Diagnostics<'a> {
     /// terminating factor on error
-    err: Option<DebugNotice>,
+    err: Option<DebugNotice<'a>>,
     /// additional warning and informations
     /// all non critical
-    hints: Vec<DebugNotice>,
-    /// source hash and source string
-    source: HashMap<u64, String>,
+    hints: Vec<DebugNotice<'a>>,
+    /// source string
+    source: &'a str,
     /// flags
     loglvl: LogLvl,
 }
 
-impl Diagnostics {
-    pub fn new(settings: &Settings) -> Diagnostics {
+impl<'a> Diagnostics<'a> {
+    pub fn new(settings: &Settings, source: &'a str) -> Diagnostics<'a> {
         Self {
             err: None,
             hints: vec![],
-            source: HashMap::new(),
+            source,
             loglvl: settings.loglvl()
         }
     }
 
-    pub fn add_source_origin<A>(&mut self, source: A) where A: Into<String> + Hash {
-        let mut hasher = DefaultHasher::new();
-        source.hash(&mut hasher);
-        let origin = hasher.finish();
-
-        self.source.insert(origin, source.into());
-    }
-
     pub fn set_err<T, S>(&mut self, source: &S, message: &'static crate::token::DebugMsg, ext: T)
     where
         T: Into<String>,
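`Diagnostics` now borrows the source text for the lifetime of one compilation pass instead of hashing owned copies into a `HashMap`. A stripped-down sketch of that borrowing pattern (field and method names are reduced to the essentials and do not match the crate exactly):

```rust
// A diagnostics collector that borrows the source it reports on.
struct Diagnostics<'a> {
    source: &'a str,
    hints: Vec<String>,
}

impl<'a> Diagnostics<'a> {
    fn new(source: &'a str) -> Diagnostics<'a> {
        Diagnostics { source, hints: vec![] }
    }

    // Record a hint that quotes the offending line straight from the borrowed source.
    fn set_hint(&mut self, line: usize, message: &str) {
        let text = self.source.lines().nth(line).unwrap_or("<unknown line>");
        self.hints.push(format!("line {}: {} -> {}", line + 1, message, text));
    }
}

fn main() {
    let code = "max(a:int, b:int)=int {\n    yield a\n}";
    let mut diagnostics = Diagnostics::new(code);
    diagnostics.set_hint(1, "example hint");
    for hint in &diagnostics.hints {
        println!("{hint}");
    }
}
```

Borrowing also removes the fallible lookup (`self.source.get(&info.origin).unwrap()`) in `set_err` below: a notice can point straight at the already-borrowed source.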
@@ -70,7 +62,7 @@ impl Diagnostics {
             info,
             msg: message,
             ext: ext.into(),
-            source: self.source.get(&info.origin).unwrap().clone(),
+            source: self.source,
         });
     }
 
@@ -85,12 +77,12 @@ impl Diagnostics {
             info,
             msg: message,
             ext: ext.into(),
-            source: self.source.get(&info.origin).unwrap().clone(),
+            source: self.source,
         });
     }
 }
 
-impl std::fmt::Display for Diagnostics {
+impl<'a> std::fmt::Display for Diagnostics<'a> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for hint in self.hints.iter() {
             match hint.msg.typ {
@@ -222,8 +214,6 @@ impl<'a> Declr<'a> {
             }
         }
 
-        self.result_typ.hash(&mut hasher);
-
         self.uuid = hasher.finish()
     }
 
@@ -285,7 +285,7 @@ fn discover_functions(
 
         // if we have anything left it might be an error
        match &top {
-            Token::LineBreak(_) | Token::Terminator(_) | Token::CompilerDirective(_,_) => (), // valid whitespace
+            Token::LineBreak(_) | Token::Terminator(_) => (), // valid whitespace
             _ => {
                 diagnostics.set_err(&top, crate::msg::ERR22, "");
                 return Err(()) ;
@@ -1,6 +1,5 @@
 use colored::{ColoredString, Colorize};
-use std::{collections::{VecDeque, hash_map::DefaultHasher}, hash::Hasher};
-use std::hash::Hash;
+use std::{collections::VecDeque};
 
 use crate::parser::data::Diagnostics;
 
@@ -421,16 +420,16 @@ pub struct DebugMsg {
     pub msg: &'static str,
 }
 
-pub struct DebugNotice {
+pub struct DebugNotice<'a> {
     pub info: DebugInfo,
     /// generic error description
     pub msg: &'static DebugMsg,
     /// extra message which is case specific
     pub ext: String,
-    pub source: String,
+    pub source: &'a str,
 }
 
-impl std::fmt::Display for DebugNotice {
+impl<'a> std::fmt::Display for DebugNotice<'a> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         // write header as:
         // `Error (56) some syntax error message in line 5:`
@@ -474,8 +473,6 @@ pub struct DebugInfo {
     pub end: usize,
     /// line number where the line in which the token is begins
     pub line: usize,
-    /// string url of the source origin
-    pub origin: u64
 }
 
 #[derive(Debug)]
@@ -626,9 +623,6 @@ pub fn tokenize<'a>(source: &'a str, diagnostics: &mut Diagnostics) -> Result<Ve
 
     for cap in TOKEN_REGEX.captures_iter(source.as_ref()) {
         let mut enumerator = cap.iter().enumerate();
-        let mut hasher = DefaultHasher::new();
-        source.hash(&mut hasher);
-        let origin = hasher.finish();
         loop {
             let next = enumerator.next();
             if next.is_none() {
@@ -654,7 +648,6 @@ pub fn tokenize<'a>(source: &'a str, diagnostics: &mut Diagnostics) -> Result<Ve
             start: mat.start() - line_start,
             end: mat.end() - line_start,
             line: line_count,
-            origin
         };
 
         tokens.push_back(match i {
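The hashing removed from `tokenize` (and the matching map in `Diagnostics`) reduced each source string to a `u64` origin id with `DefaultHasher`, which `DebugInfo.origin` then carried so the source could be looked up again when a notice was printed. Roughly, the removed bookkeeping amounted to the following; this is a sketch of the old mechanism, not a reflection of the new code:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Hash a source string down to a u64 origin id, as the old tokenizer did.
fn origin_of(source: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    source.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut sources: HashMap<u64, String> = HashMap::new();

    let code = "main = int {\n    0\n}";
    let origin = origin_of(code);

    // Register the source under its origin id, then resolve it again later
    // when a notice needs to quote the offending source text.
    sources.insert(origin, code.to_string());
    println!("origin {:x} -> {} bytes", origin, sources[&origin].len());
}
```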
@@ -1,5 +0,0 @@
-foo( {
-
-    println("say Hello!");
-
-}
@@ -2,8 +2,6 @@
 @version(100)
 @author(Sven Vogel)
 
-@include(/home/servostar/Projects/Yard/tests/lib.yard)
-
 max(a:int, b:int)=int {
     despite a < b {
         yield a
@@ -12,11 +10,9 @@ max(a:int, b:int)=int {
     }
 
 # main function
-main() = int {
+main = int {
 
     println("" .. max(3,4));
 
-    foo()
-
     0
 }