Skip to content

Commit

Permalink
Merge branch 'windows_support'
Browse files Browse the repository at this point in the history
  • Loading branch information
VonTum committed Aug 7, 2023
2 parents d28d286 + 1045baa commit 31758eb
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 5 deletions.
14 changes: 11 additions & 3 deletions src/dev_aid/lsp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,20 +14,28 @@ use crate::{parser::{perform_full_semantic_parse, FullParseResult}, dev_aid::syn

use super::syntax_highlighting::{IDETokenType, IDEIdentifierType, IDEToken};

use std::env;

// Path of the file the LSP server logs to. Stdout carries the LSP protocol
// stream, so diagnostics must go to a file instead.
// `cfg!(target_os = "windows")` is resolved at compile time, exactly like the
// previous const string comparison against `std::env::consts::OS`, but without
// needing the hand-rolled `const_eq_str` helper.
// NOTE(review): both paths hard-code a specific user's home directory — TODO
// derive from %USERPROFILE% / $HOME instead.
static LSP_LOG_PATH : &str = if cfg!(target_os = "windows") {
    "C:\\Users\\lenna\\lsp_out.txt"
} else {
    "/home/lennart/lsp_out.txt"
};

thread_local!(static LSP_LOG: File = File::create(LSP_LOG_PATH).expect("Replacement terminal /home/lennart/lsp_out.txt could not be created"));

// Shadows the prelude `print!`: stdout is reserved for the LSP protocol
// stream, so all debug output is redirected to the thread-local log file.
// `use std::io::Write` is scoped inside the expansion so the trait is in
// scope at every call site without polluting the caller's namespace.
macro_rules! print {
    ($($arg:tt)*) => {{
        use std::io::Write;
        LSP_LOG.with(|mut file| {
            // `&File` implements `Write`, so writing through the borrow is fine.
            write!(file, $($arg)*).unwrap();
        })
    }};
}
macro_rules! println {
($($arg:tt)*) => {{
use std::io::Write;
OUT_FILE.with(|mut file| {
LSP_LOG.with(|mut file| {
write!(file, $($arg)*).unwrap();
write!(file, "\n").unwrap();
})
Expand Down
5 changes: 4 additions & 1 deletion src/dev_aid/syntax_highlighting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,10 @@ pub fn create_token_ide_info<'a>(parsed: &FullParseResult) -> Vec<IDEToken> {
}

pub fn syntax_highlight_file(file_path : &str) {
let file_text = std::fs::read_to_string(file_path).expect("Could not open file!");
let file_text = match std::fs::read_to_string(file_path) {
Ok(file_text) => file_text,
Err(reason) => panic!("Could not open file '{file_path}' for syntax highlighting because {reason}")
};

let (full_parse, errors) = perform_full_semantic_parse(&file_text);

Expand Down
2 changes: 1 addition & 1 deletion src/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ pub const TOKEN_BIG_INTEGER : TokenTypeIdx = TOKEN_IDENTIFIER + 2;
pub const TOKEN_COMMENT : TokenTypeIdx = TOKEN_IDENTIFIER + 3;
pub const TOKEN_INVALID : TokenTypeIdx = TOKEN_IDENTIFIER + 4;

const fn const_eq_str(a: &str, b: &str) -> bool {
pub const fn const_eq_str(a: &str, b: &str) -> bool {
let a_bytes = a.as_bytes();
let b_bytes = b.as_bytes();

Expand Down

0 comments on commit 31758eb

Please sign in to comment.