Defunct dm2doxy filter for https://github.com/SpaceManiac/SpacemanDMM
From b80b1832ea6fa92402e79697ade7853241a9f2d5 Mon Sep 17 00:00:00 2001
From: Tad Hardesty <[email protected]>
Date: Sat, 28 Jul 2018 17:24:36 -0700
Subject: [PATCH] Add dm2doxy doxygen filter

Works by parsing the whole environment and creating dummy files
corresponding to each real file, which are then served instead of
parsing the real files one-by-one.

Not recommended for use, as Doxygen takes upwards of 6.5 hours to
document /tg/station's 17,000 classes, completely independent of the
filter which finishes in little more than a minute.
---
 Cargo.lock                     |   7 +
 Cargo.toml                     |   1 +
 src/dm2doxy/Cargo.toml         |  12 ++
 src/dm2doxy/main.rs            | 238 +++++++++++++++++++++++++++++++++
 src/dreammaker/lexer.rs        |  31 +++++
 src/dreammaker/parser.rs       |   4 +-
 src/dreammaker/preprocessor.rs |  15 ++-
 7 files changed, 305 insertions(+), 3 deletions(-)
 create mode 100644 src/dm2doxy/Cargo.toml
 create mode 100644 src/dm2doxy/main.rs

diff --git a/Cargo.lock b/Cargo.lock
index 513d6d9..9a4b267 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -154,6 +154,13 @@ dependencies = [
  "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "dm2doxy"
+version = "0.0.1"
+dependencies = [
+ "dreammaker 0.1.0",
+]
+
 [[package]]
 name = "dmm-tools"
 version = "0.1.0"
diff --git a/Cargo.toml b/Cargo.toml
index 135aec1..b48f2c9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,7 @@
 [workspace]
 members = [
     "src/cli",
+    "src/dm2doxy",
     "src/dreammaker",
     #"src/editor",
     "src/interval-tree",
diff --git a/src/dm2doxy/Cargo.toml b/src/dm2doxy/Cargo.toml
new file mode 100644
index 0000000..b2d1c30
--- /dev/null
+++ b/src/dm2doxy/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "dm2doxy"
+version = "0.0.1"
+authors = ["Tad Hardesty <[email protected]>"]
+description = "Doxygen filter for DreamMaker"
+
+[[bin]]
+name = "dm2doxy"
+path = "main.rs"
+
+[dependencies]
+dreammaker = { path = "../dreammaker" }
diff --git a/src/dm2doxy/main.rs b/src/dm2doxy/main.rs
new file mode 100644
index 0000000..7e79ef1
--- /dev/null
+++ b/src/dm2doxy/main.rs
@@ -0,0 +1,238 @@
+//! **dm2doxy** is a Doxygen filter for DreamMaker/BYOND codebases.
+//!
+//! Because DreamMaker codebases are only reasonably parsed in a solid chunk,
+//! we operate by parsing the entire environment and then saving out the doc
+//! comments alongside line-number-accurate analogues of the DM definitions.
+
+extern crate dreammaker as dm;
+
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+use std::{io, fs};
+use std::rc::Rc;
+
+use dm::*;
+
+/// Entry point - invoke `.dm` or `.dme` driver based on command-line filename.
+fn main() {
+    let mut args = std::env::args_os();
+    let _ = args.next(); // discard executable
+    let fname = match args.next() {
+        Some(arg) => PathBuf::from(arg),
+        None => return eprintln!("specify filename"),
+    };
+
+    if let Err(e) = match fname.extension().and_then(|s| s.to_str()) {
+        Some("dme") => dme,
+        Some("dm") => dm,
+        other => return eprintln!("bad extension: {:?}", other),
+    }(fname) {
+        eprintln!(" error: {}", e);
+        std::process::exit(1);
+    }
+}
+
+/// The scoping operator Doxygen is expecting.
+const SCOPE: &str = "::";
+
+/// Map from real path to temporary file.
+fn tempfile(path: &Path) -> PathBuf {
+    // TODO: replace backslashes as well
+    Path::new("dm2doxy").join(path.display().to_string().replace("/", "$").replace(".dm", ".."))
+}
+
+/// ---------------------------------------------------------------------------
+/// `.dm` files - read temp files
+fn dm(fname: PathBuf) -> Result<(), Box<std::error::Error>> {
+    use std::io::{Read, Write};
+
+    let path = tempfile(fname.strip_prefix(std::env::current_dir()?)?);
+    let mut contents = Vec::new();
+    fs::File::open(path)?.read_to_end(&mut contents)?;
+    // TODO: delete the tempfile
+    let stdout = io::stdout();
+    stdout.lock().write_all(&contents)?;
+    Ok(())
+}
+
+/// ---------------------------------------------------------------------------
+/// `.dme` files - parse the environment, collate definitions, write temp files
+fn dme(fname: PathBuf) -> Result<(), Box<std::error::Error>> {
+    use std::io::Write;
+
+    // parse the environment
+    eprintln!("parsing {}", fname.display());
+    let (tx, rx) = std::sync::mpsc::channel();
+    let ctx = &Context::default();
+    let mut pp = preprocessor::Preprocessor::new(ctx, fname)?;
+    pp.save_comments(tx);
+    let objtree = parser::parse(ctx, indents::IndentProcessor::new(ctx, pp));
+
+    // index all definitions
+    let mut defs = Definitions::default();
+    let mut extends = BTreeMap::new();
+
+    while let Ok(comment) = rx.try_recv() {
+        defs.push(comment.location, Definition { comment: true, ..Definition::new(&Rc::from(""), comment.text) });
+    }
+
+    objtree.root().recurse(&mut |ty| {
+        // start with the class and what it extends from, if anything
+        let class: Rc<str>;
+        if ty.path.is_empty() {
+            class = Rc::from("");
+        } else {
+            class = Rc::from(path(ty));
+            if let Some(parent) = ty.parent_type() {
+                if !parent.path.is_empty() {
+                    extends.insert(class.clone(), path(parent));
+                }
+            }
+            defs.push(ty.location, Definition::new(&class, String::new()));
+        };
+
+        // list all the vars since these usually come first
+        for (name, var) in ty.vars.iter() {
+            let decl = match &var.declaration {
+                None => String::new(),
+                Some(decl) => decl.var_type.type_path.join(SCOPE),
+            };
+            defs.push(var.value.location, Definition::new(&class, format!("{} {};", decl, name)));
+        }
+
+        // list all the procs
+        for (name, proc) in ty.procs.iter() {
+            let decl = match &proc.declaration {
+                None => "",
+                Some(decl) => if decl.is_verb { "verb" } else { "proc" },
+            };
+
+            // TODO: ensure doc comments on the proc end up on the proc and not
+            // the class, if the proc gets preceded by a class opener.
+            defs.push(proc.value.location, Definition::new(&class, format!("{} {}(", decl, name)));
+            let mut sep = "";
+            for param in proc.value.parameters.iter() {
+                defs.push(proc.value.location, Definition::new(&class, format!("{}{}{}", sep, param.path.join(SCOPE), param.name)));
+                sep = ", ";
+            }
+            defs.push(proc.value.location, Definition::new(&class, "){}".to_owned()));
+        }
+    });
+
+    // collate definitions into files and lines
+    let mut map: BTreeMap<FileId, BTreeMap<u32, Vec<String>>> = BTreeMap::new();
+    let mut last: Option<(Location, Rc<str>)> = None;
+
+    for (location, def_vec) in defs.map {
+        for def in def_vec {
+            // if we're in a different file or class than before
+            if !def.comment {
+                if let Some((last_loc, last_class)) = last.take() {
+                    if def.class != last_class || location.file != last_loc.file {
+                        // close the previous class
+                        if !last_class.is_empty() {
+                            map
+                                .entry(last_loc.file).or_default()
+                                .entry(last_loc.line).or_default()
+                                .push("}".to_owned());
+                        }
+
+                        // open the current class
+                        let dest = map
+                            .entry(location.file).or_default()
+                            .entry(location.line).or_default();
+                        if !def.class.is_empty() {
+                            dest.push(format!("class {}", &def.class));
+                            if let Some(extends) = extends.remove(&def.class) {
+                                dest.push(format!(" extends {}", extends));
+                            }
+                            dest.push("{".to_owned());
+                        }
+                    }
+                }
+                last = Some((location, def.class));
+            }
+
+            // write the current entry
+            map
+                .entry(location.file).or_default()
+                .entry(location.line).or_default()
+                .push(def.bit);
+        }
+    }
+
+    if let Some((location, class)) = last.take() {
+        if !class.is_empty() {
+            map
+                .entry(location.file).or_default()
+                .entry(location.line).or_default()
+                .push("}".to_owned());
+        }
+    }
+
+    // save collated files
+    for (id, lines) in map {
+        let path = ctx.file_path(id);
+        let mut f: Box<io::Write>;
+        if id == FileId::builtins() {
+            f = Box::new(io::stdout());
+        } else if ctx.get_file(&path).is_some() {
+            let tempfile = tempfile(&path);
+            if let Some(p) = tempfile.parent() {
+                fs::create_dir_all(p)?;
+            }
+            f = Box::new(fs::File::create(tempfile)?);
+        } else {
+            continue;
+        }
+
+        let mut last = 1;
+        let mut total_items = 0;
+        let num_lines = lines.len();
+        for (line_number, items) in lines {
+            for _ in last..line_number {
+                writeln!(f)?;
+            }
+            last = line_number;
+            for item in items {
+                write!(f, "{}", item)?;
+                total_items += 1;
+            }
+        }
+        writeln!(f)?;
+        eprintln!(" {}: {} lines with {} items", path.display(), num_lines, total_items);
+    }
+
+    Ok(())
+}
+
+fn path(ty: objtree::TypeRef) -> String {
+    if ty.path.is_empty() {
+        "globals".to_owned()
+    } else {
+        ty.path[1..].replace("/", SCOPE)
+    }
+}
+
+#[derive(Default)]
+struct Definitions {
+    map: BTreeMap<Location, Vec<Definition>>,
+}
+
+impl Definitions {
+    fn push(&mut self, loc: Location, def: Definition) {
+        self.map.entry(loc).or_default().push(def);
+    }
+}
+
+struct Definition {
+    class: Rc<str>,
+    bit: String,
+    comment: bool,
+}
+
+impl Definition {
+    fn new(class: &Rc<str>, bit: String) -> Definition {
+        Definition { class: class.clone(), bit, comment: false }
+    }
+}
diff --git a/src/dreammaker/lexer.rs b/src/dreammaker/lexer.rs
index 2fb8126..68bf8e8 100644
--- a/src/dreammaker/lexer.rs
+++ b/src/dreammaker/lexer.rs
@@ -3,6 +3,7 @@ use std::io;
 use std::str::FromStr;
 use std::fmt;
 use std::borrow::Cow;
+use std::sync::mpsc;
 use super::{DMError, Location, HasLocation, FileId, Context, Severity};
@@ -238,6 +239,13 @@ impl LocatedToken {
     }
 }
+#[derive(Debug)]
+pub struct Comment {
+    pub location: Location,
+    /// Includes the comment characters.
+    pub text: String,
+}
+
 fn is_digit(ch: u8) -> bool {
     ch >= b'0' && ch <= b'9'
 }
@@ -380,6 +388,7 @@ pub struct Lexer<'ctx, I> {
     at_line_head: bool,
     directive: Directive,
     interp_stack: Vec<Interpolation>,
+    comments: Option<mpsc::Sender<Comment>>,
 }
 impl<'ctx, I> fmt::Debug for Lexer<'ctx, I> {
@@ -421,9 +430,14 @@ impl<'ctx, I: Iterator<Item=io::Result<u8>>> Lexer<'ctx, I> {
             at_line_head: true,
             directive: Directive::None,
             interp_stack: Vec::new(),
+            comments: None,
         }
     }
+    pub fn save_comments(&mut self, chan: mpsc::Sender<Comment>) {
+        self.comments = Some(chan);
+    }
+
     fn next(&mut self) -> Option<u8> {
         if let Some(next) = self.next.take() {
             return Some(next);
@@ -461,6 +475,11 @@ impl<'ctx, I: Iterator<Item=io::Result<u8>>> Lexer<'ctx, I> {
     fn skip_block_comments(&mut self) {
         let mut depth = 1;
         let mut buffer = [0, 0];
+        let mut comment = Comment {
+            location: self.location(),
+            text: "/*".to_owned(),
+        };
+
         while depth > 0 {
             // read one character
             buffer[0] = buffer[1];
@@ -477,11 +496,19 @@ impl<'ctx, I: Iterator<Item=io::Result<u8>>> Lexer<'ctx, I> {
             } else if buffer == *b"*/" {
                 depth -= 1;
             }
+            comment.text.push(buffer[1] as char);
+        }
+        if let Some(ref chan) = self.comments {
+            let _ = chan.send(comment).is_err();
         }
     }
     fn skip_line_comment(&mut self) {
         let mut backslash = false;
+        let mut comment = Comment {
+            location: self.location(),
+            text: "//".to_owned(),
+        };
         while let Some(ch) = self.next() {
             if ch == b'\r' {
                 // not listening
@@ -492,6 +519,10 @@ impl<'ctx, I: Iterator<Item=io::Result<u8>>> Lexer<'ctx, I> {
             } else if ch == b'\\' {
                 backslash = true;
             }
+            comment.text.push(ch as char);
+        }
+        if let Some(ref chan) = self.comments {
+            let _ = chan.send(comment);
         }
     }
diff --git a/src/dreammaker/parser.rs b/src/dreammaker/parser.rs
index 351cb07..c4ff22f 100644
--- a/src/dreammaker/parser.rs
+++ b/src/dreammaker/parser.rs
@@ -19,6 +19,7 @@ pub fn parse<I>(context: &Context, iter: I) -> ObjectTree where
     I: IntoIterator<Item=LocatedToken>
 {
     let mut parser = Parser::new(context, iter.into_iter());
+    parser.tree.register_builtins();
     parser.run();
     let procs_total = parser.procs_good + parser.procs_bad;
@@ -313,7 +314,7 @@ impl TTKind {
 pub struct Parser<'ctx, 'an, I> {
     context: &'ctx Context,
     annotations: Option<&'an mut AnnotationTree>,
-    tree: ObjectTree,
+    pub tree: ObjectTree,
     input: I,
     eof: bool,
@@ -353,7 +354,6 @@ impl<'ctx, 'an, I> Parser<'ctx, 'an, I> where
     }
     pub fn run(&mut self) {
-        self.tree.register_builtins();
         let root = self.root();
         if let Err(e) = self.require(root) {
             self.context.register_error(e);
diff --git a/src/dreammaker/preprocessor.rs b/src/dreammaker/preprocessor.rs
index 4440424..994267d 100644
--- a/src/dreammaker/preprocessor.rs
+++ b/src/dreammaker/preprocessor.rs
@@ -3,6 +3,7 @@ use std::collections::{HashMap, VecDeque};
 use std::io;
 use std::fs::File;
 use std::path::{Path, PathBuf};
+use std::sync::mpsc;
 use interval_tree::{IntervalTree, range};
@@ -230,6 +231,7 @@ impl Ifdef {
 pub struct Preprocessor<'ctx> {
     context: &'ctx Context,
     env_file: PathBuf,
+    comments: Option<mpsc::Sender<Comment>>,
     include_stack: IncludeStack<'ctx>,
     last_input_loc: Location,
@@ -274,6 +276,7 @@ impl<'ctx> Preprocessor<'ctx> {
         Ok(Preprocessor {
             context,
             env_file,
+            comments: None,
             include_stack: IncludeStack {
                 stack: vec![include],
             },
@@ -292,6 +295,10 @@ impl<'ctx> Preprocessor<'ctx> {
         })
     }
+    pub fn save_comments(&mut self, chan: mpsc::Sender<Comment>) {
+        self.comments = Some(chan);
+    }
+
     /// Move all active defines to the define history.
     pub fn finalize(&mut self) {
         let mut i = 0;
@@ -328,6 +335,7 @@ impl<'ctx> Preprocessor<'ctx> {
         Preprocessor {
             context: context,
+            comments: None,
             env_file: self.env_file.clone(),
             include_stack: Default::default(),
             history: Default::default(), // TODO: support branching a second time
@@ -552,11 +560,16 @@ impl<'ctx> Preprocessor<'ctx> {
                     // TODO: warn if a file is double-included, and
                     // don't include it a second time
                     FileType::DM => match Include::from_file(self.context, candidate) {
-                        Ok(include) => {
+                        Ok(mut include) => {
                             // A phantom newline keeps the include
                             // directive being indented from making
                             // the first line of the file indented.
                             self.output.push_back(Token::Punct(Punctuation::Newline));
+                            if let Include::File { ref mut lexer, .. } = include {
+                                if let Some(ref comments) = self.comments {
+                                    lexer.save_comments(comments.clone());
+                                }
+                            }
                             self.include_stack.stack.push(include);
                         }
                         Err(e) => self.context.register_error(DMError::new(self.last_input_loc,
-- 
2.17.1
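
A note on how a filter like this is normally hooked up: Doxygen's INPUT_FILTER setting pipes each matching source file through an external command and parses that command's stdout in place of the raw file, which is how the pre-generated dummy files get served. The fragment below is a minimal sketch of such a Doxyfile and is not part of the patch above; the option names (EXTENSION_MAPPING, FILE_PATTERNS, INPUT, INPUT_FILTER, FILTER_SOURCE_FILES) are standard Doxygen settings, but the values, the code/ input directory, and the tgstation.dme environment name are illustrative assumptions based on the two-phase design described in the commit message.

# Hypothetical Doxyfile fragment: assumed values, not taken from the patch.
# Generate the dummy files first by running the .dme pass by hand, so the
# per-file filter can serve them from ./dm2doxy/. The filter resolves paths
# relative to the working directory, so run both commands from the
# environment root:
#   cargo run -p dm2doxy -- tgstation.dme
#   doxygen
EXTENSION_MAPPING   = dm=C++
FILE_PATTERNS       = *.dm
INPUT               = code
INPUT_FILTER        = dm2doxy
FILTER_SOURCE_FILES = YES

Splitting the work into a .dme pass that writes line-number-accurate dummy files and a .dm pass that merely copies the matching dummy to stdout keeps each per-file filter invocation trivial, which is why the commit message can attribute the 6.5-hour runtime to Doxygen itself rather than to the filter.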