diff --git a/.envrc b/.envrc
new file mode 100644
index 0000000..3550a30
--- /dev/null
+++ b/.envrc
@@ -0,0 +1 @@
+use flake
diff --git a/.gitignore b/.gitignore
index ea8c4bf..2d5df85 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 /target
+.direnv
diff --git a/Cargo.lock b/Cargo.lock
index f958f82..3c2f247 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -359,7 +359,10 @@ dependencies = [
  "console",
  "futures",
  "indicatif",
+ "lazy_static",
+ "regex",
  "swc_common",
+ "swc_ecma_ast",
  "swc_ecma_parser",
  "tokio",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index 7662756..2df075e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,6 +12,9 @@ clap = { version = "4.2.7", features = ["derive"] }
 console = "0.15.6"
 futures = "0.3.28"
 indicatif = { version = "0.17.3", features = ["tokio", "improved_unicode", "vt100"] }
+lazy_static = "1.4.0"
+regex = "1.8.1"
 swc_common = { version = "0.31.10", features = ["tty-emitter"] }
+swc_ecma_ast = "0.104.3"
 swc_ecma_parser = "0.134.6"
 tokio = { version = "1.28.1", features = ["full"] }
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 0000000..6ad42cd
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,112 @@
+{
+  "nodes": {
+    "fenix": {
+      "inputs": {
+        "nixpkgs": "nixpkgs",
+        "rust-analyzer-src": "rust-analyzer-src"
+      },
+      "locked": {
+        "lastModified": 1684390923,
+        "narHash": "sha256-8LSe1K8ua8uyqc0B8wsQEY9qNCTX2h78mzUt2A7Uqhg=",
+        "owner": "nix-community",
+        "repo": "fenix",
+        "rev": "8a69206e50ca6a2cc4ae7a22eef4bfcbe6dbc9f6",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-community",
+        "repo": "fenix",
+        "type": "github"
+      }
+    },
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1681202837,
+        "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
+        "type": "github"
+      },
+      "original": {
+        "id": "flake-utils",
+        "type": "indirect"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1684305980,
+        "narHash": "sha256-vd4SKXX1KZfSX6n3eoguJw/vQ+sBL8XGdgfxjEgLpKc=",
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "rev": "e6e389917a8c778be636e67a67ec958f511cc55d",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nixos",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs_2": {
+      "locked": {
+        "lastModified": 1684432087,
+        "narHash": "sha256-3zFTOY/3+kN9x9Zdq6ixLmgV4ZcEd1aafq41v/OVUek=",
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "rev": "7721e0d2c1845c24eafd5a016b9d349187c48097",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "fenix": "fenix",
+        "flake-utils": "flake-utils",
+        "nixpkgs": "nixpkgs_2"
+      }
+    },
+    "rust-analyzer-src": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1684145961,
+        "narHash": "sha256-Ms99ML1P53EC50TnznmV55QwhOJtql75BbXfyiGuFvU=",
+        "owner": "rust-lang",
+        "repo": "rust-analyzer",
+        "rev": "2f8cd66fb4c98026d2bdbdf17270e3472e1ca42a",
+        "type": "github"
+      },
+      "original": {
+        "owner": "rust-lang",
+        "ref": "nightly",
+        "repo": "rust-analyzer",
+        "type": "github"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 0000000..93fd968
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,42 @@
+{
+  inputs = {
+    nixpkgs.url = "github:nixos/nixpkgs";
+    fenix.url = "github:nix-community/fenix";
+  };
+
+  outputs = { self, nixpkgs, flake-utils, fenix }:
+    flake-utils.lib.eachDefaultSystem (system:
+      let
+        pkgs = import nixpkgs {
+          inherit system;
+          overlays = [ fenix.overlays.default ];
+        };
+
+        toolchain = pkgs.fenix.stable;
+
+        flakePkgs = rec { };
+      in rec {
+        packages = flake-utils.lib.flattenTree flakePkgs;
+
+        devShell = pkgs.mkShell {
+          inputsFrom = with flakePkgs; [ ];
+
+          packages = (with pkgs; [
+            cargo-deny
+            cargo-edit
+            cargo-expand
+            cargo-flamegraph
+            cargo-watch
+          ]) ++ (with toolchain; [
+            cargo
+            rustc
+            clippy
+
+            # Get the nightly version of rustfmt so we can wrap comments
+            pkgs.fenix.default.rustfmt
+          ]);
+
+          CARGO_UNSTABLE_SPARSE_REGISTRY = "true";
+        };
+      });
+}
diff --git a/src/main.rs b/src/main.rs
index 203d715..8dfffb0 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,17 +1,21 @@
+pub mod progress_bar;
+pub mod walkdir;
+
 use std::path::{Path, PathBuf};
-use std::sync::mpsc;
+use std::sync::mpsc::{self, Sender};
 use std::thread;
 
 use anyhow::{Error, Result};
-use async_walkdir::{Filtering, WalkDir};
+
 use clap::Parser;
 use console::Term;
-use futures::{stream, StreamExt, TryStreamExt};
+use futures::{StreamExt, TryStreamExt};
 use indicatif::{ProgressBar, ProgressStyle};
 use swc_common::errors::ColorConfig;
 use swc_common::{errors::Handler, sync::Lrc};
 use swc_common::{FileName, SourceMap};
-use swc_ecma_parser::EsConfig;
+use swc_ecma_ast::ModuleDecl;
+use swc_ecma_parser::TsConfig;
 use swc_ecma_parser::{
     lexer::Lexer as EcmaLexer, Parser as EcmaParser, StringInput, Syntax,
 };
@@ -19,10 +23,13 @@ use tokio::fs;
 use tokio::sync::Semaphore;
 use tokio::{fs::File, io::AsyncReadExt};
 
+use crate::progress_bar::BarAction;
+use crate::walkdir::create_walkdir;
+
 #[derive(Parser, Debug)]
 struct Opt {
     /// Output directory
-    #[clap(default_value = "generated/doc")]
+    #[clap(long = "out-dir", default_value = "generated/doc")]
     out_dir: PathBuf,
 
     /// Paths to process
@@ -59,33 +66,29 @@ async fn main() -> Result<()> {
     );
     bar.set_prefix("Documenting");
 
-    enum BarAction {
-        Inc,
-        Finish,
-        SetLen(u64),
-    }
 
     let (tx, rx) = mpsc::channel::<BarAction>();
     thread::spawn(move || {
         for msg in rx.into_iter() {
             match msg {
                 BarAction::Inc => bar.inc(1),
-                BarAction::Finish => bar.finish(),
+                BarAction::Finish => bar.finish_and_clear(),
                 BarAction::SetLen(n) => bar.set_length(n),
+                BarAction::Print(s) => bar.println(s),
             }
         }
     });
 
     for base_path in paths.into_iter() {
        let num_files = create_walkdir(&base_path).count().await;
-        tx.send(BarAction::SetLen(num_files as u64));
-        println!("Counted {num_files} files.");
+        tx.send(BarAction::SetLen(num_files as u64))?;
+        tx.send(BarAction::Print(format!("Counted {num_files} files.")))?;
 
         let entries_stream = create_walkdir(&base_path);
         let semaphore = Semaphore::new(10);
         let base_path2 = base_path.clone();
 
-        let processed_stream = entries_stream
+        let _processed_stream = entries_stream
            .map_err(|e| Error::from(e))
            .and_then(|dir_ent| {
                let tx = tx.clone();
@@ -107,9 +110,9 @@ async fn main() -> Result<()> {
                        contents
                    };
 
-                    handle_data(relative_path, data).await?;
+                    handle_data(tx.clone(), relative_path, data).await?;
 
-                    tx.send(BarAction::Inc);
+                    tx.send(BarAction::Inc)?;
 
                    Ok::<_, Error>(false)
                }
@@ -118,24 +121,27 @@ async fn main() -> Result<()> {
            .await;
    }
 
-    tx.send(BarAction::Finish);
+    tx.send(BarAction::Finish)?;
 
    Ok(())
 }
 
-async fn handle_data(path: impl AsRef<Path>, data: String) -> Result<()> {
+async fn handle_data(
+    tx: Sender<BarAction>,
+    path: impl AsRef<Path>,
+    data: String,
+) -> Result<()> {
     let cm: Lrc<SourceMap> = Default::default();
 
     let handler =
         Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(cm.clone()));
 
-    let fm = cm.new_source_file(
-        FileName::Custom(path.as_ref().display().to_string()),
-        data,
-    );
+    let path_display = path.as_ref().display();
 
-    let syntax = Syntax::Es(EsConfig {
-        jsx: true,
+    let fm = cm.new_source_file(FileName::Custom(path_display.to_string()), data);
+
+    let syntax = Syntax::Typescript(TsConfig {
+        tsx: true,
         ..Default::default()
     });
 
@@ -162,35 +168,22 @@ async fn handle_data(path: impl AsRef<Path>, data: String) -> Result<()> {
        })
        .expect("failed to parser module");
 
-    // println!("Module: {module:?}");
+    tx.send(BarAction::Print(format!("Parsed module. {}", path_display)))?;
+
+    for item in module.body.iter() {
+        tx.send(BarAction::Print(format!("SHIET {item:?}")))?;
+        let decl = match item.as_module_decl() {
+            Some(v) => v,
+            None => continue,
+        };
+
+        match decl {
+            ModuleDecl::ExportDecl(decl) => {
+                tx.send(BarAction::Print(format!("SHIET {decl:?}")))?;
+            }
+            _ => {}
+        }
+    }
 
     Ok(())
 }
-
-fn create_walkdir(path: impl AsRef<Path>) -> WalkDir {
-    WalkDir::new(path.as_ref()).filter(|entry| async move {
-        let path = entry.path();
-
-        if let Some(true) = path
-            .file_name()
-            .map(|f| f == "node_modules" || f.to_string_lossy().starts_with('.'))
-        {
-            return Filtering::IgnoreDir;
-        }
-
-        if path.is_file()
-            && !path
-                .file_name()
-                .and_then(|f| f.to_str())
-                .map_or_else(|| false, |f| f.ends_with(".js"))
-        {
-            return Filtering::Ignore;
-        }
-
-        if path.is_dir() {
-            return Filtering::Ignore;
-        }
-
-        Filtering::Continue
-    })
-}
diff --git a/src/progress_bar.rs b/src/progress_bar.rs
new file mode 100644
index 0000000..26944c4
--- /dev/null
+++ b/src/progress_bar.rs
@@ -0,0 +1,6 @@
+pub enum BarAction {
+    Inc,
+    Finish,
+    SetLen(u64),
+    Print(String),
+}
diff --git a/src/walkdir.rs b/src/walkdir.rs
new file mode 100644
index 0000000..6009f16
--- /dev/null
+++ b/src/walkdir.rs
@@ -0,0 +1,37 @@
+use std::path::Path;
+
+use async_walkdir::{WalkDir, Filtering};
+use regex::Regex;
+use lazy_static::lazy_static;
+
+lazy_static! {
+    static ref MATCH_PATTERN: Regex = Regex::new(r".*\.[jt]sx?$").unwrap();
+}
+
+pub fn create_walkdir(path: impl AsRef<Path>) -> WalkDir {
+    WalkDir::new(path.as_ref()).filter(|entry| async move {
+        let path = entry.path();
+
+        if let Some(true) = path
+            .file_name()
+            .map(|f| f == "node_modules" || f.to_string_lossy().starts_with('.'))
+        {
+            return Filtering::IgnoreDir;
+        }
+
+        if path.is_file()
+            && path
+                .file_name()
+                .and_then(|f| f.to_str())
+                .map_or_else(|| true, |f| !MATCH_PATTERN.is_match(f))
+        {
+            return Filtering::Ignore;
+        }
+
+        if path.is_dir() {
+            return Filtering::Ignore;
+        }
+
+        Filtering::Continue
+    })
+}