feat: clean napi-derive noop feature code path (#1571)

Author: liuyi, 2023-04-18 10:15:29 +08:00 (committed via GitHub)
Commit: 5a1f229dba (parent: cd8888aa23)
6 changed files with 300 additions and 310 deletions

File 1 of 6: napi-derive-backend (modified)

@@ -139,9 +139,9 @@ pub enum NapiEnumValue {
   Number(i32),
 }
 
-impl Into<Literal> for &NapiEnumValue {
-  fn into(self) -> Literal {
-    match self {
+impl From<&NapiEnumValue> for Literal {
+  fn from(val: &NapiEnumValue) -> Self {
+    match val {
       NapiEnumValue::String(string) => Literal::string(string),
       NapiEnumValue::Number(number) => Literal::i32_unsuffixed(number.to_owned()),
     }

File 2 of 6: napi-derive Cargo.toml (modified)

@@ -27,6 +27,7 @@ napi-derive-backend = { version = "1.0.49", path = "../backend" }
 proc-macro2 = "1.0"
 quote = "1.0"
 syn = { version = "1.0.61", features = ["fold", "full", "extra-traits"] }
+cfg-if = "1.0"
 
 [lib]
 proc-macro = true

File 3 of 6: new file, the expand module entry that selects between the two code paths

@@ -0,0 +1,9 @@
cfg_if::cfg_if! {
  if #[cfg(feature = "noop")] {
    mod noop;
    pub use self::noop::*;
  } else {
    mod napi;
    pub use self::napi::*;
  }
}
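Note: for readers unfamiliar with the cfg-if crate, the dispatch above is roughly sugar for gating one branch behind the feature test and the other behind its negation. A minimal sketch of the equivalent hand-written form (inline placeholder modules are used only so the snippet stands alone; the real bodies live in the noop and napi submodules shown below):

// Roughly what the cfg_if! block desugars to: one branch is compiled when the
// `noop` feature is enabled, the other when it is not.
#[cfg(feature = "noop")]
mod noop {
  // placeholder; the real body is the no-op expansion module
}
#[cfg(feature = "noop")]
pub use self::noop::*;

#[cfg(not(feature = "noop"))]
mod napi {
  // placeholder; the real body is the full expansion module
}
#[cfg(not(feature = "noop"))]
pub use self::napi::*;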

File 4 of 6: new file, the full expansion path used when the noop feature is disabled

@@ -0,0 +1,242 @@
use std::env;
use std::fs;
#[cfg(feature = "type-def")]
use std::io::BufWriter;
use std::io::Write;
use std::sync::atomic::{AtomicBool, Ordering};
use crate::parser::{attrs::BindgenAttrs, ParseNapi};
#[cfg(feature = "type-def")]
use napi_derive_backend::ToTypeDef;
use napi_derive_backend::{BindgenResult, Napi, TryToTokens, REGISTER_IDENTS};
use proc_macro2::{TokenStream, TokenTree};
use quote::ToTokens;
use syn::{Attribute, Item};
/// A flag indicating whether at least one `napi` macro has been expanded.
/// ```ignore
/// if BUILT_FLAG
///   .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
///   .is_ok() {
///   // logic on first macro expansion
/// }
///
/// ```
static BUILT_FLAG: AtomicBool = AtomicBool::new(false);
pub fn expand(attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStream> {
  if BUILT_FLAG
    .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
    .is_ok()
  {
    // logic on first macro expansion
    #[cfg(feature = "type-def")]
    prepare_type_def_file();

    if let Ok(wasi_register_file) = env::var("WASI_REGISTER_TMP_PATH") {
      if let Err(_e) = fs::remove_file(wasi_register_file) {
        #[cfg(debug_assertions)]
        {
          println!("Failed to manipulate wasi register file: {:?}", _e);
        }
      }
    }
  }

  let mut item = syn::parse2::<Item>(input)?;
  let opts: BindgenAttrs = syn::parse2(attr)?;
  let mut tokens = proc_macro2::TokenStream::new();
  if let Item::Mod(mut js_mod) = item {
    let js_name = opts.js_name().map_or_else(
      || js_mod.ident.to_string(),
      |(js_name, _)| js_name.to_owned(),
    );
    if let Some((_, mut items)) = js_mod.content.clone() {
      for item in items.iter_mut() {
        let mut empty_attrs = vec![];
        if let Some(item_opts) = replace_napi_attr_in_mod(
          js_name.clone(),
          match item {
            Item::Fn(ref mut function) => &mut function.attrs,
            Item::Struct(ref mut struct_) => &mut struct_.attrs,
            Item::Enum(ref mut enum_) => &mut enum_.attrs,
            Item::Const(ref mut const_) => &mut const_.attrs,
            Item::Impl(ref mut impl_) => &mut impl_.attrs,
            Item::Mod(mod_) => {
              let mod_in_mod = mod_
                .attrs
                .iter()
                .enumerate()
                .find(|(_, m)| m.path.segments[0].ident == "napi");
              if mod_in_mod.is_some() {
                bail_span!(
                  mod_,
                  "napi module cannot be nested under another napi module"
                );
              } else {
                &mut empty_attrs
              }
            }
            _ => &mut empty_attrs,
          },
        ) {
          let napi = item.parse_napi(&mut tokens, item_opts)?;
          napi.try_to_tokens(&mut tokens)?;
          #[cfg(feature = "type-def")]
          output_type_def(&napi);
        } else {
          item.to_tokens(&mut tokens);
        };
      }
      js_mod.content = None;
    };
    let js_mod_attrs: Vec<Attribute> = js_mod
      .attrs
      .clone()
      .into_iter()
      .filter(|attr| attr.path.segments[0].ident != "napi")
      .collect();
    let mod_name = js_mod.ident;
    let visible = js_mod.vis;
    let mod_tokens = quote! { #(#js_mod_attrs)* #visible mod #mod_name { #tokens } };
    Ok(mod_tokens)
  } else {
    let napi = item.parse_napi(&mut tokens, opts)?;
    napi.try_to_tokens(&mut tokens)?;
    #[cfg(feature = "type-def")]
    output_type_def(&napi);
    REGISTER_IDENTS.with(|idents| {
      if let Ok(wasi_register_file) = env::var("WASI_REGISTER_TMP_PATH") {
        let mut file =
          fs::File::create(wasi_register_file).expect("Create wasi register file failed");
        file
          .write_all(format!("{:?}", idents.borrow()).as_bytes())
          .expect("Write wasi register file failed");
      }
    });
    Ok(tokens)
  }
}
#[cfg(feature = "type-def")]
fn output_type_def(napi: &Napi) {
  if let Ok(type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
    if let Some(type_def) = napi.to_type_def() {
      fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open(type_def_file)
        .and_then(|file| {
          let mut writer = BufWriter::<fs::File>::new(file);
          writer.write_all(type_def.to_string().as_bytes())?;
          writer.write_all("\n".as_bytes())
        })
        .unwrap_or_else(|e| {
          println!("Failed to write type def file: {:?}", e);
        });
    }
  }
}
fn replace_napi_attr_in_mod(
  js_namespace: String,
  attrs: &mut Vec<syn::Attribute>,
) -> Option<BindgenAttrs> {
  let napi_attr = attrs.clone();
  let napi_attr = napi_attr
    .iter()
    .enumerate()
    .find(|(_, m)| m.path.segments[0].ident == "napi");
  if let Some((index, napi_attr)) = napi_attr {
    let attr_token_stream = napi_attr.tokens.clone();
    let raw_attr_stream = attr_token_stream.to_string();
    let raw_attr_stream = if !raw_attr_stream.is_empty() {
      raw_attr_stream
        .strip_prefix('(')
        .unwrap()
        .strip_suffix(')')
        .unwrap()
        .to_string()
    } else {
      raw_attr_stream
    };
    let raw_attr_token_stream = syn::parse_str::<TokenStream>(raw_attr_stream.as_str()).unwrap();
    let new_attr: syn::Attribute = if !raw_attr_stream.is_empty() {
      syn::parse_quote!(
        #[napi(#raw_attr_token_stream, namespace = #js_namespace)]
      )
    } else {
      syn::parse_quote!(
        #[napi(namespace = #js_namespace)]
      )
    };
    let struct_opts: BindgenAttrs =
      if let Some(TokenTree::Group(g)) = new_attr.tokens.into_iter().next() {
        syn::parse2(g.stream()).ok()?
      } else {
        syn::parse2(quote! {}).ok()?
      };
    attrs.remove(index);
    Some(struct_opts)
  } else {
    None
  }
}
#[cfg(feature = "type-def")]
fn prepare_type_def_file() {
  if let Ok(ref type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
    use napi_derive_backend::{NAPI_RS_CLI_VERSION, NAPI_RS_CLI_VERSION_WITH_SHARED_CRATES_FIX};
    if let Err(_e) = if *NAPI_RS_CLI_VERSION >= *NAPI_RS_CLI_VERSION_WITH_SHARED_CRATES_FIX {
      remove_existed_type_def(type_def_file)
    } else {
      fs::remove_file(type_def_file)
    } {
      #[cfg(debug_assertions)]
      {
        println!("Failed to manipulate type def file: {:?}", _e);
      }
    }
  }
}
#[cfg(feature = "type-def")]
fn remove_existed_type_def(type_def_file: &str) -> std::io::Result<()> {
  use std::io::{BufRead, BufReader};

  let pkg_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
  if let Ok(content) = std::fs::File::open(type_def_file) {
    let reader = BufReader::new(content);
    let cleaned_content = reader
      .lines()
      .filter_map(|line| {
        if let Ok(line) = line {
          if let Some((package_name, _)) = line.split_once(':') {
            if pkg_name == package_name {
              return None;
            }
          }
          Some(line)
        } else {
          None
        }
      })
      .collect::<Vec<String>>()
      .join("\n");
    let mut content = std::fs::OpenOptions::new()
      .read(true)
      .write(true)
      .truncate(true)
      .open(type_def_file)?;
    content.write_all(cleaned_content.as_bytes())?;
    content.write_all(b"\n")?;
  }
  Ok(())
}
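Note: a rough sketch of what the module branch of expand above amounts to, with a hypothetical input (the names are illustrative, not taken from the repository):

// Hypothetical input:
//
//   #[napi]
//   mod math {
//     #[napi]
//     pub fn add(a: u32, b: u32) -> u32 { a + b }
//   }
//
// For each inner item, replace_napi_attr_in_mod swaps the item's own #[napi]
// attribute for one that carries the module's JS name, so the function is
// parsed as if it had been written as:
//
//   #[napi(namespace = "math")]
//   pub fn add(a: u32, b: u32) -> u32 { a + b }
//
// The expanded items are then wrapped back into `mod math { ... }`, preserving
// the module's visibility and any remaining non-napi attributes.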

File 5 of 6: new file, the no-op expansion path used when the noop feature is enabled

@@ -0,0 +1,36 @@
use napi_derive_backend::BindgenResult;
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::Attribute;
pub fn expand(_attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStream> {
  let mut item = syn::parse2::<syn::Item>(input)?;
  let mut tokens = TokenStream::new();
  if let syn::Item::Struct(ref mut struct_) = item {
    struct_
      .fields
      .iter_mut()
      .for_each(|field| find_and_remove_napi_attr(&mut field.attrs))
  }
  item.to_tokens(&mut tokens);
  Ok(tokens)
}

fn find_and_remove_napi_attr(attrs: &mut Vec<Attribute>) {
  loop {
    let napi_attr = attrs
      .iter()
      .enumerate()
      .find(|&(_, m)| m.path.segments[0].ident == "napi");

    let pos = match napi_attr {
      Some((pos, _raw_attr)) => pos,
      None => break,
    };

    attrs.remove(pos);
  }
}
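Note: a rough illustration of the noop path's effect, with a hypothetical input (attribute names are illustrative): the item is re-emitted with field-level #[napi] attributes stripped and no N-API glue generated, so annotated code still compiles when the noop feature is enabled.

// Hypothetical input compiled with the `noop` feature enabled:
//
//   #[napi(object)]
//   pub struct Config {
//     #[napi(js_name = "maxSize")]
//     pub max_size: u32,
//   }
//
// The outer attribute is consumed by the attribute macro itself; expand()
// then strips the remaining field-level #[napi] attribute and re-emits:
pub struct Config {
  pub max_size: u32,
}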

File 6 of 6: napi-derive lib.rs (modified)

@@ -1,5 +1,6 @@
 #[cfg(feature = "compat-mode")]
 mod compat_macro;
+mod expand;
 mod parser;
 
 #[macro_use]
@@ -9,101 +10,25 @@ extern crate napi_derive_backend;
 #[macro_use]
 extern crate quote;
 
-#[cfg(not(feature = "noop"))]
 use std::env;
-#[cfg(not(feature = "noop"))]
-use std::fs;
-#[cfg(not(feature = "noop"))]
-use std::io::Write;
-#[cfg(all(feature = "type-def", not(feature = "noop")))]
-use std::io::{BufWriter, Result as IOResult};
-#[cfg(not(feature = "noop"))]
-use std::sync::atomic::{AtomicBool, Ordering};
 
-use napi_derive_backend::BindgenResult;
-#[cfg(not(feature = "noop"))]
-use napi_derive_backend::TryToTokens;
-#[cfg(not(feature = "noop"))]
-use napi_derive_backend::REGISTER_IDENTS;
-#[cfg(all(feature = "type-def", not(feature = "noop")))]
-use napi_derive_backend::{ToTypeDef, TypeDef};
-#[cfg(not(feature = "noop"))]
-use parser::{attrs::BindgenAttrs, ParseNapi};
-use proc_macro::TokenStream as RawStream;
-use proc_macro2::TokenStream;
-#[cfg(not(feature = "noop"))]
-use proc_macro2::TokenTree;
-use quote::ToTokens;
-use syn::Attribute;
-#[cfg(not(feature = "noop"))]
-use syn::Item;
+use proc_macro::TokenStream;
 #[cfg(feature = "compat-mode")]
 use syn::{fold::Fold, parse_macro_input, ItemFn};
 
-#[cfg(not(feature = "noop"))]
-/// a flag indicate whether or never at least one `napi` macro has been expanded.
-/// ```ignore
-/// if BUILT_FLAG
-///   .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
-///   .is_ok() {
-///   // logic on first macro expansion
-/// }
-///
-/// ```
-static BUILT_FLAG: AtomicBool = AtomicBool::new(false);
-
 /// ```ignore
 /// #[napi]
 /// fn test(ctx: CallContext, name: String) {
 ///   "hello" + name
 /// }
 /// ```
-#[cfg(not(feature = "noop"))]
 #[proc_macro_attribute]
-pub fn napi(attr: RawStream, input: RawStream) -> RawStream {
-  if BUILT_FLAG
-    .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
-    .is_ok()
-  {
-    // logic on first macro expansion
-    #[cfg(feature = "type-def")]
-    if let Ok(ref type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
-      use napi_derive_backend::{NAPI_RS_CLI_VERSION, NAPI_RS_CLI_VERSION_WITH_SHARED_CRATES_FIX};
-      if let Err(_e) = if *NAPI_RS_CLI_VERSION >= *NAPI_RS_CLI_VERSION_WITH_SHARED_CRATES_FIX {
-        remove_existed_type_def(type_def_file)
-      } else {
-        fs::remove_file(type_def_file)
-      } {
-        #[cfg(debug_assertions)]
-        {
-          println!("Failed to manipulate type def file: {:?}", _e);
-        }
-      }
-    }
-    if let Ok(wasi_register_file) = env::var("WASI_REGISTER_TMP_PATH") {
-      if let Err(_e) = fs::remove_file(wasi_register_file) {
-        #[cfg(debug_assertions)]
-        {
-          println!("Failed to manipulate wasi register file: {:?}", _e);
-        }
-      }
-    }
-  }
-  match expand(attr.into(), input.into()) {
+pub fn napi(attr: TokenStream, input: TokenStream) -> TokenStream {
+  match expand::expand(attr.into(), input.into()) {
     Ok(tokens) => {
       if env::var("DEBUG_GENERATED_CODE").is_ok() {
         println!("{}", tokens);
       }
-      REGISTER_IDENTS.with(|idents| {
-        if let Ok(wasi_register_file) = env::var("WASI_REGISTER_TMP_PATH") {
-          let mut file =
-            fs::File::create(wasi_register_file).expect("Create wasi register file failed");
-          file
-            .write_all(format!("{:?}", idents.borrow()).as_bytes())
-            .expect("Write wasi register file failed");
-        }
-      });
       tokens.into()
     }
     Err(diagnostic) => {
@@ -114,150 +39,9 @@ pub fn napi(attr: RawStream, input: RawStream) -> RawStream {
   }
 }
 
-#[cfg(feature = "noop")]
-#[proc_macro_attribute]
-pub fn napi(attr: RawStream, input: RawStream) -> RawStream {
-  match expand(attr.into(), input.into()) {
-    Ok(tokens) => tokens.into(),
-    Err(diagnostic) => {
-      println!("`napi` macro expand failed.");
-      (quote! { #diagnostic }).into()
-    }
-  }
-}
-
-#[cfg(feature = "noop")]
-fn expand(_attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStream> {
-  let mut item = syn::parse2::<syn::Item>(input.into())?;
-  let mut tokens = proc_macro2::TokenStream::new();
-  match item {
-    syn::Item::Struct(ref mut struct_) => struct_
-      .fields
-      .iter_mut()
-      .for_each(|field| find_and_remove_napi_attr(&mut field.attrs)),
-    _ => {}
-  }
-  item.to_tokens(&mut tokens);
-  Ok(tokens)
-}
-
-#[cfg(feature = "noop")]
-fn find_and_remove_napi_attr(attrs: &mut Vec<Attribute>) {
-  loop {
-    let napi_attr = attrs
-      .iter()
-      .enumerate()
-      .find(|&(_, m)| m.path.segments[0].ident == "napi");
-    let pos = match napi_attr {
-      Some((pos, _raw_attr)) => pos,
-      None => break,
-    };
-    attrs.remove(pos);
-  }
-}
-
-#[cfg(not(feature = "noop"))]
-fn expand(attr: TokenStream, input: TokenStream) -> BindgenResult<TokenStream> {
-  let mut item = syn::parse2::<syn::Item>(input)?;
-  let opts: BindgenAttrs = syn::parse2(attr)?;
-  let mut tokens = proc_macro2::TokenStream::new();
-  if let Item::Mod(mut js_mod) = item {
-    let js_name = opts.js_name().map_or_else(
-      || js_mod.ident.to_string(),
-      |(js_name, _)| js_name.to_owned(),
-    );
-    if let Some((_, mut items)) = js_mod.content.clone() {
-      for item in items.iter_mut() {
-        let mut empty_attrs = vec![];
-        if let Some(item_opts) = replace_napi_attr_in_mod(
-          js_name.clone(),
-          match item {
-            syn::Item::Fn(ref mut function) => &mut function.attrs,
-            syn::Item::Struct(ref mut struct_) => &mut struct_.attrs,
-            syn::Item::Enum(ref mut enum_) => &mut enum_.attrs,
-            syn::Item::Const(ref mut const_) => &mut const_.attrs,
-            syn::Item::Impl(ref mut impl_) => &mut impl_.attrs,
-            syn::Item::Mod(mod_) => {
-              let mod_in_mod = mod_
-                .attrs
-                .iter()
-                .enumerate()
-                .find(|(_, m)| m.path.segments[0].ident == "napi");
-              if mod_in_mod.is_some() {
-                bail_span!(
-                  mod_,
-                  "napi module cannot be nested under another napi module"
-                );
-              } else {
-                &mut empty_attrs
-              }
-            }
-            _ => &mut empty_attrs,
-          },
-        ) {
-          let napi = item.parse_napi(&mut tokens, item_opts)?;
-          napi.try_to_tokens(&mut tokens)?;
-          #[cfg(feature = "type-def")]
-          if let Ok(type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
-            if let Err(e) = output_type_def(type_def_file, napi.to_type_def()) {
-              println!("Failed to write type def file: {:?}", e);
-            };
-          }
-        } else {
-          item.to_tokens(&mut tokens);
-        };
-      }
-      js_mod.content = None;
-    };
-    let js_mod_attrs: Vec<Attribute> = js_mod
-      .attrs
-      .clone()
-      .into_iter()
-      .filter(|attr| attr.path.segments[0].ident != "napi")
-      .collect();
-    let mod_name = js_mod.ident;
-    let visible = js_mod.vis;
-    let mod_tokens = quote! { #(#js_mod_attrs)* #visible mod #mod_name { #tokens } };
-    Ok(mod_tokens)
-  } else {
-    let napi = item.parse_napi(&mut tokens, opts)?;
-    napi.try_to_tokens(&mut tokens)?;
-    #[cfg(feature = "type-def")]
-    if let Ok(type_def_file) = env::var("TYPE_DEF_TMP_PATH") {
-      if let Err(e) = output_type_def(type_def_file, napi.to_type_def()) {
-        println!("Failed to write type def file: {:?}", e);
-      };
-    }
-    Ok(tokens)
-  }
-}
-
-#[cfg(all(feature = "type-def", not(feature = "noop")))]
-fn output_type_def(type_def_file: String, type_def: Option<TypeDef>) -> IOResult<()> {
-  if type_def.is_some() {
-    let file = fs::OpenOptions::new()
-      .append(true)
-      .create(true)
-      .open(type_def_file)?;
-    let mut writer = BufWriter::<fs::File>::new(file);
-    writer.write_all(type_def.unwrap().to_string().as_bytes())?;
-    writer.write_all("\n".as_bytes())
-  } else {
-    IOResult::Ok(())
-  }
-}
-
 #[cfg(feature = "compat-mode")]
 #[proc_macro_attribute]
-pub fn contextless_function(_attr: RawStream, input: RawStream) -> RawStream {
+pub fn contextless_function(_attr: TokenStream, input: TokenStream) -> TokenStream {
   let input = parse_macro_input!(input as ItemFn);
   let mut js_fn = compat_macro::JsFunction::new();
   js_fn.fold_item_fn(input);
@@ -287,12 +71,12 @@ pub fn contextless_function(_attr: RawStream, input: RawStream) -> RawStream {
     }
   };
   // Hand the output tokens back to the compiler
-  RawStream::from(expanded)
+  TokenStream::from(expanded)
 }
 
 #[cfg(feature = "compat-mode")]
 #[proc_macro_attribute]
-pub fn js_function(attr: RawStream, input: RawStream) -> RawStream {
+pub fn js_function(attr: TokenStream, input: TokenStream) -> TokenStream {
   let arg_len = parse_macro_input!(attr as compat_macro::ArgLength);
   let arg_len_span = arg_len.length;
   let input = parse_macro_input!(input as ItemFn);
@@ -345,12 +129,12 @@ pub fn js_function(attr: RawStream, input: RawStream) -> RawStream {
     }
   };
   // Hand the output tokens back to the compiler
-  RawStream::from(expanded)
+  TokenStream::from(expanded)
 }
 
 #[cfg(feature = "compat-mode")]
 #[proc_macro_attribute]
-pub fn module_exports(_attr: RawStream, input: RawStream) -> RawStream {
+pub fn module_exports(_attr: TokenStream, input: TokenStream) -> TokenStream {
   let input = parse_macro_input!(input as ItemFn);
   let mut js_fn = compat_macro::JsFunction::new();
   js_fn.fold_item_fn(input);
@@ -390,85 +174,3 @@ pub fn module_exports(_attr: RawStream, input: RawStream) -> RawStream {
   })
   .into()
 }
-
-#[cfg(not(feature = "noop"))]
-fn replace_napi_attr_in_mod(
-  js_namespace: String,
-  attrs: &mut Vec<syn::Attribute>,
-) -> Option<BindgenAttrs> {
-  let napi_attr = attrs.clone();
-  let napi_attr = napi_attr
-    .iter()
-    .enumerate()
-    .find(|(_, m)| m.path.segments[0].ident == "napi");
-  if let Some((index, napi_attr)) = napi_attr {
-    let attr_token_stream = napi_attr.tokens.clone();
-    let raw_attr_stream = attr_token_stream.to_string();
-    let raw_attr_stream = if !raw_attr_stream.is_empty() {
-      raw_attr_stream
-        .strip_prefix('(')
-        .unwrap()
-        .strip_suffix(')')
-        .unwrap()
-        .to_string()
-    } else {
-      raw_attr_stream
-    };
-    let raw_attr_token_stream = syn::parse_str::<TokenStream>(raw_attr_stream.as_str()).unwrap();
-    let new_attr: syn::Attribute = if !raw_attr_stream.is_empty() {
-      syn::parse_quote!(
-        #[napi(#raw_attr_token_stream, namespace = #js_namespace)]
-      )
-    } else {
-      syn::parse_quote!(
-        #[napi(namespace = #js_namespace)]
-      )
-    };
-    let struct_opts: BindgenAttrs =
-      if let Some(TokenTree::Group(g)) = new_attr.tokens.into_iter().next() {
-        syn::parse2(g.stream()).ok()?
-      } else {
-        syn::parse2(quote! {}).ok()?
-      };
-    attrs.remove(index);
-    Some(struct_opts)
-  } else {
-    None
-  }
-}
-
-#[cfg(all(feature = "type-def", not(feature = "noop")))]
-fn remove_existed_type_def(type_def_file: &str) -> std::io::Result<()> {
-  use std::io::{BufRead, BufReader};
-
-  let pkg_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
-  if let Ok(content) = std::fs::File::open(type_def_file) {
-    let reader = BufReader::new(content);
-    let cleaned_content = reader
-      .lines()
-      .filter_map(|line| {
-        if let Ok(line) = line {
-          if let Some((package_name, _)) = line.split_once(':') {
-            if pkg_name == package_name {
-              return None;
-            }
-          }
-          Some(line)
-        } else {
-          None
-        }
-      })
-      .collect::<Vec<String>>()
-      .join("\n");
-    let mut content = std::fs::OpenOptions::new()
-      .read(true)
-      .write(true)
-      .truncate(true)
-      .open(type_def_file)?;
-    content.write_all(cleaned_content.as_bytes())?;
-    content.write_all(b"\n")?;
-  }
-  Ok(())
-}