chore: add vendor dependencies for kauma build
parent 7c94e5d8fb · commit 067ef6141c
1758 changed files with 398473 additions and 0 deletions
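Note (not part of this diff): a vendor/ directory only takes effect if Cargo is told to use it instead of crates.io. The build configuration is not shown in this commit, but vendored dependencies are conventionally wired up with the standard `.cargo/config.toml` stanza that `cargo vendor` prints; assuming the usual layout, it looks like:

[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"

With that in place, `cargo build --offline` resolves proc-macro2 and the rest of the vendored crates from the local vendor/ tree.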
vendor/proc-macro2/src/detection.rs (vendored, new file, 75 lines added)
@@ -0,0 +1,75 @@
use core::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Once;

static WORKS: AtomicUsize = AtomicUsize::new(0);
static INIT: Once = Once::new();

pub(crate) fn inside_proc_macro() -> bool {
    match WORKS.load(Ordering::Relaxed) {
        1 => return false,
        2 => return true,
        _ => {}
    }

    INIT.call_once(initialize);
    inside_proc_macro()
}

pub(crate) fn force_fallback() {
    WORKS.store(1, Ordering::Relaxed);
}

pub(crate) fn unforce_fallback() {
    initialize();
}

#[cfg(not(no_is_available))]
fn initialize() {
    let available = proc_macro::is_available();
    WORKS.store(available as usize + 1, Ordering::Relaxed);
}

// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
// then use catch_unwind to determine whether the compiler's proc_macro is
// working. When proc-macro2 is used from outside of a procedural macro all
// of the proc_macro crate's APIs currently panic.
//
// The Once is to prevent the possibility of this ordering:
//
//     thread 1 calls take_hook, gets the user's original hook
//     thread 1 calls set_hook with the null hook
//     thread 2 calls take_hook, thinks null hook is the original hook
//     thread 2 calls set_hook with the null hook
//     thread 1 calls set_hook with the actual original hook
//     thread 2 calls set_hook with what it thinks is the original hook
//
// in which the user's hook has been lost.
//
// There is still a race condition where a panic in a different thread can
// happen during the interval that the user's original panic hook is
// unregistered such that their hook is incorrectly not called. This is
// sufficiently unlikely and less bad than printing panic messages to stderr
// on correct use of this crate. Maybe there is a libstd feature request
// here. For now, if a user needs to guarantee that this failure mode does
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
// the main thread before launching any other threads.
#[cfg(no_is_available)]
fn initialize() {
    use std::panic::{self, PanicInfo};

    type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;

    let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
    let sanity_check = &*null_hook as *const PanicHook;
    let original_hook = panic::take_hook();
    panic::set_hook(null_hook);

    let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
    WORKS.store(works as usize + 1, Ordering::Relaxed);

    let hopefully_null_hook = panic::take_hook();
    panic::set_hook(original_hook);
    if sanity_check != &*hopefully_null_hook {
        panic!("observed race condition in proc_macro2::inside_proc_macro");
    }
}
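Note (not part of the vendored file): the comment above suggests that a program using proc-macro2 from ordinary, non-macro code can rule out the remaining panic-hook race by touching the span API once on the main thread, so that `initialize` has already run before any worker threads start. A minimal sketch of that pattern, assuming proc-macro2 is a normal dependency of the binary:

use std::thread;

fn main() {
    // Force proc-macro2's one-time detection to run on the main thread,
    // before any other thread could observe the temporarily swapped hook.
    let _ = proc_macro2::Span::call_site();

    let handles: Vec<_> = (0..4)
        .map(|_| {
            thread::spawn(|| {
                // Safe to use proc-macro2 from worker threads now.
                let ts: proc_macro2::TokenStream = "fn demo() {}".parse().unwrap();
                ts.into_iter().count()
            })
        })
        .collect();

    for h in handles {
        println!("tokens: {}", h.join().unwrap());
    }
}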
vendor/proc-macro2/src/extra.rs (vendored, new file, 151 lines added)
@@ -0,0 +1,151 @@
//! Items which do not have a correspondence to any API in the proc_macro crate,
//! but are necessary to include in proc-macro2.

use crate::fallback;
use crate::imp;
use crate::marker::{ProcMacroAutoTraits, MARKER};
use crate::Span;
use core::fmt::{self, Debug};

/// Invalidate any `proc_macro2::Span` that exist on the current thread.
///
/// The implementation of `Span` uses thread-local data structures and this
/// function clears them. Calling any method on a `Span` on the current thread
/// created prior to the invalidation will return incorrect values or crash.
///
/// This function is useful for programs that process more than 2<sup>32</sup>
/// bytes of Rust source code on the same thread. Just like rustc, proc-macro2
/// uses 32-bit source locations, and these wrap around when the total source
/// code processed by the same thread exceeds 2<sup>32</sup> bytes (4
/// gigabytes). After a wraparound, `Span` methods such as `source_text()` can
/// return wrong data.
///
/// # Example
///
/// As of late 2023, there is 200 GB of Rust code published on crates.io.
/// Looking at just the newest version of every crate, it is 16 GB of code. So a
/// workload that involves parsing it all would overflow a 32-bit source
/// location unless spans are being invalidated.
///
/// ```
/// use flate2::read::GzDecoder;
/// use std::ffi::OsStr;
/// use std::io::{BufReader, Read};
/// use std::str::FromStr;
/// use tar::Archive;
///
/// rayon::scope(|s| {
///     for krate in every_version_of_every_crate() {
///         s.spawn(move |_| {
///             proc_macro2::extra::invalidate_current_thread_spans();
///
///             let reader = BufReader::new(krate);
///             let tar = GzDecoder::new(reader);
///             let mut archive = Archive::new(tar);
///             for entry in archive.entries().unwrap() {
///                 let mut entry = entry.unwrap();
///                 let path = entry.path().unwrap();
///                 if path.extension() != Some(OsStr::new("rs")) {
///                     continue;
///                 }
///                 let mut content = String::new();
///                 entry.read_to_string(&mut content).unwrap();
///                 match proc_macro2::TokenStream::from_str(&content) {
///                     Ok(tokens) => {/* ... */},
///                     Err(_) => continue,
///                 }
///             }
///         });
///     }
/// });
/// #
/// # fn every_version_of_every_crate() -> Vec<std::fs::File> {
/// #     Vec::new()
/// # }
/// ```
///
/// # Panics
///
/// This function is not applicable to and will panic if called from a
/// procedural macro.
#[cfg(span_locations)]
#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
pub fn invalidate_current_thread_spans() {
    crate::imp::invalidate_current_thread_spans();
}

/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
/// in a more compact representation than holding those 2 spans individually.
///
/// [`Group`]: crate::Group
#[derive(Copy, Clone)]
pub struct DelimSpan {
    inner: DelimSpanEnum,
    _marker: ProcMacroAutoTraits,
}

#[derive(Copy, Clone)]
enum DelimSpanEnum {
    #[cfg(wrap_proc_macro)]
    Compiler {
        join: proc_macro::Span,
        open: proc_macro::Span,
        close: proc_macro::Span,
    },
    Fallback(fallback::Span),
}

impl DelimSpan {
    pub(crate) fn new(group: &imp::Group) -> Self {
        #[cfg(wrap_proc_macro)]
        let inner = match group {
            imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
                join: group.span(),
                open: group.span_open(),
                close: group.span_close(),
            },
            imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
        };

        #[cfg(not(wrap_proc_macro))]
        let inner = DelimSpanEnum::Fallback(group.span());

        DelimSpan {
            inner,
            _marker: MARKER,
        }
    }

    /// Returns a span covering the entire delimited group.
    pub fn join(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
        }
    }

    /// Returns a span for the opening punctuation of the group only.
    pub fn open(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
        }
    }

    /// Returns a span for the closing punctuation of the group only.
    pub fn close(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
        }
    }
}

impl Debug for DelimSpan {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.join(), f)
    }
}
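Note (not part of the vendored file): `DelimSpan` packs a group's joint, opening, and closing spans into one `Copy` value, which is convenient for error reporting on delimiters. In current proc-macro2 versions it is obtained through `Group::delim_span()`; a minimal sketch, assuming that accessor:

use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let body: TokenStream = "a + b".parse().unwrap();
    let group = Group::new(Delimiter::Parenthesis, body);

    let spans = group.delim_span();
    // join() covers `(a + b)`, open() only `(`, close() only `)`.
    println!("{:?} {:?} {:?}", spans.join(), spans.open(), spans.close());
}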
vendor/proc-macro2/src/fallback.rs (vendored, new file, 1226 lines added)
Diff suppressed because it is too large.
vendor/proc-macro2/src/lib.rs (vendored, new file, 1377 lines added)
Diff suppressed because it is too large.
vendor/proc-macro2/src/location.rs (vendored, new file, 29 lines added)
@@ -0,0 +1,29 @@
use core::cmp::Ordering;

/// A line-column pair representing the start or end of a `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct LineColumn {
    /// The 1-indexed line in the source file on which the span starts or ends
    /// (inclusive).
    pub line: usize,
    /// The 0-indexed column (in UTF-8 characters) in the source file on which
    /// the span starts or ends (inclusive).
    pub column: usize,
}

impl Ord for LineColumn {
    fn cmp(&self, other: &Self) -> Ordering {
        self.line
            .cmp(&other.line)
            .then(self.column.cmp(&other.column))
    }
}

impl PartialOrd for LineColumn {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
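Note (not part of the vendored file): the `Ord` impl orders positions line-first, then column, so an earlier position in a file always compares as smaller. The type itself is only exposed with the semver-exempt span-locations support (it is produced by `Span::start()`/`Span::end()` when that feature is enabled), so the sketch below reproduces the same ordering on a local struct purely to illustrate it:

// A derived Ord on (line, column) in declaration order matches the manual impl above.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct LineColumn {
    line: usize,
    column: usize,
}

fn main() {
    let a = LineColumn { line: 3, column: 10 };
    let b = LineColumn { line: 4, column: 0 };
    let c = LineColumn { line: 3, column: 11 };
    assert!(a < b); // earlier line wins regardless of column
    assert!(a < c); // same line: lower column comes first
    println!("ordering checks passed");
}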
vendor/proc-macro2/src/marker.rs (vendored, new file, 17 lines added)
@@ -0,0 +1,17 @@
use alloc::rc::Rc;
use core::marker::PhantomData;
use core::panic::{RefUnwindSafe, UnwindSafe};

// Zero sized marker with the correct set of autotrait impls we want all proc
// macro types to have.
#[derive(Copy, Clone)]
#[cfg_attr(
    all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)),
    derive(PartialEq, Eq)
)]
pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>);

pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData);

impl UnwindSafe for ProcMacroAutoTraits {}
impl RefUnwindSafe for ProcMacroAutoTraits {}
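Note (not part of the vendored file): the `PhantomData<Rc<()>>` field is a zero-sized way of opting every type that embeds this marker out of `Send` and `Sync` (because `Rc` is neither), while the explicit impls keep the marker unwind-safe. The net effect, assuming the public types behave as documented (!Send/!Sync but UnwindSafe), can be checked at compile time:

use std::panic::UnwindSafe;

fn assert_unwind_safe<T: UnwindSafe>() {}
// fn assert_send<T: Send>() {}

fn main() {
    assert_unwind_safe::<proc_macro2::TokenStream>(); // compiles: unwind-safe
    // assert_send::<proc_macro2::TokenStream>();     // would not compile: !Send
    println!("proc_macro2 types are unwind-safe but not Send");
}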
vendor/proc-macro2/src/parse.rs (vendored, new file, 996 lines added)
@@ -0,0 +1,996 @@
use crate::fallback::{
    self, is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
    TokenStreamBuilder,
};
use crate::{Delimiter, Punct, Spacing, TokenTree};
use core::char;
use core::str::{Bytes, CharIndices, Chars};

#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
    pub rest: &'a str,
    #[cfg(span_locations)]
    pub off: u32,
}

impl<'a> Cursor<'a> {
    pub fn advance(&self, bytes: usize) -> Cursor<'a> {
        let (_front, rest) = self.rest.split_at(bytes);
        Cursor {
            rest,
            #[cfg(span_locations)]
            off: self.off + _front.chars().count() as u32,
        }
    }

    pub fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }

    pub fn starts_with_char(&self, ch: char) -> bool {
        self.rest.starts_with(ch)
    }

    pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
    where
        Pattern: FnMut(char) -> bool,
    {
        self.rest.starts_with(f)
    }

    pub fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }

    fn len(&self) -> usize {
        self.rest.len()
    }

    fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }

    fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }

    fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }

    fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }

    fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
        if self.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(Reject)
        }
    }
}

pub(crate) struct Reject;
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;

fn skip_whitespace(input: Cursor) -> Cursor {
    let mut s = input;

    while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                let (cursor, _) = take_until_newline_or_eof(s);
                s = cursor;
                continue;
            } else if s.starts_with("/**/") {
                s = s.advance(4);
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                match block_comment(s) {
                    Ok((rest, _)) => {
                        s = rest;
                        continue;
                    }
                    Err(Reject) => return s,
                }
            }
        }
        match byte {
            b' ' | 0x09..=0x0d => {
                s = s.advance(1);
                continue;
            }
            b if b.is_ascii() => {}
            _ => {
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = s.advance(ch.len_utf8());
                    continue;
                }
            }
        }
        return s;
    }
    s
}

fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(Reject);
    }

    let mut depth = 0usize;
    let bytes = input.as_bytes();
    let mut i = 0usize;
    let upper = bytes.len() - 1;

    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }

    Err(Reject)
}

fn is_whitespace(ch: char) -> bool {
    // Rust treats left-to-right mark and right-to-left mark as whitespace
    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}

fn word_break(input: Cursor) -> Result<Cursor, Reject> {
    match input.chars().next() {
        Some(ch) if is_ident_continue(ch) => Err(Reject),
        Some(_) | None => Ok(input),
    }
}

// Rustc's representation of a macro expansion error in expression position or
// type position.
const ERROR: &str = "(/*ERROR*/)";

pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
    let mut trees = TokenStreamBuilder::new();
    let mut stack = Vec::new();

    loop {
        input = skip_whitespace(input);

        if let Ok((rest, ())) = doc_comment(input, &mut trees) {
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            None => match stack.last() {
                None => return Ok(trees.build()),
                #[cfg(span_locations)]
                Some((lo, _frame)) => {
                    return Err(LexError {
                        span: Span { lo: *lo, hi: *lo },
                    })
                }
                #[cfg(not(span_locations))]
                Some(_frame) => return Err(LexError { span: Span {} }),
            },
        };

        if let Some(open_delimiter) = match first {
            b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            input = input.advance(1);
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = TokenStreamBuilder::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            let frame = match stack.pop() {
                Some(frame) => frame,
                None => return Err(lex_error(input)),
            };
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(lex_error(input));
            }
            input = input.advance(1);
            let mut g = Group::new(open_delimiter, trees.build());
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
        } else {
            let (rest, mut tt) = match leaf_token(input) {
                Ok((rest, tt)) => (rest, tt),
                Err(Reject) => return Err(lex_error(input)),
            };
            tt.set_span(crate::Span::_new_fallback(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push_token_from_parser(tt);
            input = rest;
        }
    }
}

fn lex_error(cursor: Cursor) -> LexError {
    #[cfg(not(span_locations))]
    let _ = cursor;
    LexError {
        span: Span {
            #[cfg(span_locations)]
            lo: cursor.off,
            #[cfg(span_locations)]
            hi: cursor.off,
        },
    }
}

fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((input, l)) = literal(input) {
        // must be parsed before ident
        Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
    } else if let Ok((input, p)) = punct(input) {
        Ok((input, TokenTree::Punct(p)))
    } else if let Ok((input, i)) = ident(input) {
        Ok((input, TokenTree::Ident(i)))
    } else if input.starts_with(ERROR) {
        let rest = input.advance(ERROR.len());
        let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned()));
        Ok((rest, TokenTree::Literal(repr)))
    } else {
        Err(Reject)
    }
}

fn ident(input: Cursor) -> PResult<crate::Ident> {
    if [
        "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#",
    ]
    .iter()
    .any(|prefix| input.starts_with(prefix))
    {
        Err(Reject)
    } else {
        ident_any(input)
    }
}

fn ident_any(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    let rest = input.advance((raw as usize) << 1);

    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident = crate::Ident::_new(crate::imp::Ident::new_unchecked(
            sym,
            fallback::Span::call_site(),
        ));
        return Ok((rest, ident));
    }

    match sym {
        "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
        _ => {}
    }

    let ident = crate::Ident::_new(crate::imp::Ident::new_raw_unchecked(
        sym,
        fallback::Span::call_site(),
    ));
    Ok((rest, ident))
}

fn ident_not_raw(input: Cursor) -> PResult<&str> {
    let mut chars = input.char_indices();

    match chars.next() {
        Some((_, ch)) if is_ident_start(ch) => {}
        _ => return Err(Reject),
    }

    let mut end = input.len();
    for (i, ch) in chars {
        if !is_ident_continue(ch) {
            end = i;
            break;
        }
    }

    Ok((input.advance(end), &input.rest[..end]))
}

pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
    let rest = literal_nocapture(input)?;
    let end = input.len() - rest.len();
    Ok((rest, Literal::_new(input.rest[..end].to_string())))
}

fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(ok) = string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte_string(input) {
        Ok(ok)
    } else if let Ok(ok) = c_string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte(input) {
        Ok(ok)
    } else if let Ok(ok) = character(input) {
        Ok(ok)
    } else if let Ok(ok) = float(input) {
        Ok(ok)
    } else if let Ok(ok) = int(input) {
        Ok(ok)
    } else {
        Err(Reject)
    }
}

fn literal_suffix(input: Cursor) -> Cursor {
    match ident_not_raw(input) {
        Ok((input, _)) => input,
        Err(Reject) => input,
    }
}

fn string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("\"") {
        cooked_string(input)
    } else if let Ok(input) = input.parse("r") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    backslash_x_char(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {}
                Some((_, 'u')) => {
                    backslash_u(&mut chars)?;
                }
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            _ch => {}
        }
    }
    Err(Reject)
}

fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            _ => {}
        }
    }
    Err(Reject)
}

fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("b\"") {
        cooked_byte_string(input)
    } else if let Ok(input) = input.parse("br") {
        raw_byte_string(input)
    } else {
        Err(Reject)
    }
}

fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut bytes = input.bytes().enumerate();
    while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    backslash_x_byte(&mut bytes)?;
                }
                Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {}
                Some((newline, b @ (b'\n' | b'\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, b)?;
                    bytes = input.bytes().enumerate();
                }
                _ => break,
            },
            b if b.is_ascii() => {}
            _ => break,
        }
    }
    Err(Reject)
}

fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> {
    for (i, byte) in input.bytes().enumerate() {
        match byte {
            b'"' => {
                if i > 255 {
                    // https://github.com/rust-lang/rust/pull/95251
                    return Err(Reject);
                }
                return Ok((input.advance(i + 1), &input.rest[..i]));
            }
            b'#' => {}
            _ => break,
        }
    }
    Err(Reject)
}

fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            other => {
                if !other.is_ascii() {
                    break;
                }
            }
        }
    }
    Err(Reject)
}

fn c_string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("c\"") {
        cooked_c_string(input)
    } else if let Ok(input) = input.parse("cr") {
        raw_c_string(input)
    } else {
        Err(Reject)
    }
}

fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\0' => break,
            _ => {}
        }
    }
    Err(Reject)
}

fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    backslash_x_nonzero(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {}
                Some((_, 'u')) => {
                    if backslash_u(&mut chars)? == '\0' {
                        break;
                    }
                }
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            '\0' => break,
            _ch => {}
        }
    }
    Err(Reject)
}

fn byte(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes).is_ok(),
            Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (offset, _) = bytes.next().ok_or(Reject)?;
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(Reject);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}

fn character(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars).is_ok(),
            Some('u') => backslash_u(&mut chars).is_ok(),
            Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true,
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (idx, _) = chars.next().ok_or(Reject)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}

macro_rules! next_ch {
    ($chars:ident @ $pat:pat) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat => ch,
                _ => return Err(Reject),
            },
            None => return Err(Reject),
        }
    };
}

fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '0'..='7');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    Ok(())
}

fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, u8)>,
{
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    Ok(())
}

fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    if first == '0' && second == '0' {
        Err(Reject)
    } else {
        Ok(())
    }
}

fn backslash_u<I>(chars: &mut I) -> Result<char, Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '{');
    let mut value = 0;
    let mut len = 0;
    for (_, ch) in chars {
        let digit = match ch {
            '0'..='9' => ch as u8 - b'0',
            'a'..='f' => 10 + ch as u8 - b'a',
            'A'..='F' => 10 + ch as u8 - b'A',
            '_' if len > 0 => continue,
            '}' if len > 0 => return char::from_u32(value).ok_or(Reject),
            _ => break,
        };
        if len == 6 {
            break;
        }
        value *= 0x10;
        value += u32::from(digit);
        len += 1;
    }
    Err(Reject)
}

fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> {
    let mut whitespace = input.bytes().enumerate();
    loop {
        if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') {
            return Err(Reject);
        }
        match whitespace.next() {
            Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => {
                last = b;
            }
            Some((offset, _)) => {
                *input = input.advance(offset);
                return Ok(());
            }
            None => return Err(Reject),
        }
    }
}

fn float(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = float_digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if '0' <= ch && ch <= '9' => {}
        _ => return Err(Reject),
    }

    let mut len = 1;
    let mut has_dot = false;
    let mut has_exp = false;
    while let Some(&ch) = chars.peek() {
        match ch {
            '0'..='9' | '_' => {
                chars.next();
                len += 1;
            }
            '.' => {
                if has_dot {
                    break;
                }
                chars.next();
                if chars
                    .peek()
                    .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
                {
                    return Err(Reject);
                }
                len += 1;
                has_dot = true;
            }
            'e' | 'E' => {
                chars.next();
                len += 1;
                has_exp = true;
                break;
            }
            _ => break,
        }
    }

    if !(has_dot || has_exp) {
        return Err(Reject);
    }

    if has_exp {
        let token_before_exp = if has_dot {
            Ok(input.advance(len - 1))
        } else {
            Err(Reject)
        };
        let mut has_sign = false;
        let mut has_exp_value = false;
        while let Some(&ch) = chars.peek() {
            match ch {
                '+' | '-' => {
                    if has_exp_value {
                        break;
                    }
                    if has_sign {
                        return token_before_exp;
                    }
                    chars.next();
                    len += 1;
                    has_sign = true;
                }
                '0'..='9' => {
                    chars.next();
                    len += 1;
                    has_exp_value = true;
                }
                '_' => {
                    chars.next();
                    len += 1;
                }
                _ => break,
            }
        }
        if !has_exp_value {
            return token_before_exp;
        }
    }

    Ok(input.advance(len))
}

fn int(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
    let base = if input.starts_with("0x") {
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = input.advance(2);
        2
    } else {
        10
    };

    let mut len = 0;
    let mut empty = true;
    for b in input.bytes() {
        match b {
            b'0'..=b'9' => {
                let digit = (b - b'0') as u64;
                if digit >= base {
                    return Err(Reject);
                }
            }
            b'a'..=b'f' => {
                let digit = 10 + (b - b'a') as u64;
                if digit >= base {
                    break;
                }
            }
            b'A'..=b'F' => {
                let digit = 10 + (b - b'A') as u64;
                if digit >= base {
                    break;
                }
            }
            b'_' => {
                if empty && base == 10 {
                    return Err(Reject);
                }
                len += 1;
                continue;
            }
            _ => break,
        };
        len += 1;
        empty = false;
    }
    if empty {
        Err(Reject)
    } else {
        Ok(input.advance(len))
    }
}

fn punct(input: Cursor) -> PResult<Punct> {
    let (rest, ch) = punct_char(input)?;
    if ch == '\'' {
        if ident_any(rest)?.0.starts_with_char('\'') {
            Err(Reject)
        } else {
            Ok((rest, Punct::new('\'', Spacing::Joint)))
        }
    } else {
        let kind = match punct_char(rest) {
            Ok(_) => Spacing::Joint,
            Err(Reject) => Spacing::Alone,
        };
        Ok((rest, Punct::new(ch, kind)))
    }
}

fn punct_char(input: Cursor) -> PResult<char> {
    if input.starts_with("//") || input.starts_with("/*") {
        // Do not accept `/` of a comment as a punct.
        return Err(Reject);
    }

    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,
        None => {
            return Err(Reject);
        }
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
    if recognized.contains(first) {
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(Reject)
    }
}

fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
    #[cfg(span_locations)]
    let lo = input.off;
    let (rest, (comment, inner)) = doc_comment_contents(input)?;
    let fallback_span = Span {
        #[cfg(span_locations)]
        lo,
        #[cfg(span_locations)]
        hi: rest.off,
    };
    let span = crate::Span::_new_fallback(fallback_span);

    let mut scan_for_bare_cr = comment;
    while let Some(cr) = scan_for_bare_cr.find('\r') {
        let rest = &scan_for_bare_cr[cr + 1..];
        if !rest.starts_with('\n') {
            return Err(Reject);
        }
        scan_for_bare_cr = rest;
    }

    let mut pound = Punct::new('#', Spacing::Alone);
    pound.set_span(span);
    trees.push_token_from_parser(TokenTree::Punct(pound));

    if inner {
        let mut bang = Punct::new('!', Spacing::Alone);
        bang.set_span(span);
        trees.push_token_from_parser(TokenTree::Punct(bang));
    }

    let doc_ident = crate::Ident::_new(crate::imp::Ident::new_unchecked("doc", fallback_span));
    let mut equal = Punct::new('=', Spacing::Alone);
    equal.set_span(span);
    let mut literal = crate::Literal::string(comment);
    literal.set_span(span);
    let mut bracketed = TokenStreamBuilder::with_capacity(3);
    bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
    bracketed.push_token_from_parser(TokenTree::Punct(equal));
    bracketed.push_token_from_parser(TokenTree::Literal(literal));
    let group = Group::new(Delimiter::Bracket, bracketed.build());
    let mut group = crate::Group::_new_fallback(group);
    group.set_span(span);
    trees.push_token_from_parser(TokenTree::Group(group));

    Ok((rest, ()))
}

fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
    if input.starts_with("//!") {
        let input = input.advance(3);
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, true)))
    } else if input.starts_with("/*!") {
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], true)))
    } else if input.starts_with("///") {
        let input = input.advance(3);
        if input.starts_with_char('/') {
            return Err(Reject);
        }
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, false)))
    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], false)))
    } else {
        Err(Reject)
    }
}

fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
    let chars = input.char_indices();

    for (i, ch) in chars {
        if ch == '\n' {
            return (input.advance(i), &input.rest[..i]);
        } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
            return (input.advance(i + 1), &input.rest[..i]);
        }
    }

    (input.advance(input.len()), input.rest)
}
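Note (not part of the vendored file): this is the fallback lexer that runs whenever proc-macro2 is used outside a real procedural macro. `token_stream` drives `skip_whitespace`, `doc_comment`, `leaf_token`, and a delimiter stack to turn source text into a `TokenStream`, and reports unbalanced delimiters or malformed literals as a `LexError`. A small sketch exercising it through the public `FromStr` entry point:

use proc_macro2::{TokenStream, TokenTree};

fn main() {
    // Outside a proc macro, FromStr ends up in the fallback parser above.
    let tokens: TokenStream = "fn add(a: i32, b: i32) -> i32 { a + b }".parse().unwrap();

    for tt in tokens {
        match tt {
            TokenTree::Ident(i) => println!("ident   {}", i),
            TokenTree::Punct(p) => println!("punct   {}", p.as_char()),
            TokenTree::Group(g) => println!("group   delimiter {:?}", g.delimiter()),
            TokenTree::Literal(l) => println!("literal {}", l),
        }
    }

    // Unbalanced delimiters surface as a proc_macro2::LexError.
    assert!("fn broken(".parse::<TokenStream>().is_err());
}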
vendor/proc-macro2/src/rcvec.rs (vendored, new file, 145 lines added)
@@ -0,0 +1,145 @@
use alloc::rc::Rc;
use alloc::vec;
use core::mem;
use core::panic::RefUnwindSafe;
use core::slice;

pub(crate) struct RcVec<T> {
    inner: Rc<Vec<T>>,
}

pub(crate) struct RcVecBuilder<T> {
    inner: Vec<T>,
}

pub(crate) struct RcVecMut<'a, T> {
    inner: &'a mut Vec<T>,
}

#[derive(Clone)]
pub(crate) struct RcVecIntoIter<T> {
    inner: vec::IntoIter<T>,
}

impl<T> RcVec<T> {
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    pub fn len(&self) -> usize {
        self.inner.len()
    }

    pub fn iter(&self) -> slice::Iter<T> {
        self.inner.iter()
    }

    pub fn make_mut(&mut self) -> RcVecMut<T>
    where
        T: Clone,
    {
        RcVecMut {
            inner: Rc::make_mut(&mut self.inner),
        }
    }

    pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
        let inner = Rc::get_mut(&mut self.inner)?;
        Some(RcVecMut { inner })
    }

    pub fn make_owned(mut self) -> RcVecBuilder<T>
    where
        T: Clone,
    {
        let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
            mem::take(owned)
        } else {
            Vec::clone(&self.inner)
        };
        RcVecBuilder { inner: vec }
    }
}

impl<T> RcVecBuilder<T> {
    pub fn new() -> Self {
        RcVecBuilder { inner: Vec::new() }
    }

    pub fn with_capacity(cap: usize) -> Self {
        RcVecBuilder {
            inner: Vec::with_capacity(cap),
        }
    }

    pub fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    pub fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut {
            inner: &mut self.inner,
        }
    }

    pub fn build(self) -> RcVec<T> {
        RcVec {
            inner: Rc::new(self.inner),
        }
    }
}

impl<'a, T> RcVecMut<'a, T> {
    pub fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    pub fn pop(&mut self) -> Option<T> {
        self.inner.pop()
    }

    pub fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut { inner: self.inner }
    }
}

impl<T> Clone for RcVec<T> {
    fn clone(&self) -> Self {
        RcVec {
            inner: Rc::clone(&self.inner),
        }
    }
}

impl<T> IntoIterator for RcVecBuilder<T> {
    type Item = T;
    type IntoIter = RcVecIntoIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        RcVecIntoIter {
            inner: self.inner.into_iter(),
        }
    }
}

impl<T> Iterator for RcVecIntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
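Note (not part of the vendored file): RcVec is a clone-on-write vector behind an `Rc`. Cloning is a cheap reference-count bump, and mutation either reuses uniquely owned storage (`get_mut`, the `mem::take` branch of `make_owned`) or falls back to a deep copy (`make_mut`, the `Vec::clone` branch). A standalone sketch of the same pattern using only std, with illustrative names that are not the crate's API:

use std::mem;
use std::rc::Rc;

/// Minimal clone-on-write vector in the spirit of RcVec (illustrative only).
struct SharedVec<T> {
    inner: Rc<Vec<T>>,
}

impl<T: Clone> SharedVec<T> {
    fn new(items: Vec<T>) -> Self {
        SharedVec { inner: Rc::new(items) }
    }

    /// Cheap clone: bumps the reference count, no element is copied.
    fn cheap_clone(&self) -> Self {
        SharedVec { inner: Rc::clone(&self.inner) }
    }

    /// Take ownership for mutation: steal the buffer if unique, else deep-copy.
    fn make_owned(mut self) -> Vec<T> {
        if let Some(owned) = Rc::get_mut(&mut self.inner) {
            mem::take(owned)
        } else {
            Vec::clone(&self.inner)
        }
    }
}

fn main() {
    let original = SharedVec::new(vec![1, 2, 3]);
    let alias = original.cheap_clone();

    let mut owned = original.make_owned(); // deep copy, because `alias` still shares the Rc
    owned.push(4);

    assert_eq!(*alias.inner, vec![1, 2, 3]);
    assert_eq!(owned, vec![1, 2, 3, 4]);
}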
vendor/proc-macro2/src/wrapper.rs (vendored, new file, 1008 lines added)
Diff suppressed because it is too large.