use super::*;
use crate::punctuated::Punctuated;
ast_struct! {
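/// A path at which a named item is exported (e.g. `std::collections::HashMap`).
///
/// A leading `::`, if present, is stored in `leading_colon`.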
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Path {
pub leading_colon: Option<Token![::]>,
pub segments: Punctuated<PathSegment, Token![::]>,
}
}
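// Conversion from a single segment (or anything convertible into one) into a
// one-segment path with no leading `::`.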
impl<T> From<T> for Path
where
T: Into<PathSegment>,
{
fn from(segment: T) -> Self {
let mut path = Path {
leading_colon: None,
segments: Punctuated::new(),
};
path.segments.push_value(segment.into());
path
}
}
ast_struct! {
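/// A segment of a path together with any path arguments on that segment.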
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PathSegment {
pub ident: Ident,
pub arguments: PathArguments,
}
}
impl<T> From<T> for PathSegment
where
T: Into<Ident>,
{
fn from(ident: T) -> Self {
PathSegment {
ident: ident.into(),
arguments: PathArguments::None,
}
}
}
ast_enum! {
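/// Angle bracketed or parenthesized arguments of a path segment.
///
/// Angle bracketed: the `<'a, T>` in `std::slice::iter<'a, T>`.
///
/// Parenthesized: the `(A, B) -> C` in `Fn(A, B) -> C`.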
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum PathArguments {
None,
AngleBracketed(AngleBracketedGenericArguments),
Parenthesized(ParenthesizedGenericArguments),
}
}
impl Default for PathArguments {
fn default() -> Self {
PathArguments::None
}
}
impl PathArguments {
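/// Returns `true` if this segment carries no generic arguments, counting
/// empty angle brackets as empty and parenthesized arguments as non-empty.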
pub fn is_empty(&self) -> bool {
match self {
PathArguments::None => true,
PathArguments::AngleBracketed(bracketed) => bracketed.args.is_empty(),
PathArguments::Parenthesized(_) => false,
}
}
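// Unlike `is_empty`, this is only true for the `PathArguments::None` variant,
// not for empty angle brackets.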
#[cfg(feature = "parsing")]
fn is_none(&self) -> bool {
match *self {
PathArguments::None => true,
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
}
}
}
ast_enum! {
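/// An individual generic argument, like `'a`, `T`, or `Item = T`.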
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum GenericArgument {
Lifetime(Lifetime),
Type(Type),
Binding(Binding),
Constraint(Constraint),
Const(Expr),
}
}
ast_struct! {
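/// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K, V>`.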
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct AngleBracketedGenericArguments {
pub colon2_token: Option<Token![::]>,
pub lt_token: Token![<],
pub args: Punctuated<GenericArgument, Token![,]>,
pub gt_token: Token![>],
}
}
ast_struct! {
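/// A binding (equality constraint) on an associated type: the `Item = u8` in
/// `Iterator<Item = u8>`.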
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Binding {
pub ident: Ident,
pub eq_token: Token![=],
pub ty: Type,
}
}
ast_struct! {
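/// An associated type bound: the `Item: Display` in `Iterator<Item: Display>`.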
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Constraint {
pub ident: Ident,
pub colon_token: Token![:],
pub bounds: Punctuated<TypeParamBound, Token![+]>,
}
}
ast_struct! {
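/// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A, B) -> C`.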
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct ParenthesizedGenericArguments {
pub paren_token: token::Paren,
pub inputs: Punctuated<Type, Token![,]>,
pub output: ReturnType,
}
}
ast_struct! {
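/// The explicit Self type in a qualified path: the `T` in `<T as Display>::fmt`.
///
/// The actual path, including the trait and the associated item, is stored
/// separately. The `position` field is the number of path segments that belong
/// to the trait path, i.e. the index at which the associated item begins.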
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct QSelf {
pub lt_token: Token![<],
pub ty: Box<Type>,
pub position: usize,
pub as_token: Option<Token![as]>,
pub gt_token: Token![>],
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Path {
fn parse(input: ParseStream) -> Result<Self> {
Self::parse_helper(input, false)
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for GenericArgument {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Lifetime) && !input.peek2(Token![+]) {
return Ok(GenericArgument::Lifetime(input.parse()?));
}
if input.peek(Ident) && input.peek2(Token![=]) {
return Ok(GenericArgument::Binding(input.parse()?));
}
#[cfg(feature = "full")]
{
if input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]) {
return Ok(GenericArgument::Constraint(input.parse()?));
}
}
if input.peek(Lit) || input.peek(token::Brace) {
return const_argument(input).map(GenericArgument::Const);
}
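// Associated type bindings and constraints whose identifier itself carries
// generic arguments (e.g. `Item<'a> = T`) cannot be represented by `Binding`
// or `Constraint`, so the whole argument is captured as `Type::Verbatim`.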
#[cfg(feature = "full")]
let begin = input.fork();
let argument: Type = input.parse()?;
#[cfg(feature = "full")]
{
if match &argument {
Type::Path(argument)
if argument.qself.is_none()
&& argument.path.leading_colon.is_none()
&& argument.path.segments.len() == 1 =>
{
match argument.path.segments[0].arguments {
PathArguments::AngleBracketed(_) => true,
_ => false,
}
}
_ => false,
} && if input.peek(Token![=]) {
input.parse::<Token![=]>()?;
input.parse::<Type>()?;
true
} else if input.peek(Token![:]) {
input.parse::<Token![:]>()?;
input.call(constraint_bounds)?;
true
} else {
false
} {
let verbatim = verbatim::between(begin, input);
return Ok(GenericArgument::Type(Type::Verbatim(verbatim)));
}
}
Ok(GenericArgument::Type(argument))
}
}
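// Parse a const generic argument: either a literal or a braced block. Without
// the "full" feature the block is preserved as `Expr::Verbatim` tokens.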
pub fn const_argument(input: ParseStream) -> Result<Expr> {
let lookahead = input.lookahead1();
if input.peek(Lit) {
let lit = input.parse()?;
return Ok(Expr::Lit(lit));
}
if input.peek(token::Brace) {
#[cfg(feature = "full")]
{
let block: ExprBlock = input.parse()?;
return Ok(Expr::Block(block));
}
#[cfg(not(feature = "full"))]
{
let begin = input.fork();
let content;
braced!(content in input);
content.parse::<Expr>()?;
let verbatim = verbatim::between(begin, input);
return Ok(Expr::Verbatim(verbatim));
}
}
Err(lookahead.error())
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for AngleBracketedGenericArguments {
fn parse(input: ParseStream) -> Result<Self> {
Ok(AngleBracketedGenericArguments {
colon2_token: input.parse()?,
lt_token: input.parse()?,
args: {
let mut args = Punctuated::new();
loop {
if input.peek(Token![>]) {
break;
}
let value = input.parse()?;
args.push_value(value);
if input.peek(Token![>]) {
break;
}
let punct = input.parse()?;
args.push_punct(punct);
}
args
},
gt_token: input.parse()?,
})
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for ParenthesizedGenericArguments {
fn parse(input: ParseStream) -> Result<Self> {
let content;
Ok(ParenthesizedGenericArguments {
paren_token: parenthesized!(content in input),
inputs: content.parse_terminated(Type::parse)?,
output: input.call(ReturnType::without_plus)?,
})
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for PathSegment {
fn parse(input: ParseStream) -> Result<Self> {
Self::parse_helper(input, false)
}
}
impl PathSegment {
fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
let ident = input.call(Ident::parse_any)?;
return Ok(PathSegment::from(ident));
}
let ident = if input.peek(Token![Self]) {
input.call(Ident::parse_any)?
} else {
input.parse()?
};
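// In expression position a bare `<` does not begin generic arguments; the
// turbofish `::<` is required.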
if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=])
|| input.peek(Token![::]) && input.peek3(Token![<])
{
Ok(PathSegment {
ident,
arguments: PathArguments::AngleBracketed(input.parse()?),
})
} else {
Ok(PathSegment::from(ident))
}
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Binding {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Binding {
ident: input.parse()?,
eq_token: input.parse()?,
ty: input.parse()?,
})
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Constraint {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Constraint {
ident: input.parse()?,
colon_token: input.parse()?,
bounds: constraint_bounds(input)?,
})
}
}
#[cfg(feature = "full")]
fn constraint_bounds(input: ParseStream) -> Result<Punctuated<TypeParamBound, Token![+]>> {
let mut bounds = Punctuated::new();
loop {
if input.peek(Token![,]) || input.peek(Token![>]) {
break;
}
let value = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
break;
}
let punct = input.parse()?;
bounds.push_punct(punct);
}
Ok(bounds)
}
impl Path {
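/// Parse a `Path` containing no path arguments on any of its segments
/// (a mod-style path, as in a `use` item or an attribute).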
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
Ok(Path {
leading_colon: input.parse()?,
segments: {
let mut segments = Punctuated::new();
loop {
if !input.peek(Ident)
&& !input.peek(Token![super])
&& !input.peek(Token![self])
&& !input.peek(Token![Self])
&& !input.peek(Token![crate])
{
break;
}
let ident = Ident::parse_any(input)?;
segments.push_value(PathSegment::from(ident));
if !input.peek(Token![::]) {
break;
}
let punct = input.parse()?;
segments.push_punct(punct);
}
if segments.is_empty() {
return Err(input.error("expected path"));
} else if segments.trailing_punct() {
return Err(input.error("expected path segment"));
}
segments
},
})
}
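/// Determines whether this is a path of length 1 equal to the given ident:
/// no leading colon, exactly one segment, and no path arguments.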
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn is_ident<I: ?Sized>(&self, ident: &I) -> bool
where
Ident: PartialEq<I>,
{
match self.get_ident() {
Some(id) => id == ident,
None => false,
}
}
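/// If this path consists of a single ident with no path arguments, returns
/// the ident.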
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn get_ident(&self) -> Option<&Ident> {
if self.leading_colon.is_none()
&& self.segments.len() == 1
&& self.segments[0].arguments.is_none()
{
Some(&self.segments[0].ident)
} else {
None
}
}
pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
let mut path = Path {
leading_colon: input.parse()?,
segments: {
let mut segments = Punctuated::new();
let value = PathSegment::parse_helper(input, expr_style)?;
segments.push_value(value);
segments
},
};
Path::parse_rest(input, &mut path, expr_style)?;
Ok(path)
}
pub(crate) fn parse_rest(
input: ParseStream,
path: &mut Self,
expr_style: bool,
) -> Result<()> {
while input.peek(Token![::]) {
let punct: Token![::] = input.parse()?;
path.segments.push_punct(punct);
let value = PathSegment::parse_helper(input, expr_style)?;
path.segments.push_value(value);
}
Ok(())
}
}
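// Parse an optionally qualified path. A leading `<` introduces a qualified
// self type, as in `<Ty as Trait>::Assoc` or `<Ty>::Assoc`; the returned
// `QSelf` records where the trait path ends in its `position` field.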
pub fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
if input.peek(Token![<]) {
let lt_token: Token![<] = input.parse()?;
let this: Type = input.parse()?;
let path = if input.peek(Token![as]) {
let as_token: Token![as] = input.parse()?;
let path: Path = input.parse()?;
Some((as_token, path))
} else {
None
};
let gt_token: Token![>] = input.parse()?;
let colon2_token: Token![::] = input.parse()?;
let mut rest = Punctuated::new();
loop {
let path = PathSegment::parse_helper(input, expr_style)?;
rest.push_value(path);
if !input.peek(Token![::]) {
break;
}
let punct: Token![::] = input.parse()?;
rest.push_punct(punct);
}
let (position, as_token, path) = match path {
Some((as_token, mut path)) => {
let pos = path.segments.len();
path.segments.push_punct(colon2_token);
path.segments.extend(rest.into_pairs());
(pos, Some(as_token), path)
}
None => {
let path = Path {
leading_colon: Some(colon2_token),
segments: rest,
};
(0, None, path)
}
};
let qself = QSelf {
lt_token,
ty: Box::new(this),
position,
as_token,
gt_token,
};
Ok((Some(qself), path))
} else {
let path = Path::parse_helper(input, expr_style)?;
Ok((None, path))
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use crate::print::TokensOrDefault;
use proc_macro2::TokenStream;
use quote::ToTokens;
use std::cmp;
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Path {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.leading_colon.to_tokens(tokens);
self.segments.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for PathSegment {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.arguments.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for PathArguments {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
PathArguments::None => {}
PathArguments::AngleBracketed(arguments) => {
arguments.to_tokens(tokens);
}
PathArguments::Parenthesized(arguments) => {
arguments.to_tokens(tokens);
}
}
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for GenericArgument {
#[allow(clippy::match_same_arms)]
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
GenericArgument::Type(ty) => ty.to_tokens(tokens),
GenericArgument::Binding(tb) => tb.to_tokens(tokens),
GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
GenericArgument::Const(e) => match *e {
Expr::Lit(_) => e.to_tokens(tokens),
#[cfg(feature = "full")]
Expr::Block(_) => e.to_tokens(tokens),
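// Const arguments other than literals and blocks are wrapped in braces so
// that the printed tokens remain a valid const generic argument.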
_ => token::Brace::default().surround(tokens, |tokens| {
e.to_tokens(tokens);
}),
},
}
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for AngleBracketedGenericArguments {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.colon2_token.to_tokens(tokens);
self.lt_token.to_tokens(tokens);
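// Print lifetimes first, then type and const arguments, then bindings and
// constraints, regardless of their order in `self.args`. `trailing_or_empty`
// records whether the previously printed argument already ended with a comma,
// so a separating comma is inserted only when needed.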
let mut trailing_or_empty = true;
for param in self.args.pairs() {
match **param.value() {
GenericArgument::Lifetime(_) => {
param.to_tokens(tokens);
trailing_or_empty = param.punct().is_some();
}
GenericArgument::Type(_)
| GenericArgument::Binding(_)
| GenericArgument::Constraint(_)
| GenericArgument::Const(_) => {}
}
}
for param in self.args.pairs() {
match **param.value() {
GenericArgument::Type(_) | GenericArgument::Const(_) => {
if !trailing_or_empty {
<Token![,]>::default().to_tokens(tokens);
}
param.to_tokens(tokens);
trailing_or_empty = param.punct().is_some();
}
GenericArgument::Lifetime(_)
| GenericArgument::Binding(_)
| GenericArgument::Constraint(_) => {}
}
}
for param in self.args.pairs() {
match **param.value() {
GenericArgument::Binding(_) | GenericArgument::Constraint(_) => {
if !trailing_or_empty {
<Token![,]>::default().to_tokens(tokens);
}
param.to_tokens(tokens);
trailing_or_empty = param.punct().is_some();
}
GenericArgument::Lifetime(_)
| GenericArgument::Type(_)
| GenericArgument::Const(_) => {}
}
}
self.gt_token.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Binding {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Constraint {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for ParenthesizedGenericArguments {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.paren_token.surround(tokens, |tokens| {
self.inputs.to_tokens(tokens);
});
self.output.to_tokens(tokens);
}
}
impl private {
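// Print a path together with its optional qualified self type, re-emitting the
// closing `>` after `position` segments, e.g. `<Vec<T> as IntoIterator>::Item`.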
pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
let qself = match qself {
Some(qself) => qself,
None => {
path.to_tokens(tokens);
return;
}
};
qself.lt_token.to_tokens(tokens);
qself.ty.to_tokens(tokens);
let pos = cmp::min(qself.position, path.segments.len());
let mut segments = path.segments.pairs();
if pos > 0 {
TokensOrDefault(&qself.as_token).to_tokens(tokens);
path.leading_colon.to_tokens(tokens);
for (i, segment) in segments.by_ref().take(pos).enumerate() {
if i + 1 == pos {
segment.value().to_tokens(tokens);
qself.gt_token.to_tokens(tokens);
segment.punct().to_tokens(tokens);
} else {
segment.to_tokens(tokens);
}
}
} else {
qself.gt_token.to_tokens(tokens);
path.leading_colon.to_tokens(tokens);
}
for segment in segments {
segment.to_tokens(tokens);
}
}
}
}