Add an initial HIR and lowering step
This commit is contained in:
parent
cfd76b364c
commit
facdf2ebb1
160 changed files with 13917 additions and 4451 deletions
628
src/librustc_front/attr.rs
Normal file
628
src/librustc_front/attr.rs
Normal file
|
|
@ -0,0 +1,628 @@
|
|||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// Functions dealing with attributes and meta items
|
||||
|
||||
pub use self::StabilityLevel::*;
|
||||
pub use self::ReprAttr::*;
|
||||
pub use self::IntType::*;
|
||||
|
||||
use hir;
|
||||
use hir::{AttrId, Attribute, Attribute_, MetaItem, MetaWord, MetaNameValue, MetaList};
|
||||
use lowering::{lower_attr_style, unlower_attribute};
|
||||
use syntax::codemap::{Span, Spanned, spanned, dummy_spanned};
|
||||
use syntax::codemap::BytePos;
|
||||
use syntax::diagnostic::SpanHandler;
|
||||
use syntax::attr as syntax_attr;
|
||||
use syntax::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||
use syntax::parse::token::{InternedString, intern_and_get_ident};
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
|
||||
use std::cell::Cell;
|
||||
use std::collections::HashSet;
|
||||
use std::fmt;
|
||||
|
||||
pub fn mark_used(attr: &Attribute) {
|
||||
syntax_attr::mark_used(&unlower_attribute(attr))
|
||||
}
|
||||
|
||||
pub trait AttrMetaMethods {
|
||||
fn check_name(&self, name: &str) -> bool {
|
||||
name == &self.name()[..]
|
||||
}
|
||||
|
||||
/// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
|
||||
/// `#[foo="bar"]` and `#[foo(bar)]`
|
||||
fn name(&self) -> InternedString;
|
||||
|
||||
/// Gets the string value if self is a MetaNameValue variant
|
||||
/// containing a string, otherwise None.
|
||||
fn value_str(&self) -> Option<InternedString>;
|
||||
/// Gets a list of inner meta items from a list MetaItem type.
|
||||
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]>;
|
||||
|
||||
fn span(&self) -> Span;
|
||||
}
|
||||
|
||||
impl AttrMetaMethods for Attribute {
|
||||
fn check_name(&self, name: &str) -> bool {
|
||||
let matches = name == &self.name()[..];
|
||||
if matches {
|
||||
syntax_attr::mark_used(&unlower_attribute(self));
|
||||
}
|
||||
matches
|
||||
}
|
||||
fn name(&self) -> InternedString { self.meta().name() }
|
||||
fn value_str(&self) -> Option<InternedString> {
|
||||
self.meta().value_str()
|
||||
}
|
||||
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
|
||||
self.node.value.meta_item_list()
|
||||
}
|
||||
fn span(&self) -> Span { self.meta().span }
|
||||
}
|
||||
|
||||
impl AttrMetaMethods for MetaItem {
|
||||
fn name(&self) -> InternedString {
|
||||
match self.node {
|
||||
MetaWord(ref n) => (*n).clone(),
|
||||
MetaNameValue(ref n, _) => (*n).clone(),
|
||||
MetaList(ref n, _) => (*n).clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn value_str(&self) -> Option<InternedString> {
|
||||
match self.node {
|
||||
MetaNameValue(_, ref v) => {
|
||||
match v.node {
|
||||
hir::LitStr(ref s, _) => Some((*s).clone()),
|
||||
_ => None,
|
||||
}
|
||||
},
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
|
||||
match self.node {
|
||||
MetaList(_, ref l) => Some(&l[..]),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
fn span(&self) -> Span { self.span }
|
||||
}
|
||||
|
||||
// Annoying, but required to get test_cfg to work
|
||||
impl AttrMetaMethods for P<MetaItem> {
|
||||
fn name(&self) -> InternedString { (**self).name() }
|
||||
fn value_str(&self) -> Option<InternedString> { (**self).value_str() }
|
||||
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<MetaItem>]> {
|
||||
(**self).meta_item_list()
|
||||
}
|
||||
fn span(&self) -> Span { (**self).span() }
|
||||
}
|
||||
|
||||
|
||||
pub trait AttributeMethods {
|
||||
fn meta<'a>(&'a self) -> &'a MetaItem;
|
||||
fn with_desugared_doc<T, F>(&self, f: F) -> T where
|
||||
F: FnOnce(&Attribute) -> T;
|
||||
}
|
||||
|
||||
impl AttributeMethods for Attribute {
|
||||
/// Extract the MetaItem from inside this Attribute.
|
||||
fn meta<'a>(&'a self) -> &'a MetaItem {
|
||||
&*self.node.value
|
||||
}
|
||||
|
||||
/// Convert self to a normal #[doc="foo"] comment, if it is a
|
||||
/// comment like `///` or `/** */`. (Returns self unchanged for
|
||||
/// non-sugared doc attributes.)
|
||||
fn with_desugared_doc<T, F>(&self, f: F) -> T where
|
||||
F: FnOnce(&Attribute) -> T,
|
||||
{
|
||||
if self.node.is_sugared_doc {
|
||||
let comment = self.value_str().unwrap();
|
||||
let meta = mk_name_value_item_str(
|
||||
InternedString::new("doc"),
|
||||
token::intern_and_get_ident(&strip_doc_comment_decoration(
|
||||
&comment)));
|
||||
if self.node.style == hir::AttrOuter {
|
||||
f(&mk_attr_outer(self.node.id, meta))
|
||||
} else {
|
||||
f(&mk_attr_inner(self.node.id, meta))
|
||||
}
|
||||
} else {
|
||||
f(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Constructors */
|
||||
|
||||
pub fn mk_name_value_item_str(name: InternedString, value: InternedString)
|
||||
-> P<MetaItem> {
|
||||
let value_lit = dummy_spanned(hir::LitStr(value, hir::CookedStr));
|
||||
mk_name_value_item(name, value_lit)
|
||||
}
|
||||
|
||||
pub fn mk_name_value_item(name: InternedString, value: hir::Lit)
|
||||
-> P<MetaItem> {
|
||||
P(dummy_spanned(MetaNameValue(name, value)))
|
||||
}
|
||||
|
||||
pub fn mk_list_item(name: InternedString, items: Vec<P<MetaItem>>) -> P<MetaItem> {
|
||||
P(dummy_spanned(MetaList(name, items)))
|
||||
}
|
||||
|
||||
pub fn mk_word_item(name: InternedString) -> P<MetaItem> {
|
||||
P(dummy_spanned(MetaWord(name)))
|
||||
}
|
||||
|
||||
thread_local! { static NEXT_ATTR_ID: Cell<usize> = Cell::new(0) }
|
||||
|
||||
pub fn mk_attr_id() -> AttrId {
|
||||
let id = NEXT_ATTR_ID.with(|slot| {
|
||||
let r = slot.get();
|
||||
slot.set(r + 1);
|
||||
r
|
||||
});
|
||||
AttrId(id)
|
||||
}
|
||||
|
||||
/// Returns an inner attribute with the given value.
|
||||
pub fn mk_attr_inner(id: AttrId, item: P<MetaItem>) -> Attribute {
|
||||
dummy_spanned(Attribute_ {
|
||||
id: id,
|
||||
style: hir::AttrInner,
|
||||
value: item,
|
||||
is_sugared_doc: false,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns an outer attribute with the given value.
|
||||
pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
|
||||
dummy_spanned(Attribute_ {
|
||||
id: id,
|
||||
style: hir::AttrOuter,
|
||||
value: item,
|
||||
is_sugared_doc: false,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
|
||||
hi: BytePos)
|
||||
-> Attribute {
|
||||
let style = lower_attr_style(doc_comment_style(&text));
|
||||
let lit = spanned(lo, hi, hir::LitStr(text, hir::CookedStr));
|
||||
let attr = Attribute_ {
|
||||
id: id,
|
||||
style: style,
|
||||
value: P(spanned(lo, hi, MetaNameValue(InternedString::new("doc"),
|
||||
lit))),
|
||||
is_sugared_doc: true
|
||||
};
|
||||
spanned(lo, hi, attr)
|
||||
}
|
||||
|
||||
/* Searching */
|
||||
/// Check if `needle` occurs in `haystack` by a structural
|
||||
/// comparison. This is slightly subtle, and relies on ignoring the
|
||||
/// span included in the `==` comparison a plain MetaItem.
|
||||
pub fn contains(haystack: &[P<MetaItem>], needle: &MetaItem) -> bool {
|
||||
debug!("attr::contains (name={})", needle.name());
|
||||
haystack.iter().any(|item| {
|
||||
debug!(" testing: {}", item.name());
|
||||
item.node == needle.node
|
||||
})
|
||||
}
|
||||
|
||||
pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
|
||||
debug!("attr::contains_name (name={})", name);
|
||||
metas.iter().any(|item| {
|
||||
debug!(" testing: {}", item.name());
|
||||
item.check_name(name)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
|
||||
-> Option<InternedString> {
|
||||
attrs.iter()
|
||||
.find(|at| at.check_name(name))
|
||||
.and_then(|at| at.value_str())
|
||||
}
|
||||
|
||||
pub fn last_meta_item_value_str_by_name(items: &[P<MetaItem>], name: &str)
|
||||
-> Option<InternedString> {
|
||||
items.iter()
|
||||
.rev()
|
||||
.find(|mi| mi.check_name(name))
|
||||
.and_then(|i| i.value_str())
|
||||
}
|
||||
|
||||
/* Higher-level applications */
|
||||
|
||||
pub fn sort_meta_items(items: Vec<P<MetaItem>>) -> Vec<P<MetaItem>> {
|
||||
// This is sort of stupid here, but we need to sort by
|
||||
// human-readable strings.
|
||||
let mut v = items.into_iter()
|
||||
.map(|mi| (mi.name(), mi))
|
||||
.collect::<Vec<(InternedString, P<MetaItem>)>>();
|
||||
|
||||
v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
|
||||
|
||||
// There doesn't seem to be a more optimal way to do this
|
||||
v.into_iter().map(|(_, m)| m.map(|Spanned {node, span}| {
|
||||
Spanned {
|
||||
node: match node {
|
||||
MetaList(n, mis) => MetaList(n, sort_meta_items(mis)),
|
||||
_ => node
|
||||
},
|
||||
span: span
|
||||
}
|
||||
})).collect()
|
||||
}
|
||||
|
||||
pub fn find_crate_name(attrs: &[Attribute]) -> Option<InternedString> {
|
||||
first_attr_value_str_by_name(attrs, "crate_name")
|
||||
}
|
||||
|
||||
/// Find the value of #[export_name=*] attribute and check its validity.
|
||||
pub fn find_export_name_attr(diag: &SpanHandler, attrs: &[Attribute]) -> Option<InternedString> {
|
||||
attrs.iter().fold(None, |ia,attr| {
|
||||
if attr.check_name("export_name") {
|
||||
if let s@Some(_) = attr.value_str() {
|
||||
s
|
||||
} else {
|
||||
diag.span_err(attr.span, "export_name attribute has invalid format");
|
||||
diag.handler.help("use #[export_name=\"*\"]");
|
||||
None
|
||||
}
|
||||
} else {
|
||||
ia
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// The inlining request expressed by `#[inline]` and its variants.
#[derive(Copy, Clone, PartialEq)]
pub enum InlineAttr {
    /// No `#[inline]` attribute present.
    None,
    /// Plain `#[inline]`.
    Hint,
    /// `#[inline(always)]`.
    Always,
    /// `#[inline(never)]`.
    Never,
}
|
||||
|
||||
/// Determine what `#[inline]` attribute is present in `attrs`, if any.
|
||||
pub fn find_inline_attr(diagnostic: Option<&SpanHandler>, attrs: &[Attribute]) -> InlineAttr {
|
||||
// FIXME (#2809)---validate the usage of #[inline] and #[inline]
|
||||
attrs.iter().fold(InlineAttr::None, |ia,attr| {
|
||||
match attr.node.value.node {
|
||||
MetaWord(ref n) if *n == "inline" => {
|
||||
syntax_attr::mark_used(&unlower_attribute(attr));
|
||||
InlineAttr::Hint
|
||||
}
|
||||
MetaList(ref n, ref items) if *n == "inline" => {
|
||||
syntax_attr::mark_used(&unlower_attribute(attr));
|
||||
if items.len() != 1 {
|
||||
diagnostic.map(|d|{ d.span_err(attr.span, "expected one argument"); });
|
||||
InlineAttr::None
|
||||
} else if contains_name(&items[..], "always") {
|
||||
InlineAttr::Always
|
||||
} else if contains_name(&items[..], "never") {
|
||||
InlineAttr::Never
|
||||
} else {
|
||||
diagnostic.map(|d|{ d.span_err((*items[0]).span, "invalid argument"); });
|
||||
InlineAttr::None
|
||||
}
|
||||
}
|
||||
_ => ia
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// True if `#[inline]` or `#[inline(always)]` is present in `attrs`.
|
||||
pub fn requests_inline(attrs: &[Attribute]) -> bool {
|
||||
match find_inline_attr(None, attrs) {
|
||||
InlineAttr::Hint | InlineAttr::Always => true,
|
||||
InlineAttr::None | InlineAttr::Never => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the #[deprecated] and friends attributes.
|
||||
#[derive(RustcEncodable, RustcDecodable, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct Stability {
|
||||
pub level: StabilityLevel,
|
||||
pub feature: InternedString,
|
||||
pub since: Option<InternedString>,
|
||||
pub deprecated_since: Option<InternedString>,
|
||||
// The reason for the current stability level. If deprecated, the
|
||||
// reason for deprecation.
|
||||
pub reason: Option<InternedString>,
|
||||
// The relevant rust-lang issue
|
||||
pub issue: Option<u32>
|
||||
}
|
||||
|
||||
/// The available stability levels.
|
||||
#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Copy, Eq, Hash)]
|
||||
pub enum StabilityLevel {
|
||||
Unstable,
|
||||
Stable,
|
||||
}
|
||||
|
||||
impl fmt::Display for StabilityLevel {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
fn find_stability_generic<'a,
|
||||
AM: AttrMetaMethods,
|
||||
I: Iterator<Item=&'a AM>>
|
||||
(diagnostic: &SpanHandler, attrs: I, item_sp: Span)
|
||||
-> (Option<Stability>, Vec<&'a AM>) {
|
||||
|
||||
let mut stab: Option<Stability> = None;
|
||||
let mut deprecated: Option<(Option<InternedString>, Option<InternedString>)> = None;
|
||||
let mut used_attrs: Vec<&'a AM> = vec![];
|
||||
|
||||
'outer: for attr in attrs {
|
||||
let tag = attr.name();
|
||||
let tag = &tag[..];
|
||||
if tag != "deprecated" && tag != "unstable" && tag != "stable" {
|
||||
continue // not a stability level
|
||||
}
|
||||
|
||||
used_attrs.push(attr);
|
||||
|
||||
let (feature, since, reason, issue) = match attr.meta_item_list() {
|
||||
Some(metas) => {
|
||||
let mut feature = None;
|
||||
let mut since = None;
|
||||
let mut reason = None;
|
||||
let mut issue = None;
|
||||
for meta in metas {
|
||||
match &*meta.name() {
|
||||
"feature" => {
|
||||
match meta.value_str() {
|
||||
Some(v) => feature = Some(v),
|
||||
None => {
|
||||
diagnostic.span_err(meta.span, "incorrect meta item");
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
"since" => {
|
||||
match meta.value_str() {
|
||||
Some(v) => since = Some(v),
|
||||
None => {
|
||||
diagnostic.span_err(meta.span, "incorrect meta item");
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
"reason" => {
|
||||
match meta.value_str() {
|
||||
Some(v) => reason = Some(v),
|
||||
None => {
|
||||
diagnostic.span_err(meta.span, "incorrect meta item");
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
"issue" => {
|
||||
match meta.value_str().and_then(|s| s.parse().ok()) {
|
||||
Some(v) => issue = Some(v),
|
||||
None => {
|
||||
diagnostic.span_err(meta.span, "incorrect meta item");
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
(feature, since, reason, issue)
|
||||
}
|
||||
None => {
|
||||
diagnostic.span_err(attr.span(), "incorrect stability attribute type");
|
||||
continue
|
||||
}
|
||||
};
|
||||
|
||||
// Deprecated tags don't require feature names
|
||||
if feature == None && tag != "deprecated" {
|
||||
diagnostic.span_err(attr.span(), "missing 'feature'");
|
||||
}
|
||||
|
||||
// Unstable tags don't require a version
|
||||
if since == None && tag != "unstable" {
|
||||
diagnostic.span_err(attr.span(), "missing 'since'");
|
||||
}
|
||||
|
||||
if tag == "unstable" || tag == "stable" {
|
||||
if stab.is_some() {
|
||||
diagnostic.span_err(item_sp, "multiple stability levels");
|
||||
}
|
||||
|
||||
let level = match tag {
|
||||
"unstable" => Unstable,
|
||||
"stable" => Stable,
|
||||
_ => unreachable!()
|
||||
};
|
||||
|
||||
stab = Some(Stability {
|
||||
level: level,
|
||||
feature: feature.unwrap_or(intern_and_get_ident("bogus")),
|
||||
since: since,
|
||||
deprecated_since: None,
|
||||
reason: reason,
|
||||
issue: issue,
|
||||
});
|
||||
} else { // "deprecated"
|
||||
if deprecated.is_some() {
|
||||
diagnostic.span_err(item_sp, "multiple deprecated attributes");
|
||||
}
|
||||
|
||||
deprecated = Some((since, reason));
|
||||
}
|
||||
}
|
||||
|
||||
// Merge the deprecation info into the stability info
|
||||
if deprecated.is_some() {
|
||||
match stab {
|
||||
Some(ref mut s) => {
|
||||
let (since, reason) = deprecated.unwrap();
|
||||
s.deprecated_since = since;
|
||||
s.reason = reason;
|
||||
}
|
||||
None => {
|
||||
diagnostic.span_err(item_sp, "deprecated attribute must be paired with \
|
||||
either stable or unstable attribute");
|
||||
}
|
||||
}
|
||||
} else if stab.as_ref().map_or(false, |s| s.level == Unstable && s.issue.is_none()) {
|
||||
// non-deprecated unstable items need to point to issues.
|
||||
diagnostic.span_err(item_sp,
|
||||
"non-deprecated unstable items need to point \
|
||||
to an issue with `issue = \"NNN\"`");
|
||||
}
|
||||
|
||||
(stab, used_attrs)
|
||||
}
|
||||
|
||||
/// Find the first stability attribute. `None` if none exists.
|
||||
pub fn find_stability(diagnostic: &SpanHandler, attrs: &[Attribute],
|
||||
item_sp: Span) -> Option<Stability> {
|
||||
let (s, used) = find_stability_generic(diagnostic, attrs.iter(), item_sp);
|
||||
for used in used { syntax_attr::mark_used(&unlower_attribute(used)) }
|
||||
return s;
|
||||
}
|
||||
|
||||
pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
|
||||
let mut set = HashSet::new();
|
||||
for meta in metas {
|
||||
let name = meta.name();
|
||||
|
||||
if !set.insert(name.clone()) {
|
||||
panic!(diagnostic.span_fatal(meta.span,
|
||||
&format!("duplicate meta item `{}`", name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Parse #[repr(...)] forms.
|
||||
///
|
||||
/// Valid repr contents: any of the primitive integral type names (see
|
||||
/// `int_type_of_word`, below) to specify enum discriminant type; `C`, to use
|
||||
/// the same discriminant size that the corresponding C enum would or C
|
||||
/// structure layout, and `packed` to remove padding.
|
||||
pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAttr> {
|
||||
let mut acc = Vec::new();
|
||||
match attr.node.value.node {
|
||||
hir::MetaList(ref s, ref items) if *s == "repr" => {
|
||||
syntax_attr::mark_used(&unlower_attribute(attr));
|
||||
for item in items {
|
||||
match item.node {
|
||||
hir::MetaWord(ref word) => {
|
||||
let hint = match &word[..] {
|
||||
// Can't use "extern" because it's not a lexical identifier.
|
||||
"C" => Some(ReprExtern),
|
||||
"packed" => Some(ReprPacked),
|
||||
"simd" => Some(ReprSimd),
|
||||
_ => match int_type_of_word(&word) {
|
||||
Some(ity) => Some(ReprInt(item.span, ity)),
|
||||
None => {
|
||||
// Not a word we recognize
|
||||
diagnostic.span_err(item.span,
|
||||
"unrecognized representation hint");
|
||||
None
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match hint {
|
||||
Some(h) => acc.push(h),
|
||||
None => { }
|
||||
}
|
||||
}
|
||||
// Not a word:
|
||||
_ => diagnostic.span_err(item.span, "unrecognized enum representation hint")
|
||||
}
|
||||
}
|
||||
}
|
||||
// Not a "repr" hint: ignore.
|
||||
_ => { }
|
||||
}
|
||||
acc
|
||||
}
|
||||
|
||||
fn int_type_of_word(s: &str) -> Option<IntType> {
|
||||
match s {
|
||||
"i8" => Some(SignedInt(hir::TyI8)),
|
||||
"u8" => Some(UnsignedInt(hir::TyU8)),
|
||||
"i16" => Some(SignedInt(hir::TyI16)),
|
||||
"u16" => Some(UnsignedInt(hir::TyU16)),
|
||||
"i32" => Some(SignedInt(hir::TyI32)),
|
||||
"u32" => Some(UnsignedInt(hir::TyU32)),
|
||||
"i64" => Some(SignedInt(hir::TyI64)),
|
||||
"u64" => Some(UnsignedInt(hir::TyU64)),
|
||||
"isize" => Some(SignedInt(hir::TyIs)),
|
||||
"usize" => Some(UnsignedInt(hir::TyUs)),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)]
|
||||
pub enum ReprAttr {
|
||||
ReprAny,
|
||||
ReprInt(Span, IntType),
|
||||
ReprExtern,
|
||||
ReprPacked,
|
||||
ReprSimd,
|
||||
}
|
||||
|
||||
impl ReprAttr {
|
||||
pub fn is_ffi_safe(&self) -> bool {
|
||||
match *self {
|
||||
ReprAny => false,
|
||||
ReprInt(_sp, ity) => ity.is_ffi_safe(),
|
||||
ReprExtern => true,
|
||||
ReprPacked => false,
|
||||
ReprSimd => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Eq, Hash, PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)]
|
||||
pub enum IntType {
|
||||
SignedInt(hir::IntTy),
|
||||
UnsignedInt(hir::UintTy)
|
||||
}
|
||||
|
||||
impl IntType {
|
||||
#[inline]
|
||||
pub fn is_signed(self) -> bool {
|
||||
match self {
|
||||
SignedInt(..) => true,
|
||||
UnsignedInt(..) => false
|
||||
}
|
||||
}
|
||||
fn is_ffi_safe(self) -> bool {
|
||||
match self {
|
||||
SignedInt(hir::TyI8) | UnsignedInt(hir::TyU8) |
|
||||
SignedInt(hir::TyI16) | UnsignedInt(hir::TyU16) |
|
||||
SignedInt(hir::TyI32) | UnsignedInt(hir::TyU32) |
|
||||
SignedInt(hir::TyI64) | UnsignedInt(hir::TyU64) => true,
|
||||
SignedInt(hir::TyIs) | UnsignedInt(hir::TyUs) => false
|
||||
}
|
||||
}
|
||||
}
|
||||
1192
src/librustc_front/fold.rs
Normal file
1192
src/librustc_front/fold.rs
Normal file
File diff suppressed because it is too large
Load diff
1533
src/librustc_front/hir.rs
Normal file
1533
src/librustc_front/hir.rs
Normal file
File diff suppressed because it is too large
Load diff
58
src/librustc_front/lib.rs
Normal file
58
src/librustc_front/lib.rs
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! The Rust compiler.
|
||||
//!
|
||||
//! # Note
|
||||
//!
|
||||
//! This API is completely unstable and subject to change.
|
||||
|
||||
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
|
||||
#![cfg_attr(stage0, feature(custom_attribute))]
|
||||
#![crate_name = "rustc_front"]
|
||||
#![unstable(feature = "rustc_private", issue = "27812")]
|
||||
#![staged_api]
|
||||
#![crate_type = "dylib"]
|
||||
#![crate_type = "rlib"]
|
||||
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
|
||||
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
|
||||
html_root_url = "http://doc.rust-lang.org/nightly/")]
|
||||
|
||||
#![feature(associated_consts)]
|
||||
#![feature(box_patterns)]
|
||||
#![feature(box_syntax)]
|
||||
#![feature(const_fn)]
|
||||
#![feature(quote)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(rustc_private)]
|
||||
#![feature(slice_patterns)]
|
||||
#![feature(staged_api)]
|
||||
#![feature(str_char)]
|
||||
#![feature(filling_drop)]
|
||||
#![feature(str_escape)]
|
||||
#![cfg_attr(test, feature(test))]
|
||||
|
||||
extern crate serialize;
|
||||
#[macro_use] extern crate log;
|
||||
#[macro_use] extern crate syntax;
|
||||
#[macro_use] #[no_link] extern crate rustc_bitflags;
|
||||
|
||||
extern crate serialize as rustc_serialize; // used by deriving
|
||||
|
||||
pub mod hir;
|
||||
pub mod lowering;
|
||||
pub mod fold;
|
||||
pub mod visit;
|
||||
pub mod attr;
|
||||
pub mod util;
|
||||
|
||||
pub mod print {
|
||||
pub mod pprust;
|
||||
}
|
||||
1133
src/librustc_front/lowering.rs
Normal file
1133
src/librustc_front/lowering.rs
Normal file
File diff suppressed because it is too large
Load diff
686
src/librustc_front/print/pp.rs
Normal file
686
src/librustc_front/print/pp.rs
Normal file
|
|
@ -0,0 +1,686 @@
|
|||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! This pretty-printer is a direct reimplementation of Philip Karlton's
|
||||
//! Mesa pretty-printer, as described in appendix A of
|
||||
//!
|
||||
//! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
|
||||
//! Stanford Department of Computer Science, 1979.
|
||||
//!
|
||||
//! The algorithm's aim is to break a stream into as few lines as possible
|
||||
//! while respecting the indentation-consistency requirements of the enclosing
|
||||
//! block, and avoiding breaking at silly places on block boundaries, for
|
||||
//! example, between "x" and ")" in "x)".
|
||||
//!
|
||||
//! I am implementing this algorithm because it comes with 20 pages of
|
||||
//! documentation explaining its theory, and because it addresses the set of
|
||||
//! concerns I've seen other pretty-printers fall down on. Weirdly. Even though
|
||||
//! it's 32 years old. What can I say?
|
||||
//!
|
||||
//! Despite some redundancies and quirks in the way it's implemented in that
|
||||
//! paper, I've opted to keep the implementation here as similar as I can,
|
||||
//! changing only what was blatantly wrong, a typo, or sufficiently
|
||||
//! non-idiomatic rust that it really stuck out.
|
||||
//!
|
||||
//! In particular you'll see a certain amount of churn related to INTEGER vs.
|
||||
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
|
||||
//! somewhat readily? In any case, I've used usize for indices-in-buffers and
|
||||
//! ints for character-sizes-and-indentation-offsets. This respects the need
|
||||
//! for ints to "go negative" while carrying a pending-calculation balance, and
|
||||
//! helps differentiate all the numbers flying around internally (slightly).
|
||||
//!
|
||||
//! I also inverted the indentation arithmetic used in the print stack, since
|
||||
//! the Mesa implementation (somewhat randomly) stores the offset on the print
|
||||
//! stack in terms of margin-col rather than col itself. I store col.
|
||||
//!
|
||||
//! I also implemented a small change in the String token, in that I store an
|
||||
//! explicit length for the string. For most tokens this is just the length of
|
||||
//! the accompanying string. But it's necessary to permit it to differ, for
|
||||
//! encoding things that are supposed to "go on their own line" -- certain
|
||||
//! classes of comment and blank-line -- where relying on adjacent
|
||||
//! hardbreak-like Break tokens with long blankness indication doesn't actually
|
||||
//! work. To see why, consider when there is a "thing that should be on its own
|
||||
//! line" between two long blocks, say functions. If you put a hardbreak after
|
||||
//! each function (or before each) and the breaking algorithm decides to break
|
||||
//! there anyways (because the functions themselves are long) you wind up with
|
||||
//! extra blank lines. If you don't put hardbreaks you can wind up with the
|
||||
//! "thing which should be on its own line" not getting its own line in the
|
||||
//! rare case of "really small functions" or such. This re-occurs with comments
|
||||
//! and explicit blank lines. So in those cases we use a string with a payload
|
||||
//! we want isolated to a line and an explicit length that's huge, surrounded
|
||||
//! by two zero-length breaks. The algorithm will try its best to fit it on a
|
||||
//! line (which it can't) and so naturally place the content on its own line to
|
||||
//! avoid combining it with other lines and making matters even worse.
|
||||
|
||||
use std::io;
|
||||
use std::string;
|
||||
|
||||
/// Breaking discipline for a block: consistent blocks break at every
/// break point once they break at all; inconsistent blocks flow.
#[derive(Clone, Copy, PartialEq)]
pub enum Breaks {
    Consistent,
    Inconsistent,
}
|
||||
|
||||
/// A potential line-break point: `offset` is the extra indent applied
/// if the break is taken; `blank_space` the spaces emitted otherwise.
#[derive(Clone, Copy)]
pub struct BreakToken {
    offset: isize,
    blank_space: isize,
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct BeginToken {
|
||||
offset: isize,
|
||||
breaks: Breaks
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum Token {
|
||||
String(String, isize),
|
||||
Break(BreakToken),
|
||||
Begin(BeginToken),
|
||||
End,
|
||||
Eof,
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn is_eof(&self) -> bool {
|
||||
match *self {
|
||||
Token::Eof => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_hardbreak_tok(&self) -> bool {
|
||||
match *self {
|
||||
Token::Break(BreakToken {
|
||||
offset: 0,
|
||||
blank_space: bs
|
||||
}) if bs == SIZE_INFINITY =>
|
||||
true,
|
||||
_ =>
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tok_str(token: &Token) -> String {
|
||||
match *token {
|
||||
Token::String(ref s, len) => format!("STR({},{})", s, len),
|
||||
Token::Break(_) => "BREAK".to_string(),
|
||||
Token::Begin(_) => "BEGIN".to_string(),
|
||||
Token::End => "END".to_string(),
|
||||
Token::Eof => "EOF".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn buf_str(toks: &[Token],
|
||||
szs: &[isize],
|
||||
left: usize,
|
||||
right: usize,
|
||||
lim: usize)
|
||||
-> String {
|
||||
let n = toks.len();
|
||||
assert_eq!(n, szs.len());
|
||||
let mut i = left;
|
||||
let mut l = lim;
|
||||
let mut s = string::String::from("[");
|
||||
while i != right && l != 0 {
|
||||
l -= 1;
|
||||
if i != left {
|
||||
s.push_str(", ");
|
||||
}
|
||||
s.push_str(&format!("{}={}",
|
||||
szs[i],
|
||||
tok_str(&toks[i])));
|
||||
i += 1;
|
||||
i %= n;
|
||||
}
|
||||
s.push(']');
|
||||
s
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum PrintStackBreak {
|
||||
Fits,
|
||||
Broken(Breaks),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct PrintStackElem {
|
||||
offset: isize,
|
||||
pbreak: PrintStackBreak
|
||||
}
|
||||
|
||||
/// Sentinel "infinite" size: anything this large can never fit on a line.
const SIZE_INFINITY: isize = 0xffff;
|
||||
|
||||
pub fn mk_printer<'a>(out: Box<io::Write+'a>, linewidth: usize) -> Printer<'a> {
|
||||
// Yes 3, it makes the ring buffers big enough to never
|
||||
// fall behind.
|
||||
let n: usize = 3 * linewidth;
|
||||
debug!("mk_printer {}", linewidth);
|
||||
let token = vec![Token::Eof; n];
|
||||
let size = vec![0_isize; n];
|
||||
let scan_stack = vec![0_usize; n];
|
||||
Printer {
|
||||
out: out,
|
||||
buf_len: n,
|
||||
margin: linewidth as isize,
|
||||
space: linewidth as isize,
|
||||
left: 0,
|
||||
right: 0,
|
||||
token: token,
|
||||
size: size,
|
||||
left_total: 0,
|
||||
right_total: 0,
|
||||
scan_stack: scan_stack,
|
||||
scan_stack_empty: true,
|
||||
top: 0,
|
||||
bottom: 0,
|
||||
print_stack: Vec::new(),
|
||||
pending_indentation: 0
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// In case you do not have the paper, here is an explanation of what's going
|
||||
/// on.
|
||||
///
|
||||
/// There is a stream of input tokens flowing through this printer.
|
||||
///
|
||||
/// The printer buffers up to 3N tokens inside itself, where N is linewidth.
|
||||
/// Yes, linewidth is chars and tokens are multi-char, but in the worst
|
||||
/// case every token worth buffering is 1 char long, so it's ok.
|
||||
///
|
||||
/// Tokens are String, Break, and Begin/End to delimit blocks.
|
||||
///
|
||||
/// Begin tokens can carry an offset, saying "how far to indent when you break
|
||||
/// inside here", as well as a flag indicating "consistent" or "inconsistent"
|
||||
/// breaking. Consistent breaking means that after the first break, no attempt
|
||||
/// will be made to flow subsequent breaks together onto lines. Inconsistent
|
||||
/// is the opposite. Inconsistent breaking example would be, say:
|
||||
///
|
||||
/// foo(hello, there, good, friends)
|
||||
///
|
||||
/// breaking inconsistently to become
|
||||
///
|
||||
/// foo(hello, there
|
||||
/// good, friends);
|
||||
///
|
||||
/// whereas a consistent breaking would yield:
|
||||
///
|
||||
/// foo(hello,
|
||||
/// there
|
||||
/// good,
|
||||
/// friends);
|
||||
///
|
||||
/// That is, in the consistent-break blocks we value vertical alignment
|
||||
/// more than the ability to cram stuff onto a line. But in all cases if it
|
||||
/// can make a block a one-liner, it'll do so.
|
||||
///
|
||||
/// Carrying on with high-level logic:
|
||||
///
|
||||
/// The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and
|
||||
/// 'right' indices denote the active portion of the ring buffer as well as
|
||||
/// describing hypothetical points-in-the-infinite-stream at most 3N tokens
|
||||
/// apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch
|
||||
/// between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
|
||||
/// and point-in-infinite-stream senses freely.
|
||||
///
|
||||
/// There is a parallel ring buffer, 'size', that holds the calculated size of
|
||||
/// each token. Why calculated? Because for Begin/End pairs, the "size"
|
||||
/// includes everything between the pair. That is, the "size" of Begin is
|
||||
/// actually the sum of the sizes of everything between Begin and the paired
|
||||
/// End that follows. Since that is arbitrarily far in the future, 'size' is
|
||||
/// being rewritten regularly while the printer runs; in fact most of the
|
||||
/// machinery is here to work out 'size' entries on the fly (and give up when
|
||||
/// they're so obviously over-long that "infinity" is a good enough
|
||||
/// approximation for purposes of line breaking).
|
||||
///
|
||||
/// The "input side" of the printer is managed as an abstract process called
|
||||
/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to
|
||||
/// manage calculating 'size'. SCAN is, in other words, the process of
|
||||
/// calculating 'size' entries.
|
||||
///
|
||||
/// The "output side" of the printer is managed by an abstract process called
|
||||
/// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
|
||||
/// do with each token/size pair it consumes as it goes. It's trying to consume
|
||||
/// the entire buffered window, but can't output anything until the size is >=
|
||||
/// 0 (sizes are set to negative while they're pending calculation).
|
||||
///
|
||||
/// So SCAN takes input and buffers tokens and pending calculations, while
|
||||
/// PRINT gobbles up completed calculations and tokens from the buffer. The
|
||||
/// theory is that the two can never get more than 3N tokens apart, because
|
||||
/// once there's "obviously" too much data to fit on a line, in a size
|
||||
/// calculation, SCAN will write "infinity" to the size and let PRINT consume
|
||||
/// it.
|
||||
///
|
||||
/// In this implementation (following the paper, again) the SCAN process is
|
||||
/// the method called 'pretty_print', and the 'PRINT' process is the method
|
||||
/// called 'print'.
|
||||
pub struct Printer<'a> {
    /// Sink that all rendered output is written to.
    pub out: Box<io::Write+'a>,
    /// Number of slots in each of the ring buffers below.
    buf_len: usize,
    /// Width of lines we're constrained to
    margin: isize,
    /// Number of spaces left on line
    space: isize,
    /// Index of left side of input stream
    left: usize,
    /// Index of right side of input stream
    right: usize,
    /// Ring-buffer stream goes through
    token: Vec<Token> ,
    /// Ring-buffer of calculated sizes
    size: Vec<isize> ,
    /// Running size of stream "...left"
    left_total: isize,
    /// Running size of stream "...right"
    right_total: isize,
    /// Pseudo-stack, really a ring too. Holds the
    /// primary-ring-buffers index of the Begin that started the
    /// current block, possibly with the most recent Break after that
    /// Begin (if there is any) on top of it. Stuff is flushed off the
    /// bottom as it becomes irrelevant due to the primary ring-buffer
    /// advancing.
    scan_stack: Vec<usize> ,
    /// Top==bottom disambiguator
    scan_stack_empty: bool,
    /// Index of top of scan_stack
    top: usize,
    /// Index of bottom of scan_stack
    bottom: usize,
    /// Stack of blocks-in-progress being flushed by print
    print_stack: Vec<PrintStackElem> ,
    /// Buffered indentation to avoid writing trailing whitespace
    pending_indentation: isize,
}
|
||||
|
||||
impl<'a> Printer<'a> {
    /// Returns a clone of the token most recently buffered at the right
    /// edge of the ring buffer.
    pub fn last_token(&mut self) -> Token {
        self.token[self.right].clone()
    }
    // be very careful with this!
    /// Overwrites the token at the right edge of the ring buffer in place.
    pub fn replace_last_token(&mut self, t: Token) {
        self.token[self.right] = t;
    }
    /// The SCAN process (see the explanation on `Printer`): consumes one
    /// input token, buffering it and maintaining the pending `size`
    /// entries, and hands resolved tokens to PRINT via `advance_left`.
    pub fn pretty_print(&mut self, token: Token) -> io::Result<()> {
        debug!("pp Vec<{},{}>", self.left, self.right);
        match token {
            Token::Eof => {
                // Flush whatever is still buffered; after Eof nothing more
                // can arrive to change pending size calculations.
                if !self.scan_stack_empty {
                    self.check_stack(0);
                    try!(self.advance_left());
                }
                self.indent(0);
                Ok(())
            }
            Token::Begin(b) => {
                if self.scan_stack_empty {
                    // Buffer was fully drained: reset the ring indices.
                    self.left_total = 1;
                    self.right_total = 1;
                    self.left = 0;
                    self.right = 0;
                } else { self.advance_right(); }
                debug!("pp Begin({})/buffer Vec<{},{}>",
                       b.offset, self.left, self.right);
                // Size stays negative (pending) until the matching End.
                self.token[self.right] = token;
                self.size[self.right] = -self.right_total;
                let right = self.right;
                self.scan_push(right);
                Ok(())
            }
            Token::End => {
                if self.scan_stack_empty {
                    // Nothing pending: the End can be printed immediately.
                    debug!("pp End/print Vec<{},{}>", self.left, self.right);
                    self.print(token, 0)
                } else {
                    debug!("pp End/buffer Vec<{},{}>", self.left, self.right);
                    self.advance_right();
                    self.token[self.right] = token;
                    self.size[self.right] = -1;
                    let right = self.right;
                    self.scan_push(right);
                    Ok(())
                }
            }
            Token::Break(b) => {
                if self.scan_stack_empty {
                    self.left_total = 1;
                    self.right_total = 1;
                    self.left = 0;
                    self.right = 0;
                } else { self.advance_right(); }
                debug!("pp Break({})/buffer Vec<{},{}>",
                       b.offset, self.left, self.right);
                // A new Break resolves the size of the previous entry on
                // the scan stack (if it was a Break).
                self.check_stack(0);
                let right = self.right;
                self.scan_push(right);
                self.token[self.right] = token;
                self.size[self.right] = -self.right_total;
                self.right_total += b.blank_space;
                Ok(())
            }
            Token::String(s, len) => {
                if self.scan_stack_empty {
                    debug!("pp String('{}')/print Vec<{},{}>",
                           s, self.left, self.right);
                    self.print(Token::String(s, len), len)
                } else {
                    debug!("pp String('{}')/buffer Vec<{},{}>",
                           s, self.left, self.right);
                    self.advance_right();
                    self.token[self.right] = Token::String(s, len);
                    self.size[self.right] = len;
                    self.right_total += len;
                    self.check_stream()
                }
            }
        }
    }
    /// If the buffered window has grown wider than the space left on the
    /// current line, gives up on the oldest pending entry (its size becomes
    /// "infinity") and lets PRINT consume from the left.
    pub fn check_stream(&mut self) -> io::Result<()> {
        debug!("check_stream Vec<{}, {}> with left_total={}, right_total={}",
               self.left, self.right, self.left_total, self.right_total);
        if self.right_total - self.left_total > self.space {
            debug!("scan window is {}, longer than space on line ({})",
                   self.right_total - self.left_total, self.space);
            if !self.scan_stack_empty {
                if self.left == self.scan_stack[self.bottom] {
                    debug!("setting {} to infinity and popping", self.left);
                    let scanned = self.scan_pop_bottom();
                    self.size[scanned] = SIZE_INFINITY;
                }
            }
            try!(self.advance_left());
            if self.left != self.right {
                try!(self.check_stream());
            }
        }
        Ok(())
    }
    /// Pushes a primary-ring-buffer index onto the scan stack.
    pub fn scan_push(&mut self, x: usize) {
        debug!("scan_push {}", x);
        if self.scan_stack_empty {
            self.scan_stack_empty = false;
        } else {
            // The scan stack is itself a ring: advance `top` with wraparound.
            self.top += 1;
            self.top %= self.buf_len;
            assert!((self.top != self.bottom));
        }
        self.scan_stack[self.top] = x;
    }
    /// Pops the most recently pushed index off the scan stack.
    pub fn scan_pop(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        let x = self.scan_stack[self.top];
        if self.top == self.bottom {
            self.scan_stack_empty = true;
        } else {
            // Step `top` back one slot; adding buf_len - 1 avoids
            // underflowing the unsigned index before the modulo.
            self.top += self.buf_len - 1; self.top %= self.buf_len;
        }
        return x;
    }
    /// Returns (without popping) the index on top of the scan stack.
    pub fn scan_top(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        return self.scan_stack[self.top];
    }
    /// Pops the oldest index off the bottom of the scan stack.
    pub fn scan_pop_bottom(&mut self) -> usize {
        assert!((!self.scan_stack_empty));
        let x = self.scan_stack[self.bottom];
        if self.top == self.bottom {
            self.scan_stack_empty = true;
        } else {
            self.bottom += 1; self.bottom %= self.buf_len;
        }
        return x;
    }
    /// Advances the right edge of the primary ring buffer by one slot.
    pub fn advance_right(&mut self) {
        self.right += 1;
        self.right %= self.buf_len;
        // The buffer must never wrap all the way around onto itself.
        assert!((self.right != self.left));
    }
    /// Hand-off to PRINT: emits tokens from the left edge of the buffer for
    /// as long as their sizes have been resolved (are non-negative).
    pub fn advance_left(&mut self) -> io::Result<()> {
        debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
               self.left, self.size[self.left]);

        let mut left_size = self.size[self.left];

        while left_size >= 0 {
            let left = self.token[self.left].clone();

            let len = match left {
                Token::Break(b) => b.blank_space,
                Token::String(_, len) => {
                    assert_eq!(len, left_size);
                    len
                }
                _ => 0
            };

            try!(self.print(left, left_size));

            self.left_total += len;

            if self.left == self.right {
                break;
            }

            self.left += 1;
            self.left %= self.buf_len;

            left_size = self.size[self.left];
        }

        Ok(())
    }
    /// Resolves pending sizes on the scan stack; `k` counts the unmatched
    /// End tokens seen so far while unwinding.
    pub fn check_stack(&mut self, k: isize) {
        if !self.scan_stack_empty {
            let x = self.scan_top();
            match self.token[x] {
                Token::Begin(_) => {
                    if k > 0 {
                        let popped = self.scan_pop();
                        self.size[popped] = self.size[x] + self.right_total;
                        self.check_stack(k - 1);
                    }
                }
                Token::End => {
                    // paper says + not =, but that makes no sense.
                    let popped = self.scan_pop();
                    self.size[popped] = 1;
                    self.check_stack(k + 1);
                }
                _ => {
                    let popped = self.scan_pop();
                    self.size[popped] = self.size[x] + self.right_total;
                    if k > 0 {
                        self.check_stack(k);
                    }
                }
            }
        }
    }
    /// Emits a newline and queues `amount` columns of indentation.
    pub fn print_newline(&mut self, amount: isize) -> io::Result<()> {
        debug!("NEWLINE {}", amount);
        let ret = write!(self.out, "\n");
        self.pending_indentation = 0;
        self.indent(amount);
        return ret;
    }
    /// Buffers `amount` columns of indentation; the spaces are written
    /// lazily by `print_str` so lines never end in trailing whitespace.
    pub fn indent(&mut self, amount: isize) {
        debug!("INDENT {}", amount);
        self.pending_indentation += amount;
    }
    /// Returns the innermost print-stack entry, or a default
    /// broken-inconsistent block when the stack is empty.
    pub fn get_top(&mut self) -> PrintStackElem {
        let print_stack = &mut self.print_stack;
        let n = print_stack.len();
        if n != 0 {
            (*print_stack)[n - 1]
        } else {
            PrintStackElem {
                offset: 0,
                pbreak: PrintStackBreak::Broken(Breaks::Inconsistent)
            }
        }
    }
    /// Writes any pending indentation, then the string itself.
    pub fn print_str(&mut self, s: &str) -> io::Result<()> {
        while self.pending_indentation > 0 {
            try!(write!(self.out, " "));
            self.pending_indentation -= 1;
        }
        write!(self.out, "{}", s)
    }
    /// The PRINT process: consumes one (token, size) pair, maintaining the
    /// print stack and the remaining line space, and writes output.
    pub fn print(&mut self, token: Token, l: isize) -> io::Result<()> {
        debug!("print {} {} (remaining line space={})", tok_str(&token), l,
               self.space);
        debug!("{}", buf_str(&self.token,
                             &self.size,
                             self.left,
                             self.right,
                             6));
        match token {
            Token::Begin(b) => {
                if l > self.space {
                    // Block won't fit: push it as broken at the column
                    // where it starts, plus its own offset.
                    let col = self.margin - self.space + b.offset;
                    debug!("print Begin -> push broken block at col {}", col);
                    self.print_stack.push(PrintStackElem {
                        offset: col,
                        pbreak: PrintStackBreak::Broken(b.breaks)
                    });
                } else {
                    debug!("print Begin -> push fitting block");
                    self.print_stack.push(PrintStackElem {
                        offset: 0,
                        pbreak: PrintStackBreak::Fits
                    });
                }
                Ok(())
            }
            Token::End => {
                debug!("print End -> pop End");
                let print_stack = &mut self.print_stack;
                assert!((!print_stack.is_empty()));
                print_stack.pop().unwrap();
                Ok(())
            }
            Token::Break(b) => {
                let top = self.get_top();
                match top.pbreak {
                    PrintStackBreak::Fits => {
                        // Enclosing block fits on the line: render the
                        // break as its blank space, no newline.
                        debug!("print Break({}) in fitting block", b.blank_space);
                        self.space -= b.blank_space;
                        self.indent(b.blank_space);
                        Ok(())
                    }
                    PrintStackBreak::Broken(Breaks::Consistent) => {
                        // Consistent blocks break at every Break token.
                        debug!("print Break({}+{}) in consistent block",
                               top.offset, b.offset);
                        let ret = self.print_newline(top.offset + b.offset);
                        self.space = self.margin - (top.offset + b.offset);
                        ret
                    }
                    PrintStackBreak::Broken(Breaks::Inconsistent) => {
                        if l > self.space {
                            // Next chunk won't fit: break here.
                            debug!("print Break({}+{}) w/ newline in inconsistent",
                                   top.offset, b.offset);
                            let ret = self.print_newline(top.offset + b.offset);
                            self.space = self.margin - (top.offset + b.offset);
                            ret
                        } else {
                            debug!("print Break({}) w/o newline in inconsistent",
                                   b.blank_space);
                            self.indent(b.blank_space);
                            self.space -= b.blank_space;
                            Ok(())
                        }
                    }
                }
            }
            Token::String(s, len) => {
                debug!("print String({})", s);
                assert_eq!(l, len);
                // assert!(l <= space);
                self.space -= len;
                self.print_str(&s[..])
            }
            Token::Eof => {
                // Eof should never get here.
                panic!();
            }
        }
    }
}
|
||||
|
||||
// Convenience functions to talk to the printer.
|
||||
//
|
||||
// "raw box"
|
||||
pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::Result<()> {
|
||||
p.pretty_print(Token::Begin(BeginToken {
|
||||
offset: indent as isize,
|
||||
breaks: b
|
||||
}))
|
||||
}
|
||||
|
||||
/// Opens an inconsistent-breaking box with the given indentation.
pub fn ibox(p: &mut Printer, indent: usize) -> io::Result<()> {
    rbox(p, indent, Breaks::Inconsistent)
}
|
||||
|
||||
/// Opens a consistent-breaking box with the given indentation.
pub fn cbox(p: &mut Printer, indent: usize) -> io::Result<()> {
    rbox(p, indent, Breaks::Consistent)
}
|
||||
|
||||
pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::Result<()> {
|
||||
p.pretty_print(Token::Break(BreakToken {
|
||||
offset: off,
|
||||
blank_space: n as isize
|
||||
}))
|
||||
}
|
||||
|
||||
/// Closes the innermost open box.
pub fn end(p: &mut Printer) -> io::Result<()> {
    p.pretty_print(Token::End)
}
|
||||
|
||||
/// Signals end of input, flushing any remaining buffered tokens.
pub fn eof(p: &mut Printer) -> io::Result<()> {
    p.pretty_print(Token::Eof)
}
|
||||
|
||||
pub fn word(p: &mut Printer, wrd: &str) -> io::Result<()> {
|
||||
p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize))
|
||||
}
|
||||
|
||||
pub fn huge_word(p: &mut Printer, wrd: &str) -> io::Result<()> {
|
||||
p.pretty_print(Token::String(/* bad */ wrd.to_string(), SIZE_INFINITY))
|
||||
}
|
||||
|
||||
pub fn zero_word(p: &mut Printer, wrd: &str) -> io::Result<()> {
|
||||
p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
|
||||
}
|
||||
|
||||
/// Emits a break of `n` blank spaces with no extra indentation offset.
pub fn spaces(p: &mut Printer, n: usize) -> io::Result<()> {
    break_offset(p, n, 0)
}
|
||||
|
||||
/// Emits an optional break that renders as nothing when not taken.
pub fn zerobreak(p: &mut Printer) -> io::Result<()> {
    spaces(p, 0)
}
|
||||
|
||||
/// Emits an optional break that renders as a single space when not taken.
pub fn space(p: &mut Printer) -> io::Result<()> {
    spaces(p, 1)
}
|
||||
|
||||
/// Emits a break of "infinite" width, which always forces a newline.
pub fn hardbreak(p: &mut Printer) -> io::Result<()> {
    spaces(p, SIZE_INFINITY as usize)
}
|
||||
|
||||
pub fn hardbreak_tok_offset(off: isize) -> Token {
|
||||
Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY})
|
||||
}
|
||||
|
||||
/// Builds a forced-newline break token with no indent offset.
pub fn hardbreak_tok() -> Token {
    hardbreak_tok_offset(0)
}
|
||||
2760
src/librustc_front/print/pprust.rs
Normal file
2760
src/librustc_front/print/pprust.rs
Normal file
File diff suppressed because it is too large
Load diff
427
src/librustc_front/util.rs
Normal file
427
src/librustc_front/util.rs
Normal file
|
|
@ -0,0 +1,427 @@
|
|||
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use hir;
|
||||
use hir::*;
|
||||
use visit::{self, Visitor, FnKind};
|
||||
use syntax::ast_util;
|
||||
use syntax::ast::{Ident, NodeId, DUMMY_NODE_ID};
|
||||
use syntax::codemap::Span;
|
||||
use syntax::ptr::P;
|
||||
use syntax::owned_slice::OwnedSlice;
|
||||
|
||||
/// Walks `pat` and every nested sub-pattern in pre-order, calling `it` on
/// each. Stops early (returning false) as soon as `it` returns false;
/// returns true if every call returned true.
pub fn walk_pat<F>(pat: &Pat, mut it: F) -> bool where F: FnMut(&Pat) -> bool {
    // FIXME(#19596) this is a workaround, but there should be a better way
    fn walk_pat_<G>(pat: &Pat, it: &mut G) -> bool where G: FnMut(&Pat) -> bool {
        if !(*it)(pat) {
            return false;
        }

        match pat.node {
            // Composite patterns: recurse into each child; `all`
            // short-circuits on the first false.
            PatIdent(_, _, Some(ref p)) => walk_pat_(&**p, it),
            PatStruct(_, ref fields, _) => {
                fields.iter().all(|field| walk_pat_(&*field.node.pat, it))
            }
            PatEnum(_, Some(ref s)) | PatTup(ref s) => {
                s.iter().all(|p| walk_pat_(&**p, it))
            }
            PatBox(ref s) | PatRegion(ref s, _) => {
                walk_pat_(&**s, it)
            }
            PatVec(ref before, ref slice, ref after) => {
                before.iter().all(|p| walk_pat_(&**p, it)) &&
                slice.iter().all(|p| walk_pat_(&**p, it)) &&
                after.iter().all(|p| walk_pat_(&**p, it))
            }
            // Leaf patterns (or forms whose children were handled above):
            // nothing further to visit.
            PatWild(_) | PatLit(_) | PatRange(_, _) | PatIdent(_, _, _) |
            PatEnum(_, _) | PatQPath(_, _) => {
                true
            }
        }
    }

    walk_pat_(pat, &mut it)
}
|
||||
|
||||
/// Returns the source-level symbol for a binary operator.
pub fn binop_to_string(op: BinOp_) -> &'static str {
    match op {
        // arithmetic
        BiAdd => "+",
        BiSub => "-",
        BiMul => "*",
        BiDiv => "/",
        BiRem => "%",
        // lazy boolean
        BiAnd => "&&",
        BiOr => "||",
        // bitwise
        BiBitXor => "^",
        BiBitAnd => "&",
        BiBitOr => "|",
        BiShl => "<<",
        BiShr => ">>",
        // comparison
        BiEq => "==",
        BiLt => "<",
        BiLe => "<=",
        BiNe => "!=",
        BiGe => ">=",
        BiGt => ">"
    }
}
|
||||
|
||||
/// Returns true if the given struct def is tuple-like; i.e. that its fields
/// are unnamed.
pub fn struct_def_is_tuple_like(struct_def: &hir::StructDef) -> bool {
    // Tuple-like structs carry an id for their implicit constructor;
    // brace structs have none.
    struct_def.ctor_id.is_some()
}
|
||||
|
||||
pub fn stmt_id(s: &Stmt) -> NodeId {
|
||||
match s.node {
|
||||
StmtDecl(_, id) => id,
|
||||
StmtExpr(_, id) => id,
|
||||
StmtSemi(_, id) => id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lazy_binop(b: BinOp_) -> bool {
|
||||
match b {
|
||||
BiAnd => true,
|
||||
BiOr => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_shift_binop(b: BinOp_) -> bool {
|
||||
match b {
|
||||
BiShl => true,
|
||||
BiShr => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_comparison_binop(b: BinOp_) -> bool {
|
||||
match b {
|
||||
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe =>
|
||||
true,
|
||||
BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem |
|
||||
BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr =>
|
||||
false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the binary operator takes its arguments by value
pub fn is_by_value_binop(b: BinOp_) -> bool {
    // Everything except the comparison operators consumes its operands.
    !is_comparison_binop(b)
}
|
||||
|
||||
/// Returns `true` if the unary operator takes its argument by value
pub fn is_by_value_unop(u: UnOp) -> bool {
    match u {
        UnNeg | UnNot => true,
        _ => false,
    }
}
|
||||
|
||||
/// Returns the source-level spelling of a unary operator.
pub fn unop_to_string(op: UnOp) -> &'static str {
    match op {
        UnUniq => "box() ",
        UnDeref => "*",
        UnNot => "!",
        UnNeg => "-",
    }
}
|
||||
|
||||
/// A `Visitor` that reports every `NodeId` it encounters to `operation`.
pub struct IdVisitor<'a, O:'a> {
    // Callback object invoked with each visited id.
    pub operation: &'a mut O,
    // If false, visit only the outermost item and skip items nested in it
    // (see `visit_item` / `visit_fn` below).
    pub pass_through_items: bool,
    // Tracks whether the outermost item has already been entered; only
    // consulted when `pass_through_items` is false.
    pub visited_outermost: bool,
}
|
||||
|
||||
impl<'a, O: ast_util::IdVisitingOperation> IdVisitor<'a, O> {
|
||||
fn visit_generics_helper(&mut self, generics: &Generics) {
|
||||
for type_parameter in generics.ty_params.iter() {
|
||||
self.operation.visit_id(type_parameter.id)
|
||||
}
|
||||
for lifetime in &generics.lifetimes {
|
||||
self.operation.visit_id(lifetime.lifetime.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'v, O: ast_util::IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> {
    fn visit_mod(&mut self,
                 module: &Mod,
                 _: Span,
                 node_id: NodeId) {
        self.operation.visit_id(node_id);
        visit::walk_mod(self, module)
    }

    fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
        self.operation.visit_id(foreign_item.id);
        visit::walk_foreign_item(self, foreign_item)
    }

    fn visit_item(&mut self, item: &Item) {
        // When not passing through items, visit only the outermost item
        // and return early for anything nested inside it.
        if !self.pass_through_items {
            if self.visited_outermost {
                return
            } else {
                self.visited_outermost = true
            }
        }

        self.operation.visit_id(item.id);
        match item.node {
            ItemUse(ref view_path) => {
                // `use a::{b, c}` carries a separate id per listed path.
                match view_path.node {
                    ViewPathSimple(_, _) |
                    ViewPathGlob(_) => {}
                    ViewPathList(_, ref paths) => {
                        for path in paths {
                            self.operation.visit_id(path.node.id())
                        }
                    }
                }
            }
            ItemEnum(ref enum_definition, _) => {
                // Also report each variant's own id.
                for variant in &enum_definition.variants {
                    self.operation.visit_id(variant.node.id)
                }
            }
            _ => {}
        }

        visit::walk_item(self, item);

        self.visited_outermost = false
    }

    fn visit_local(&mut self, local: &Local) {
        self.operation.visit_id(local.id);
        visit::walk_local(self, local)
    }

    fn visit_block(&mut self, block: &Block) {
        self.operation.visit_id(block.id);
        visit::walk_block(self, block)
    }

    fn visit_stmt(&mut self, statement: &Stmt) {
        self.operation.visit_id(stmt_id(statement));
        visit::walk_stmt(self, statement)
    }

    fn visit_pat(&mut self, pattern: &Pat) {
        self.operation.visit_id(pattern.id);
        visit::walk_pat(self, pattern)
    }

    fn visit_expr(&mut self, expression: &Expr) {
        self.operation.visit_id(expression.id);
        visit::walk_expr(self, expression)
    }

    fn visit_ty(&mut self, typ: &Ty) {
        self.operation.visit_id(typ.id);
        visit::walk_ty(self, typ)
    }

    fn visit_generics(&mut self, generics: &Generics) {
        self.visit_generics_helper(generics);
        visit::walk_generics(self, generics)
    }

    fn visit_fn(&mut self,
                function_kind: FnKind<'v>,
                function_declaration: &'v FnDecl,
                block: &'v Block,
                span: Span,
                node_id: NodeId) {
        // Methods count as "outermost" items for the skipping logic;
        // free fns and closures do not.
        if !self.pass_through_items {
            match function_kind {
                FnKind::Method(..) if self.visited_outermost => return,
                FnKind::Method(..) => self.visited_outermost = true,
                _ => {}
            }
        }

        self.operation.visit_id(node_id);

        // Generics are attached differently per kind of fn.
        match function_kind {
            FnKind::ItemFn(_, generics, _, _, _, _) => {
                self.visit_generics_helper(generics)
            }
            FnKind::Method(_, sig, _) => {
                self.visit_generics_helper(&sig.generics)
            }
            FnKind::Closure => {}
        }

        // Each formal argument carries its own id as well.
        for argument in &function_declaration.inputs {
            self.operation.visit_id(argument.id)
        }

        visit::walk_fn(self,
                       function_kind,
                       function_declaration,
                       block,
                       span);

        if !self.pass_through_items {
            if let FnKind::Method(..) = function_kind {
                self.visited_outermost = false;
            }
        }
    }

    fn visit_struct_field(&mut self, struct_field: &StructField) {
        self.operation.visit_id(struct_field.node.id);
        visit::walk_struct_field(self, struct_field)
    }

    fn visit_struct_def(&mut self,
                        struct_def: &StructDef,
                        _: Ident,
                        _: &hir::Generics,
                        id: NodeId) {
        self.operation.visit_id(id);
        // Tuple-like structs carry an extra id for the implicit constructor.
        struct_def.ctor_id.map(|ctor_id| self.operation.visit_id(ctor_id));
        visit::walk_struct_def(self, struct_def);
    }

    fn visit_trait_item(&mut self, ti: &hir::TraitItem) {
        self.operation.visit_id(ti.id);
        visit::walk_trait_item(self, ti);
    }

    fn visit_impl_item(&mut self, ii: &hir::ImplItem) {
        self.operation.visit_id(ii.id);
        visit::walk_impl_item(self, ii);
    }

    fn visit_lifetime_ref(&mut self, lifetime: &Lifetime) {
        self.operation.visit_id(lifetime.id);
    }

    fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
        self.visit_lifetime_ref(&def.lifetime);
    }

    fn visit_trait_ref(&mut self, trait_ref: &TraitRef) {
        self.operation.visit_id(trait_ref.ref_id);
        visit::walk_trait_ref(self, trait_ref);
    }
}
|
||||
|
||||
/// Computes the id range for a single fn body, ignoring nested items.
|
||||
pub fn compute_id_range_for_fn_body(fk: FnKind,
|
||||
decl: &FnDecl,
|
||||
body: &Block,
|
||||
sp: Span,
|
||||
id: NodeId)
|
||||
-> ast_util::IdRange
|
||||
{
|
||||
let mut visitor = ast_util::IdRangeComputingVisitor {
|
||||
result: ast_util::IdRange::max()
|
||||
};
|
||||
let mut id_visitor = IdVisitor {
|
||||
operation: &mut visitor,
|
||||
pass_through_items: false,
|
||||
visited_outermost: false,
|
||||
};
|
||||
id_visitor.visit_fn(fk, decl, body, sp, id);
|
||||
id_visitor.operation.result
|
||||
}
|
||||
|
||||
/// Returns true if this literal is a string and false otherwise.
|
||||
pub fn lit_is_str(lit: &Lit) -> bool {
|
||||
match lit.node {
|
||||
LitStr(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_path(e: P<Expr>) -> bool {
|
||||
match e.node { ExprPath(..) => true, _ => false }
|
||||
}
|
||||
|
||||
/// Get a string representation of a signed int type, with its value.
|
||||
/// We want to avoid "45int" and "-3int" in favor of "45" and "-3"
|
||||
pub fn int_ty_to_string(t: IntTy, val: Option<i64>) -> String {
|
||||
let s = match t {
|
||||
TyIs => "isize",
|
||||
TyI8 => "i8",
|
||||
TyI16 => "i16",
|
||||
TyI32 => "i32",
|
||||
TyI64 => "i64"
|
||||
};
|
||||
|
||||
match val {
|
||||
// cast to a u64 so we can correctly print INT64_MIN. All integral types
|
||||
// are parsed as u64, so we wouldn't want to print an extra negative
|
||||
// sign.
|
||||
Some(n) => format!("{}{}", n as u64, s),
|
||||
None => s.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Get a string representation of an unsigned int type, with its value.
|
||||
/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
|
||||
pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
|
||||
let s = match t {
|
||||
TyUs => "usize",
|
||||
TyU8 => "u8",
|
||||
TyU16 => "u16",
|
||||
TyU32 => "u32",
|
||||
TyU64 => "u64"
|
||||
};
|
||||
|
||||
match val {
|
||||
Some(n) => format!("{}{}", n, s),
|
||||
None => s.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn float_ty_to_string(t: FloatTy) -> String {
|
||||
match t {
|
||||
TyF32 => "f32".to_string(),
|
||||
TyF64 => "f64".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn empty_generics() -> Generics {
|
||||
Generics {
|
||||
lifetimes: Vec::new(),
|
||||
ty_params: OwnedSlice::empty(),
|
||||
where_clause: WhereClause {
|
||||
id: DUMMY_NODE_ID,
|
||||
predicates: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// convert a span and an identifier to the corresponding
|
||||
// 1-segment path
|
||||
pub fn ident_to_path(s: Span, identifier: Ident) -> Path {
|
||||
hir::Path {
|
||||
span: s,
|
||||
global: false,
|
||||
segments: vec!(
|
||||
hir::PathSegment {
|
||||
identifier: identifier,
|
||||
parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
|
||||
lifetimes: Vec::new(),
|
||||
types: OwnedSlice::empty(),
|
||||
bindings: OwnedSlice::empty(),
|
||||
})
|
||||
}
|
||||
),
|
||||
}
|
||||
}
|
||||
841
src/librustc_front/visit.rs
Normal file
841
src/librustc_front/visit.rs
Normal file
|
|
@ -0,0 +1,841 @@
|
|||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! HIR walker. Each overridden visit method has full control over what
|
||||
//! happens with its node, it can do its own traversal of the node's children,
|
||||
//! call `visit::walk_*` to apply the default traversal algorithm, or prevent
|
||||
//! deeper traversal by doing nothing.
|
||||
//!
|
||||
//! Note: it is an important invariant that the default visitor walks the body
|
||||
//! of a function in "execution order" (more concretely, reverse post-order
|
||||
//! with respect to the CFG implied by the AST), meaning that if AST node A may
|
||||
//! execute before AST node B, then A is visited first. The borrow checker in
|
||||
//! particular relies on this property.
|
||||
//!
|
||||
//! Note: walking an AST before macro expansion is probably a bad idea. For
|
||||
//! instance, a walker looking for item names in a module will miss all of
|
||||
//! those that are created by the expansion of a macro.
|
||||
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast::{Ident, NodeId, CRATE_NODE_ID, Name};
|
||||
use hir::*;
|
||||
use hir;
|
||||
use syntax::codemap::Span;
|
||||
use syntax::ptr::P;
|
||||
use syntax::owned_slice::OwnedSlice;
|
||||
|
||||
/// The kind of function-like item being visited, passed to
/// `Visitor::visit_fn`.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum FnKind<'a> {
    /// fn foo() or extern "Abi" fn foo()
    ItemFn(Ident, &'a Generics, Unsafety, Constness, Abi, Visibility),

    /// fn foo(&self)
    Method(Ident, &'a MethodSig, Option<Visibility>),

    /// |x, y| ...
    /// proc(x, y) ...
    Closure,
}
|
||||
|
||||
/// Each method of the Visitor trait is a hook to be potentially
/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g. the `visit_mod` method by default calls `visit::walk_mod`.
///
/// If you want to ensure that your code handles every variant
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'v> : Sized {
    // Leaf hook: names have no substructure, so the default does nothing.
    fn visit_name(&mut self, _span: Span, _name: Name) {
        // Nothing to do.
    }
    // An identifier is just a name plus hygiene info; forward to `visit_name`.
    fn visit_ident(&mut self, span: Span, ident: Ident) {
        self.visit_name(span, ident.name);
    }
    fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) { walk_mod(self, m) }
    fn visit_foreign_item(&mut self, i: &'v ForeignItem) { walk_foreign_item(self, i) }
    fn visit_item(&mut self, i: &'v Item) { walk_item(self, i) }
    fn visit_local(&mut self, l: &'v Local) { walk_local(self, l) }
    fn visit_block(&mut self, b: &'v Block) { walk_block(self, b) }
    fn visit_stmt(&mut self, s: &'v Stmt) { walk_stmt(self, s) }
    fn visit_arm(&mut self, a: &'v Arm) { walk_arm(self, a) }
    fn visit_pat(&mut self, p: &'v Pat) { walk_pat(self, p) }
    fn visit_decl(&mut self, d: &'v Decl) { walk_decl(self, d) }
    fn visit_expr(&mut self, ex: &'v Expr) { walk_expr(self, ex) }
    // Called by `walk_expr` after an expression's children have been visited.
    fn visit_expr_post(&mut self, _ex: &'v Expr) { }
    fn visit_ty(&mut self, t: &'v Ty) { walk_ty(self, t) }
    fn visit_generics(&mut self, g: &'v Generics) { walk_generics(self, g) }
    fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) {
        walk_fn(self, fk, fd, b, s)
    }
    fn visit_trait_item(&mut self, ti: &'v TraitItem) { walk_trait_item(self, ti) }
    fn visit_impl_item(&mut self, ii: &'v ImplItem) { walk_impl_item(self, ii) }
    fn visit_trait_ref(&mut self, t: &'v TraitRef) { walk_trait_ref(self, t) }
    fn visit_ty_param_bound(&mut self, bounds: &'v TyParamBound) {
        walk_ty_param_bound(self, bounds)
    }
    fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: &'v TraitBoundModifier) {
        walk_poly_trait_ref(self, t, m)
    }
    fn visit_struct_def(&mut self, s: &'v StructDef, _: Ident, _: &'v Generics, _: NodeId) {
        walk_struct_def(self, s)
    }
    fn visit_struct_field(&mut self, s: &'v StructField) { walk_struct_field(self, s) }
    fn visit_enum_def(&mut self, enum_definition: &'v EnumDef,
                      generics: &'v Generics) {
        walk_enum_def(self, enum_definition, generics)
    }

    fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics) { walk_variant(self, v, g) }

    /// Visits an optional reference to a lifetime. The `span` is the span of some surrounding
    /// reference should opt_lifetime be None.
    fn visit_opt_lifetime_ref(&mut self,
                              _span: Span,
                              opt_lifetime: &'v Option<Lifetime>) {
        match *opt_lifetime {
            Some(ref l) => self.visit_lifetime_ref(l),
            None => ()
        }
    }
    fn visit_lifetime_bound(&mut self, lifetime: &'v Lifetime) {
        walk_lifetime_bound(self, lifetime)
    }
    fn visit_lifetime_ref(&mut self, lifetime: &'v Lifetime) {
        walk_lifetime_ref(self, lifetime)
    }
    fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
        walk_lifetime_def(self, lifetime)
    }
    fn visit_explicit_self(&mut self, es: &'v ExplicitSelf) {
        walk_explicit_self(self, es)
    }
    fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
        walk_path(self, path)
    }
    fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
        walk_path_segment(self, path_span, path_segment)
    }
    fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &'v PathParameters) {
        walk_path_parameters(self, path_span, path_parameters)
    }
    fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) {
        walk_assoc_type_binding(self, type_binding)
    }
    // Attributes are leaves: the default implementation does not walk into them.
    fn visit_attribute(&mut self, _attr: &'v Attribute) {}
}
|
||||
|
||||
pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
|
||||
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
|
||||
for attr in &krate.attrs {
|
||||
visitor.visit_attribute(attr);
|
||||
}
|
||||
}
|
||||
|
||||
/// Visits every item contained directly in a module, in source order.
pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod) {
    for module_item in module.items.iter() {
        visitor.visit_item(&**module_item)
    }
}
|
||||
|
||||
pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) {
|
||||
visitor.visit_pat(&*local.pat);
|
||||
walk_ty_opt(visitor, &local.ty);
|
||||
walk_expr_opt(visitor, &local.init);
|
||||
}
|
||||
|
||||
pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
lifetime_def: &'v LifetimeDef) {
|
||||
visitor.visit_name(lifetime_def.lifetime.span, lifetime_def.lifetime.name);
|
||||
for bound in &lifetime_def.bounds {
|
||||
visitor.visit_lifetime_bound(bound);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_lifetime_bound<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
lifetime_ref: &'v Lifetime) {
|
||||
visitor.visit_lifetime_ref(lifetime_ref)
|
||||
}
|
||||
|
||||
pub fn walk_lifetime_ref<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
lifetime_ref: &'v Lifetime) {
|
||||
visitor.visit_name(lifetime_ref.span, lifetime_ref.name)
|
||||
}
|
||||
|
||||
pub fn walk_explicit_self<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
explicit_self: &'v ExplicitSelf) {
|
||||
match explicit_self.node {
|
||||
SelfStatic | SelfValue(_) => {},
|
||||
SelfRegion(ref lifetime, _, _) => {
|
||||
visitor.visit_opt_lifetime_ref(explicit_self.span, lifetime)
|
||||
}
|
||||
SelfExplicit(ref typ, _) => visitor.visit_ty(&**typ),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V,
|
||||
trait_ref: &'v PolyTraitRef,
|
||||
_modifier: &'v TraitBoundModifier)
|
||||
where V: Visitor<'v>
|
||||
{
|
||||
walk_lifetime_decls_helper(visitor, &trait_ref.bound_lifetimes);
|
||||
visitor.visit_trait_ref(&trait_ref.trait_ref);
|
||||
}
|
||||
|
||||
pub fn walk_trait_ref<'v,V>(visitor: &mut V,
|
||||
trait_ref: &'v TraitRef)
|
||||
where V: Visitor<'v>
|
||||
{
|
||||
visitor.visit_path(&trait_ref.path, trait_ref.ref_id)
|
||||
}
|
||||
|
||||
/// Walks a top-level item: its identifier, then the variant-specific
/// substructure, then (last) the item's attributes.
pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
    visitor.visit_ident(item.span, item.ident);
    match item.node {
        // `extern crate ...` is a leaf.
        ItemExternCrate(..) => {}
        ItemUse(ref vp) => {
            match vp.node {
                // `use a::b as c;` — visit the rename, then the source path.
                ViewPathSimple(ident, ref path) => {
                    visitor.visit_ident(vp.span, ident);
                    visitor.visit_path(path, item.id);
                }
                // `use a::b::*;`
                ViewPathGlob(ref path) => {
                    visitor.visit_path(path, item.id);
                }
                // `use a::b::{c, d, self};`
                ViewPathList(ref prefix, ref list) => {
                    for id in list {
                        match id.node {
                            PathListIdent { name, .. } => {
                                visitor.visit_ident(id.span, name);
                            }
                            PathListMod { .. } => ()
                        }
                    }

                    // Note that the `prefix` here is not a complete
                    // path, so we don't use `visit_path`.
                    walk_path(visitor, prefix);
                }
            }
        }
        // Statics and consts share shape: a type and an initializer.
        ItemStatic(ref typ, _, ref expr) |
        ItemConst(ref typ, ref expr) => {
            visitor.visit_ty(&**typ);
            visitor.visit_expr(&**expr);
        }
        ItemFn(ref declaration, unsafety, constness, abi, ref generics, ref body) => {
            visitor.visit_fn(FnKind::ItemFn(item.ident, generics, unsafety,
                                            constness, abi, item.vis),
                             &**declaration,
                             &**body,
                             item.span,
                             item.id)
        }
        ItemMod(ref module) => {
            visitor.visit_mod(module, item.span, item.id)
        }
        ItemForeignMod(ref foreign_module) => {
            for foreign_item in &foreign_module.items {
                visitor.visit_foreign_item(&**foreign_item)
            }
        }
        // `type Alias<...> = Ty;`
        ItemTy(ref typ, ref type_parameters) => {
            visitor.visit_ty(&**typ);
            visitor.visit_generics(type_parameters)
        }
        ItemEnum(ref enum_definition, ref type_parameters) => {
            visitor.visit_generics(type_parameters);
            visitor.visit_enum_def(enum_definition, type_parameters)
        }
        // `impl Trait for .. {}`
        ItemDefaultImpl(_, ref trait_ref) => {
            visitor.visit_trait_ref(trait_ref)
        }
        ItemImpl(_, _,
                 ref type_parameters,
                 ref trait_reference,
                 ref typ,
                 ref impl_items) => {
            visitor.visit_generics(type_parameters);
            // The trait reference is absent for inherent impls.
            match *trait_reference {
                Some(ref trait_reference) => visitor.visit_trait_ref(trait_reference),
                None => ()
            }
            visitor.visit_ty(&**typ);
            for impl_item in impl_items {
                visitor.visit_impl_item(impl_item);
            }
        }
        ItemStruct(ref struct_definition, ref generics) => {
            visitor.visit_generics(generics);
            visitor.visit_struct_def(&**struct_definition,
                                     item.ident,
                                     generics,
                                     item.id)
        }
        ItemTrait(_, ref generics, ref bounds, ref methods) => {
            visitor.visit_generics(generics);
            walk_ty_param_bounds_helper(visitor, bounds);
            for method in methods {
                visitor.visit_trait_item(method)
            }
        }
    }
    // Attributes are visited after the item's own substructure.
    for attr in &item.attrs {
        visitor.visit_attribute(attr);
    }
}
|
||||
|
||||
pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
enum_definition: &'v EnumDef,
|
||||
generics: &'v Generics) {
|
||||
for variant in &enum_definition.variants {
|
||||
visitor.visit_variant(&**variant, generics);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
variant: &'v Variant,
|
||||
generics: &'v Generics) {
|
||||
visitor.visit_ident(variant.span, variant.node.name);
|
||||
|
||||
match variant.node.kind {
|
||||
TupleVariantKind(ref variant_arguments) => {
|
||||
for variant_argument in variant_arguments {
|
||||
visitor.visit_ty(&*variant_argument.ty)
|
||||
}
|
||||
}
|
||||
StructVariantKind(ref struct_definition) => {
|
||||
visitor.visit_struct_def(&**struct_definition,
|
||||
variant.node.name,
|
||||
generics,
|
||||
variant.node.id)
|
||||
}
|
||||
}
|
||||
match variant.node.disr_expr {
|
||||
Some(ref expr) => visitor.visit_expr(&**expr),
|
||||
None => ()
|
||||
}
|
||||
for attr in &variant.node.attrs {
|
||||
visitor.visit_attribute(attr);
|
||||
}
|
||||
}
|
||||
|
||||
/// No-op type walker: override `visit_ty` with this to prune type
/// subtrees from a traversal.
pub fn skip_ty<'v, V: Visitor<'v>>(_: &mut V, _: &'v Ty) {}
|
||||
|
||||
/// Visits the contained type, if any; `None` is a no-op.
pub fn walk_ty_opt<'v, V: Visitor<'v>>(visitor: &mut V, optional_type: &'v Option<P<Ty>>) {
    if let Some(ref ty) = *optional_type {
        visitor.visit_ty(&**ty)
    }
}
|
||||
|
||||
/// Walks a type, dispatching on its variant and visiting every nested
/// type, lifetime, path and (for array lengths / typeof) expression.
pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
    match typ.node {
        // Both carry exactly one inner type.
        TyVec(ref ty) | TyParen(ref ty) => {
            visitor.visit_ty(&**ty)
        }
        TyPtr(ref mutable_type) => {
            visitor.visit_ty(&*mutable_type.ty)
        }
        // `&'a mut T`: the (optional) lifetime, then the referent type.
        TyRptr(ref lifetime, ref mutable_type) => {
            visitor.visit_opt_lifetime_ref(typ.span, lifetime);
            visitor.visit_ty(&*mutable_type.ty)
        }
        TyTup(ref tuple_element_types) => {
            for tuple_element_type in tuple_element_types {
                visitor.visit_ty(&**tuple_element_type)
            }
        }
        // Bare fn type: argument types, return type, then bound lifetimes.
        TyBareFn(ref function_declaration) => {
            for argument in &function_declaration.decl.inputs {
                visitor.visit_ty(&*argument.ty)
            }
            walk_fn_ret_ty(visitor, &function_declaration.decl.output);
            walk_lifetime_decls_helper(visitor, &function_declaration.lifetimes);
        }
        // Path type, possibly qualified (`<T as Trait>::Assoc`).
        TyPath(ref maybe_qself, ref path) => {
            if let Some(ref qself) = *maybe_qself {
                visitor.visit_ty(&qself.ty);
            }
            visitor.visit_path(path, typ.id);
        }
        TyObjectSum(ref ty, ref bounds) => {
            visitor.visit_ty(&**ty);
            walk_ty_param_bounds_helper(visitor, bounds);
        }
        // `[T; N]`: element type, then the length expression.
        TyFixedLengthVec(ref ty, ref expression) => {
            visitor.visit_ty(&**ty);
            visitor.visit_expr(&**expression)
        }
        TyPolyTraitRef(ref bounds) => {
            walk_ty_param_bounds_helper(visitor, bounds)
        }
        TyTypeof(ref expression) => {
            visitor.visit_expr(&**expression)
        }
        // `_` has no substructure.
        TyInfer => {}
    }
}
|
||||
|
||||
pub fn walk_lifetime_decls_helper<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
lifetimes: &'v Vec<LifetimeDef>) {
|
||||
for l in lifetimes {
|
||||
visitor.visit_lifetime_def(l);
|
||||
}
|
||||
}
|
||||
|
||||
/// Walks a path by visiting every segment; each segment is visited with
/// the span of the whole path.
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
    for segment in path.segments.iter() {
        visitor.visit_path_segment(path.span, segment);
    }
}
|
||||
|
||||
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
path_span: Span,
|
||||
segment: &'v PathSegment) {
|
||||
visitor.visit_ident(path_span, segment.identifier);
|
||||
visitor.visit_path_parameters(path_span, &segment.parameters);
|
||||
}
|
||||
|
||||
pub fn walk_path_parameters<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
_path_span: Span,
|
||||
path_parameters: &'v PathParameters) {
|
||||
match *path_parameters {
|
||||
hir::AngleBracketedParameters(ref data) => {
|
||||
for typ in data.types.iter() {
|
||||
visitor.visit_ty(&**typ);
|
||||
}
|
||||
for lifetime in &data.lifetimes {
|
||||
visitor.visit_lifetime_ref(lifetime);
|
||||
}
|
||||
for binding in data.bindings.iter() {
|
||||
visitor.visit_assoc_type_binding(&**binding);
|
||||
}
|
||||
}
|
||||
hir::ParenthesizedParameters(ref data) => {
|
||||
for typ in &data.inputs {
|
||||
visitor.visit_ty(&**typ);
|
||||
}
|
||||
if let Some(ref typ) = data.output {
|
||||
visitor.visit_ty(&**typ);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
type_binding: &'v TypeBinding) {
|
||||
visitor.visit_ident(type_binding.span, type_binding.ident);
|
||||
visitor.visit_ty(&*type_binding.ty);
|
||||
}
|
||||
|
||||
/// Walks a pattern, dispatching on its variant and visiting nested
/// patterns, paths, types and literal/range expressions.
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
    match pattern.node {
        // Enum/tuple-struct pattern: path, then the optional argument list.
        PatEnum(ref path, ref children) => {
            visitor.visit_path(path, pattern.id);
            if let Some(ref children) = *children {
                for child in children {
                    visitor.visit_pat(&*child)
                }
            }
        }
        // Qualified path pattern `<T>::CONST`: the self type, then the path.
        PatQPath(ref qself, ref path) => {
            visitor.visit_ty(&qself.ty);
            visitor.visit_path(path, pattern.id)
        }
        PatStruct(ref path, ref fields, _) => {
            visitor.visit_path(path, pattern.id);
            for field in fields {
                visitor.visit_pat(&*field.node.pat)
            }
        }
        PatTup(ref tuple_elements) => {
            for tuple_element in tuple_elements {
                visitor.visit_pat(&**tuple_element)
            }
        }
        // `box pat` and `&pat` both wrap a single subpattern.
        PatBox(ref subpattern) |
        PatRegion(ref subpattern, _) => {
            visitor.visit_pat(&**subpattern)
        }
        // Binding pattern `name @ subpat`.
        PatIdent(_, ref pth1, ref optional_subpattern) => {
            visitor.visit_ident(pth1.span, pth1.node);
            match *optional_subpattern {
                None => {}
                Some(ref subpattern) => visitor.visit_pat(&**subpattern),
            }
        }
        PatLit(ref expression) => visitor.visit_expr(&**expression),
        PatRange(ref lower_bound, ref upper_bound) => {
            visitor.visit_expr(&**lower_bound);
            visitor.visit_expr(&**upper_bound)
        }
        // `_` has no substructure.
        PatWild(_) => (),
        // Slice pattern `[a, b.., c]`: prefix, optional slice, suffix.
        PatVec(ref prepattern, ref slice_pattern, ref postpatterns) => {
            for prepattern in prepattern {
                visitor.visit_pat(&**prepattern)
            }
            if let Some(ref slice_pattern) = *slice_pattern {
                visitor.visit_pat(&**slice_pattern)
            }
            for postpattern in postpatterns {
                visitor.visit_pat(&**postpattern)
            }
        }
    }
}
|
||||
|
||||
pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
foreign_item: &'v ForeignItem) {
|
||||
visitor.visit_ident(foreign_item.span, foreign_item.ident);
|
||||
|
||||
match foreign_item.node {
|
||||
ForeignItemFn(ref function_declaration, ref generics) => {
|
||||
walk_fn_decl(visitor, &**function_declaration);
|
||||
visitor.visit_generics(generics)
|
||||
}
|
||||
ForeignItemStatic(ref typ, _) => visitor.visit_ty(&**typ),
|
||||
}
|
||||
|
||||
for attr in &foreign_item.attrs {
|
||||
visitor.visit_attribute(attr);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_ty_param_bounds_helper<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
bounds: &'v OwnedSlice<TyParamBound>) {
|
||||
for bound in bounds.iter() {
|
||||
visitor.visit_ty_param_bound(bound)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_ty_param_bound<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
bound: &'v TyParamBound) {
|
||||
match *bound {
|
||||
TraitTyParamBound(ref typ, ref modifier) => {
|
||||
visitor.visit_poly_trait_ref(typ, modifier);
|
||||
}
|
||||
RegionTyParamBound(ref lifetime) => {
|
||||
visitor.visit_lifetime_bound(lifetime);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Walks a generics clause: each type parameter (name, bounds, default),
/// then the declared lifetimes, then every where-clause predicate.
pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) {
    for param in generics.ty_params.iter() {
        visitor.visit_ident(param.span, param.ident);
        walk_ty_param_bounds_helper(visitor, &param.bounds);
        walk_ty_opt(visitor, &param.default);
    }
    walk_lifetime_decls_helper(visitor, &generics.lifetimes);
    for predicate in &generics.where_clause.predicates {
        match predicate {
            // `where Ty: Bound + ...`
            &hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate{ref bounded_ty,
                                                                          ref bounds,
                                                                          ..}) => {
                visitor.visit_ty(&**bounded_ty);
                walk_ty_param_bounds_helper(visitor, bounds);
            }
            // `where 'a: 'b + 'c`
            &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{ref lifetime,
                                                                            ref bounds,
                                                                            ..}) => {
                visitor.visit_lifetime_ref(lifetime);

                for bound in bounds {
                    visitor.visit_lifetime_ref(bound);
                }
            }
            // Equality predicate: a path equated with a type.
            &hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{id,
                                                                    ref path,
                                                                    ref ty,
                                                                    ..}) => {
                visitor.visit_path(path, id);
                visitor.visit_ty(&**ty);
            }
        }
    }
}
|
||||
|
||||
pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FunctionRetTy) {
|
||||
if let Return(ref output_ty) = *ret_ty {
|
||||
visitor.visit_ty(&**output_ty)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) {
|
||||
for argument in &function_declaration.inputs {
|
||||
visitor.visit_pat(&*argument.pat);
|
||||
visitor.visit_ty(&*argument.ty)
|
||||
}
|
||||
walk_fn_ret_ty(visitor, &function_declaration.output)
|
||||
}
|
||||
|
||||
pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
function_kind: FnKind<'v>,
|
||||
function_declaration: &'v FnDecl,
|
||||
function_body: &'v Block,
|
||||
_span: Span) {
|
||||
walk_fn_decl(visitor, function_declaration);
|
||||
|
||||
match function_kind {
|
||||
FnKind::ItemFn(_, generics, _, _, _, _) => {
|
||||
visitor.visit_generics(generics);
|
||||
}
|
||||
FnKind::Method(_, sig, _) => {
|
||||
visitor.visit_generics(&sig.generics);
|
||||
visitor.visit_explicit_self(&sig.explicit_self);
|
||||
}
|
||||
FnKind::Closure(..) => {}
|
||||
}
|
||||
|
||||
visitor.visit_block(function_body)
|
||||
}
|
||||
|
||||
/// Walks a trait item: its identifier and attributes first, then the
/// const/method/type-specific substructure.
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
    visitor.visit_ident(trait_item.span, trait_item.ident);
    for attr in &trait_item.attrs {
        visitor.visit_attribute(attr);
    }
    match trait_item.node {
        // Associated const with an optional default value.
        ConstTraitItem(ref ty, ref default) => {
            visitor.visit_ty(ty);
            if let Some(ref expr) = *default {
                visitor.visit_expr(expr);
            }
        }
        // Required method (no body): walk the signature pieces directly,
        // since there is no body to hand to `visit_fn`.
        MethodTraitItem(ref sig, None) => {
            visitor.visit_explicit_self(&sig.explicit_self);
            visitor.visit_generics(&sig.generics);
            walk_fn_decl(visitor, &sig.decl);
        }
        // Provided method (has a body): goes through `visit_fn`.
        MethodTraitItem(ref sig, Some(ref body)) => {
            visitor.visit_fn(FnKind::Method(trait_item.ident, sig, None), &sig.decl,
                             body, trait_item.span, trait_item.id);
        }
        // Associated type with bounds and an optional default.
        TypeTraitItem(ref bounds, ref default) => {
            walk_ty_param_bounds_helper(visitor, bounds);
            walk_ty_opt(visitor, default);
        }
    }
}
|
||||
|
||||
/// Walks an impl item: its identifier and attributes first, then the
/// const/method/type-specific substructure.
pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) {
    visitor.visit_ident(impl_item.span, impl_item.ident);
    for attr in &impl_item.attrs {
        visitor.visit_attribute(attr);
    }
    match impl_item.node {
        // Associated const: type, then value.
        ConstImplItem(ref ty, ref expr) => {
            visitor.visit_ty(ty);
            visitor.visit_expr(expr);
        }
        // Methods in impls always have bodies, so they go through `visit_fn`.
        MethodImplItem(ref sig, ref body) => {
            visitor.visit_fn(FnKind::Method(impl_item.ident, sig, Some(impl_item.vis)), &sig.decl,
                             body, impl_item.span, impl_item.id);
        }
        TypeImplItem(ref ty) => {
            visitor.visit_ty(ty);
        }
    }
}
|
||||
|
||||
pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
struct_definition: &'v StructDef) {
|
||||
for field in &struct_definition.fields {
|
||||
visitor.visit_struct_field(field)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
struct_field: &'v StructField) {
|
||||
if let NamedField(name, _) = struct_field.node.kind {
|
||||
visitor.visit_ident(struct_field.span, name);
|
||||
}
|
||||
|
||||
visitor.visit_ty(&*struct_field.node.ty);
|
||||
|
||||
for attr in &struct_field.node.attrs {
|
||||
visitor.visit_attribute(attr);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) {
|
||||
for statement in &block.stmts {
|
||||
visitor.visit_stmt(&**statement)
|
||||
}
|
||||
walk_expr_opt(visitor, &block.expr)
|
||||
}
|
||||
|
||||
pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) {
|
||||
match statement.node {
|
||||
StmtDecl(ref declaration, _) => visitor.visit_decl(&**declaration),
|
||||
StmtExpr(ref expression, _) | StmtSemi(ref expression, _) => {
|
||||
visitor.visit_expr(&**expression)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_decl<'v, V: Visitor<'v>>(visitor: &mut V, declaration: &'v Decl) {
|
||||
match declaration.node {
|
||||
DeclLocal(ref local) => visitor.visit_local(&**local),
|
||||
DeclItem(ref item) => visitor.visit_item(&**item),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_expr_opt<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||
optional_expression: &'v Option<P<Expr>>) {
|
||||
match *optional_expression {
|
||||
None => {}
|
||||
Some(ref expression) => visitor.visit_expr(&**expression),
|
||||
}
|
||||
}
|
||||
|
||||
/// Visits each expression in a slice, in order.
pub fn walk_exprs<'v, V: Visitor<'v>>(visitor: &mut V, expressions: &'v [P<Expr>]) {
    for expression in expressions.iter() {
        visitor.visit_expr(&**expression)
    }
}
|
||||
|
||||
pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
|
||||
match expression.node {
|
||||
ExprBox(ref place, ref subexpression) => {
|
||||
place.as_ref().map(|e|visitor.visit_expr(&**e));
|
||||
visitor.visit_expr(&**subexpression)
|
||||
}
|
||||
ExprVec(ref subexpressions) => {
|
||||
walk_exprs(visitor, subexpressions)
|
||||
}
|
||||
ExprRepeat(ref element, ref count) => {
|
||||
visitor.visit_expr(&**element);
|
||||
visitor.visit_expr(&**count)
|
||||
}
|
||||
ExprStruct(ref path, ref fields, ref optional_base) => {
|
||||
visitor.visit_path(path, expression.id);
|
||||
for field in fields {
|
||||
visitor.visit_expr(&*field.expr)
|
||||
}
|
||||
walk_expr_opt(visitor, optional_base)
|
||||
}
|
||||
ExprTup(ref subexpressions) => {
|
||||
for subexpression in subexpressions {
|
||||
visitor.visit_expr(&**subexpression)
|
||||
}
|
||||
}
|
||||
ExprCall(ref callee_expression, ref arguments) => {
|
||||
for argument in arguments {
|
||||
visitor.visit_expr(&**argument)
|
||||
}
|
||||
visitor.visit_expr(&**callee_expression)
|
||||
}
|
||||
ExprMethodCall(_, ref types, ref arguments) => {
|
||||
walk_exprs(visitor, arguments);
|
||||
for typ in types {
|
||||
visitor.visit_ty(&**typ)
|
||||
}
|
||||
}
|
||||
ExprBinary(_, ref left_expression, ref right_expression) => {
|
||||
visitor.visit_expr(&**left_expression);
|
||||
visitor.visit_expr(&**right_expression)
|
||||
}
|
||||
ExprAddrOf(_, ref subexpression) | ExprUnary(_, ref subexpression) => {
|
||||
visitor.visit_expr(&**subexpression)
|
||||
}
|
||||
ExprLit(_) => {}
|
||||
ExprCast(ref subexpression, ref typ) => {
|
||||
visitor.visit_expr(&**subexpression);
|
||||
visitor.visit_ty(&**typ)
|
||||
}
|
||||
ExprIf(ref head_expression, ref if_block, ref optional_else) => {
|
||||
visitor.visit_expr(&**head_expression);
|
||||
visitor.visit_block(&**if_block);
|
||||
walk_expr_opt(visitor, optional_else)
|
||||
}
|
||||
ExprWhile(ref subexpression, ref block, _) => {
|
||||
visitor.visit_expr(&**subexpression);
|
||||
visitor.visit_block(&**block)
|
||||
}
|
||||
ExprLoop(ref block, _) => visitor.visit_block(&**block),
|
||||
ExprMatch(ref subexpression, ref arms, _) => {
|
||||
visitor.visit_expr(&**subexpression);
|
||||
for arm in arms {
|
||||
visitor.visit_arm(arm)
|
||||
}
|
||||
}
|
||||
ExprClosure(_, ref function_declaration, ref body) => {
|
||||
visitor.visit_fn(FnKind::Closure,
|
||||
&**function_declaration,
|
||||
&**body,
|
||||
expression.span,
|
||||
expression.id)
|
||||
}
|
||||
ExprBlock(ref block) => visitor.visit_block(&**block),
|
||||
ExprAssign(ref left_hand_expression, ref right_hand_expression) => {
|
||||
visitor.visit_expr(&**right_hand_expression);
|
||||
visitor.visit_expr(&**left_hand_expression)
|
||||
}
|
||||
ExprAssignOp(_, ref left_expression, ref right_expression) => {
|
||||
visitor.visit_expr(&**right_expression);
|
||||
visitor.visit_expr(&**left_expression)
|
||||
}
|
||||
ExprField(ref subexpression, _) => {
|
||||
visitor.visit_expr(&**subexpression);
|
||||
}
|
||||
ExprTupField(ref subexpression, _) => {
|
||||
visitor.visit_expr(&**subexpression);
|
||||
}
|
||||
ExprIndex(ref main_expression, ref index_expression) => {
|
||||
visitor.visit_expr(&**main_expression);
|
||||
visitor.visit_expr(&**index_expression)
|
||||
}
|
||||
ExprRange(ref start, ref end) => {
|
||||
walk_expr_opt(visitor, start);
|
||||
walk_expr_opt(visitor, end)
|
||||
}
|
||||
ExprPath(ref maybe_qself, ref path) => {
|
||||
if let Some(ref qself) = *maybe_qself {
|
||||
visitor.visit_ty(&qself.ty);
|
||||
}
|
||||
visitor.visit_path(path, expression.id)
|
||||
}
|
||||
ExprBreak(_) | ExprAgain(_) => {}
|
||||
ExprRet(ref optional_expression) => {
|
||||
walk_expr_opt(visitor, optional_expression)
|
||||
}
|
||||
ExprParen(ref subexpression) => {
|
||||
visitor.visit_expr(&**subexpression)
|
||||
}
|
||||
ExprInlineAsm(ref ia) => {
|
||||
for input in &ia.inputs {
|
||||
let (_, ref input) = *input;
|
||||
visitor.visit_expr(&**input)
|
||||
}
|
||||
for output in &ia.outputs {
|
||||
let (_, ref output, _) = *output;
|
||||
visitor.visit_expr(&**output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visitor.visit_expr_post(expression)
|
||||
}
|
||||
|
||||
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) {
|
||||
for pattern in &arm.pats {
|
||||
visitor.visit_pat(&**pattern)
|
||||
}
|
||||
walk_expr_opt(visitor, &arm.guard);
|
||||
visitor.visit_expr(&*arm.body);
|
||||
for attr in &arm.attrs {
|
||||
visitor.visit_attribute(attr);
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue