Fix more misspelled comments and strings.

This commit is contained in:
Joseph Crail 2014-06-09 00:00:52 -04:00
parent 0ee6a8e8a5
commit c2c9946372
48 changed files with 64 additions and 64 deletions

View file

@@ -189,7 +189,7 @@ fn describe_codegen_flags() {
}
}
/// Process command line options. Emits messages as appropirate.If compilation
/// Process command line options. Emits messages as appropriate. If compilation
/// should continue, returns a getopts::Matches object parsed from args, otherwise
/// returns None.
pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {

View file

@@ -551,7 +551,7 @@ are computed based on the kind of borrow:
The reasoning here is that a mutable borrow must be the only writer,
therefore it prevents other writes (`MUTATE`), mutable borrows
(`CLAIM`), and immutable borrows (`FREEZE`). An immutable borrow
permits other immutable borrows but forbids writes and mutable borows.
permits other immutable borrows but forbids writes and mutable borrows.
Finally, a const borrow just wants to be sure that the value is not
moved out from under it, so no actions are forbidden.

View file

@@ -438,7 +438,7 @@ impl<'d,'t,TYPER:mc::Typer> ExprUseVisitor<'d,'t,TYPER> {
None => {
self.tcx().sess.span_bug(
callee.span,
format!("unxpected callee type {}",
format!("unexpected callee type {}",
callee_ty.repr(self.tcx())).as_slice());
}
}

View file

@@ -257,7 +257,7 @@ impl<N,E> Graph<N,E> {
//
// A common use for graphs in our compiler is to perform
// fixed-point iteration. In this case, each edge represents a
// constaint, and the nodes themselves are associated with
// constraint, and the nodes themselves are associated with
// variables or other bitsets. This method facilitates such a
// computation.

View file

@@ -31,7 +31,7 @@
* is the address of the lvalue. If Expr is an rvalue, this is the address of
* some temporary spot in memory where the result is stored.
*
* Now, cat_expr() classies the expression Expr and the address A=ToAddr(Expr)
* Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr)
* as follows:
*
* - cat: what kind of expression was this? This is a subset of the
@@ -42,7 +42,7 @@
*
* The resulting categorization tree differs somewhat from the expressions
* themselves. For example, auto-derefs are explicit. Also, an index a[b] is
* decomposed into two operations: a derefence to reach the array data and
* decomposed into two operations: a dereference to reach the array data and
* then an index to jump forward to the relevant item.
*
* ## By-reference upvars

View file

@@ -39,7 +39,7 @@ The region maps encode information about region relationships.
- `scope_map` maps from a scope id to the enclosing scope id; this is
usually corresponding to the lexical nesting, though in the case of
closures the parent scope is the innermost conditinal expression or repeating
closures the parent scope is the innermost conditional expression or repeating
block
- `var_map` maps from a variable or binding id to the block in which

View file

@@ -717,7 +717,7 @@ pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
let ty = type_of::type_of(bcx.ccx(), *nullfields.get(ix));
assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
// The contents of memory at this pointer can't matter, but use
// the value that's "reasonable" in case of pointer comparision.
// the value that's "reasonable" in case of pointer comparison.
PointerCast(bcx, val, ty.ptr_to())
}
RawNullablePointer { nndiscr, nnty, .. } => {

View file

@@ -1571,7 +1571,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span,
for var in variants.iter() {
let mut size = 0;
for field in var.fields.iter().skip(1) {
// skip the dicriminant
// skip the discriminant
size += llsize_of_real(ccx, sizing_type_of(ccx, *field));
}
sizes.push(size);
@@ -2318,7 +2318,7 @@ pub fn trans_crate(krate: ast::Crate,
// LLVM code generator emits a ".file filename" directive
// for ELF backends. Value of the "filename" is set as the
// LLVM module identifier. Due to a LLVM MC bug[1], LLVM
// crashes if the module identifer is same as other symbols
// crashes if the module identifier is same as other symbols
// such as a function name in the module.
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
let mut llmod_id = link_meta.crateid.name.clone();

View file

@@ -1527,7 +1527,7 @@ impl EnumMemberDescriptionFactory {
// As far as debuginfo is concerned, the pointer this enum represents is still
// wrapped in a struct. This is to make the DWARF representation of enums uniform.
// First create a description of the artifical wrapper struct:
// First create a description of the artificial wrapper struct:
let non_null_variant = self.variants.get(non_null_variant_index as uint);
let non_null_variant_ident = non_null_variant.name;
let non_null_variant_name = token::get_ident(non_null_variant_ident);

View file

@@ -204,7 +204,7 @@ impl FnStyleState {
}
/// Whether `check_binop` is part of an assignment or not.
/// Used to know wether we allow user overloads and to print
/// Used to know whether we allow user overloads and to print
/// better messages on error.
#[deriving(PartialEq)]
enum IsBinopAssignment{
@@ -3702,7 +3702,7 @@ pub fn check_const_with_ty(fcx: &FnCtxt,
e: &ast::Expr,
declty: ty::t) {
// Gather locals in statics (because of block expressions).
// This is technically uneccessary because locals in static items are forbidden,
// This is technically unnecessary because locals in static items are forbidden,
// but prevents type checking from blowing up before const checking can properly
// emit a error.
GatherLocalsVisitor { fcx: fcx }.visit_expr(e, ());
@@ -4174,7 +4174,7 @@ pub fn instantiate_path(fcx: &FnCtxt,
}
None => {
fcx.tcx().sess.span_bug(span,
"missing default for a not explicitely provided type param")
"missing default for a not explicitly provided type param")
}
}
}

View file

@@ -180,7 +180,7 @@ impl<'f> Coerce<'f> {
self.unpack_actual_value(a, |sty_a| {
match *sty_a {
ty::ty_bare_fn(ref a_f) => {
// Bare functions are coercable to any closure type.
// Bare functions are coercible to any closure type.
//
// FIXME(#3320) this should go away and be
// replaced with proper inference, got a patch

View file

@@ -372,7 +372,7 @@ pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<
pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
// This is a horible hack - historically, [T] was not treated as a type,
// This is a horrible hack - historically, [T] was not treated as a type,
// so, for example, &T and &[U] should not unify. In fact the only thing
// &[U] should unify with is &[T]. We preserve that behaviour with this
// check.

View file

@@ -17,7 +17,7 @@ works, it often happens that errors are not detected until far after
the relevant line of code has been type-checked. Therefore, there is
an elaborate system to track why a particular constraint in the
inference graph arose so that we can explain to the user what gave
rise to a patricular error.
rise to a particular error.
The basis of the system are the "origin" types. An "origin" is the
reason that a constraint or inference variable arose. There are

View file

@@ -19,7 +19,7 @@
* The code in here is defined quite generically so that it can be
* applied both to type variables, which represent types being inferred,
* and fn variables, which represent function types being inferred.
* It may eventually be applied to ther types as well, who knows.
* It may eventually be applied to their types as well, who knows.
* In some cases, the functions are also generic with respect to the
* operation on the lattice (GLB vs LUB).
*

View file

@@ -362,7 +362,7 @@ identify and remove strongly connected components (SCC) in the graph.
Note that such components must consist solely of region variables; all
of these variables can effectively be unified into a single variable.
Once SCCs are removed, we are left with a DAG. At this point, we
could walk the DAG in toplogical order once to compute the expanding
could walk the DAG in topological order once to compute the expanding
nodes, and again in reverse topological order to compute the
contracting nodes. However, as I said, this does not work given the
current treatment of closure bounds, but perhaps in the future we can
@@ -617,7 +617,7 @@ created to replace the bound regions in the input types, but it also
contains 'intermediate' variables created to represent the LUB/GLB of
individual regions. Basically, when asked to compute the LUB/GLB of a
region variable with another region, the inferencer cannot oblige
immediately since the valuese of that variables are not known.
immediately since the values of that variables are not known.
Therefore, it creates a new variable that is related to the two
regions. For example, the LUB of two variables `$x` and `$y` is a
fresh variable `$z` that is constrained such that `$x <= $z` and `$y

View file

@@ -485,7 +485,7 @@ impl<'a> Visitor<()> for ConstraintContext<'a> {
let variant =
ty::VariantInfo::from_ast_variant(tcx,
ast_variant,
/*discrimant*/ 0);
/*discriminant*/ 0);
for &arg_ty in variant.args.iter() {
self.add_constraints_from_ty(arg_ty, self.covariant);
}

View file

@@ -61,12 +61,12 @@ fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
let (new_high_bits, new_low_bits) = bytes.to_bits();
if new_high_bits > Zero::zero() {
fail!("numeric overflow occured.")
fail!("numeric overflow occurred.")
}
match bits.checked_add(&new_low_bits) {
Some(x) => return x,
None => fail!("numeric overflow occured.")
None => fail!("numeric overflow occurred.")
}
}