replace #[inline(always)] with #[inline]. r=burningtree.

This commit is contained in:
Graydon Hoare 2013-06-18 14:45:18 -07:00
parent 303d7bfc87
commit d904c72af8
79 changed files with 1317 additions and 1317 deletions

View file

@ -82,7 +82,7 @@ impl AtomicFlag {
/**
* Clears the atomic flag
*/
#[inline(always)]
#[inline]
pub fn clear(&mut self, order: Ordering) {
unsafe {atomic_store(&mut self.v, 0, order)}
}
@ -91,7 +91,7 @@ impl AtomicFlag {
* Sets the flag if it was previously unset, returns the previous value of the
* flag.
*/
#[inline(always)]
#[inline]
pub fn test_and_set(&mut self, order: Ordering) -> bool {
unsafe {atomic_compare_and_swap(&mut self.v, 0, 1, order) > 0}
}
@ -102,26 +102,26 @@ impl AtomicBool {
AtomicBool { v: if v { 1 } else { 0 } }
}
#[inline(always)]
#[inline]
pub fn load(&self, order: Ordering) -> bool {
unsafe { atomic_load(&self.v, order) > 0 }
}
#[inline(always)]
#[inline]
pub fn store(&mut self, val: bool, order: Ordering) {
let val = if val { 1 } else { 0 };
unsafe { atomic_store(&mut self.v, val, order); }
}
#[inline(always)]
#[inline]
pub fn swap(&mut self, val: bool, order: Ordering) -> bool {
let val = if val { 1 } else { 0 };
unsafe { atomic_swap(&mut self.v, val, order) > 0}
}
#[inline(always)]
#[inline]
pub fn compare_and_swap(&mut self, old: bool, new: bool, order: Ordering) -> bool {
let old = if old { 1 } else { 0 };
let new = if new { 1 } else { 0 };
@ -135,34 +135,34 @@ impl AtomicInt {
AtomicInt { v:v }
}
#[inline(always)]
#[inline]
pub fn load(&self, order: Ordering) -> int {
unsafe { atomic_load(&self.v, order) }
}
#[inline(always)]
#[inline]
pub fn store(&mut self, val: int, order: Ordering) {
unsafe { atomic_store(&mut self.v, val, order); }
}
#[inline(always)]
#[inline]
pub fn swap(&mut self, val: int, order: Ordering) -> int {
unsafe { atomic_swap(&mut self.v, val, order) }
}
#[inline(always)]
#[inline]
pub fn compare_and_swap(&mut self, old: int, new: int, order: Ordering) -> int {
unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
}
/// Returns the old value (like __sync_fetch_and_add).
#[inline(always)]
#[inline]
pub fn fetch_add(&mut self, val: int, order: Ordering) -> int {
unsafe { atomic_add(&mut self.v, val, order) }
}
/// Returns the old value (like __sync_fetch_and_sub).
#[inline(always)]
#[inline]
pub fn fetch_sub(&mut self, val: int, order: Ordering) -> int {
unsafe { atomic_sub(&mut self.v, val, order) }
}
@ -173,34 +173,34 @@ impl AtomicUint {
AtomicUint { v:v }
}
#[inline(always)]
#[inline]
pub fn load(&self, order: Ordering) -> uint {
unsafe { atomic_load(&self.v, order) }
}
#[inline(always)]
#[inline]
pub fn store(&mut self, val: uint, order: Ordering) {
unsafe { atomic_store(&mut self.v, val, order); }
}
#[inline(always)]
#[inline]
pub fn swap(&mut self, val: uint, order: Ordering) -> uint {
unsafe { atomic_swap(&mut self.v, val, order) }
}
#[inline(always)]
#[inline]
pub fn compare_and_swap(&mut self, old: uint, new: uint, order: Ordering) -> uint {
unsafe { atomic_compare_and_swap(&mut self.v, old, new, order) }
}
/// Returns the old value (like __sync_fetch_and_add).
#[inline(always)]
#[inline]
pub fn fetch_add(&mut self, val: uint, order: Ordering) -> uint {
unsafe { atomic_add(&mut self.v, val, order) }
}
/// Returns the old value (like __sync_fetch_and_sub).
#[inline(always)]
#[inline]
pub fn fetch_sub(&mut self, val: uint, order: Ordering) -> uint {
unsafe { atomic_sub(&mut self.v, val, order) }
}
@ -211,22 +211,22 @@ impl<T> AtomicPtr<T> {
AtomicPtr { p:p }
}
#[inline(always)]
#[inline]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe { atomic_load(&self.p, order) }
}
#[inline(always)]
#[inline]
pub fn store(&mut self, ptr: *mut T, order: Ordering) {
unsafe { atomic_store(&mut self.p, ptr, order); }
}
#[inline(always)]
#[inline]
pub fn swap(&mut self, ptr: *mut T, order: Ordering) -> *mut T {
unsafe { atomic_swap(&mut self.p, ptr, order) }
}
#[inline(always)]
#[inline]
pub fn compare_and_swap(&mut self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
unsafe { atomic_compare_and_swap(&mut self.p, old, new, order) }
}
@ -249,7 +249,7 @@ impl<T> AtomicOption<T> {
}
}
#[inline(always)]
#[inline]
pub fn swap(&mut self, val: ~T, order: Ordering) -> Option<~T> {
unsafe {
let val = cast::transmute(val);
@ -265,7 +265,7 @@ impl<T> AtomicOption<T> {
}
}
#[inline(always)]
#[inline]
pub fn take(&mut self, order: Ordering) -> Option<~T> {
unsafe {
self.swap(cast::transmute(0), order)
@ -286,7 +286,7 @@ impl<T> Drop for AtomicOption<T> {
}
}
#[inline(always)]
#[inline]
pub unsafe fn atomic_store<T>(dst: &mut T, val: T, order:Ordering) {
let dst = cast::transmute(dst);
let val = cast::transmute(val);
@ -297,7 +297,7 @@ pub unsafe fn atomic_store<T>(dst: &mut T, val: T, order:Ordering) {
}
}
#[inline(always)]
#[inline]
pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
let dst = cast::transmute(dst);
@ -307,7 +307,7 @@ pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
})
}
#[inline(always)]
#[inline]
pub unsafe fn atomic_swap<T>(dst: &mut T, val: T, order: Ordering) -> T {
let dst = cast::transmute(dst);
let val = cast::transmute(val);
@ -320,7 +320,7 @@ pub unsafe fn atomic_swap<T>(dst: &mut T, val: T, order: Ordering) -> T {
}
/// Returns the old value (like __sync_fetch_and_add).
#[inline(always)]
#[inline]
pub unsafe fn atomic_add<T>(dst: &mut T, val: T, order: Ordering) -> T {
let dst = cast::transmute(dst);
let val = cast::transmute(val);
@ -333,7 +333,7 @@ pub unsafe fn atomic_add<T>(dst: &mut T, val: T, order: Ordering) -> T {
}
/// Returns the old value (like __sync_fetch_and_sub).
#[inline(always)]
#[inline]
pub unsafe fn atomic_sub<T>(dst: &mut T, val: T, order: Ordering) -> T {
let dst = cast::transmute(dst);
let val = cast::transmute(val);
@ -345,7 +345,7 @@ pub unsafe fn atomic_sub<T>(dst: &mut T, val: T, order: Ordering) -> T {
})
}
#[inline(always)]
#[inline]
pub unsafe fn atomic_compare_and_swap<T>(dst:&mut T, old:T, new:T, order: Ordering) -> T {
let dst = cast::transmute(dst);
let old = cast::transmute(old);

View file

@ -673,7 +673,7 @@ pub mod rt {
}
buf.push_str(s);
}
#[inline(always)]
#[inline]
pub fn have_flag(flags: u32, f: u32) -> bool {
flags & f != 0
}

View file

@ -152,7 +152,7 @@ unsafe fn fail_borrowed(box: *mut BoxRepr, file: *c_char, line: size_t) {
// FIXME #4942: Make these signatures agree with exchange_alloc's signatures
#[lang="exchange_malloc"]
#[inline(always)]
#[inline]
pub unsafe fn exchange_malloc(td: *c_char, size: uintptr_t) -> *c_char {
transmute(global_heap::malloc(transmute(td), transmute(size)))
}
@ -232,7 +232,7 @@ impl DebugPrints for io::fd_t {
// inside a landing pad may corrupt the state of the exception handler. If a
// problem occurs, call exit instead.
#[lang="exchange_free"]
#[inline(always)]
#[inline]
pub unsafe fn exchange_free(ptr: *c_char) {
global_heap::free(transmute(ptr))
}
@ -271,7 +271,7 @@ pub unsafe fn local_free(ptr: *c_char) {
}
#[lang="borrow_as_imm"]
#[inline(always)]
#[inline]
pub unsafe fn borrow_as_imm(a: *u8, file: *c_char, line: size_t) -> uint {
let a: *mut BoxRepr = transmute(a);
let old_ref_count = (*a).header.ref_count;
@ -289,7 +289,7 @@ pub unsafe fn borrow_as_imm(a: *u8, file: *c_char, line: size_t) -> uint {
}
#[lang="borrow_as_mut"]
#[inline(always)]
#[inline]
pub unsafe fn borrow_as_mut(a: *u8, file: *c_char, line: size_t) -> uint {
let a: *mut BoxRepr = transmute(a);
let old_ref_count = (*a).header.ref_count;
@ -346,7 +346,7 @@ pub unsafe fn unrecord_borrow(a: *u8, old_ref_count: uint,
}
#[lang="return_to_mut"]
#[inline(always)]
#[inline]
pub unsafe fn return_to_mut(a: *u8, orig_ref_count: uint,
file: *c_char, line: size_t) {
// Sometimes the box is null, if it is conditionally frozen.
@ -365,7 +365,7 @@ pub unsafe fn return_to_mut(a: *u8, orig_ref_count: uint,
}
#[lang="check_not_borrowed"]
#[inline(always)]
#[inline]
pub unsafe fn check_not_borrowed(a: *u8,
file: *c_char,
line: size_t) {
@ -378,7 +378,7 @@ pub unsafe fn check_not_borrowed(a: *u8,
}
#[lang="strdup_uniq"]
#[inline(always)]
#[inline]
pub unsafe fn strdup_uniq(ptr: *c_uchar, len: uint) -> ~str {
str::raw::from_buf_len(ptr, len)
}

View file

@ -40,7 +40,7 @@ impl<T: Owned> UnsafeAtomicRcBox<T> {
}
}
#[inline(always)]
#[inline]
pub unsafe fn get(&self) -> *mut T
{
let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
@ -50,7 +50,7 @@ impl<T: Owned> UnsafeAtomicRcBox<T> {
return r;
}
#[inline(always)]
#[inline]
pub unsafe fn get_immut(&self) -> *T
{
let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
@ -118,7 +118,7 @@ fn LittleLock() -> LittleLock {
}
impl LittleLock {
#[inline(always)]
#[inline]
pub unsafe fn lock<T>(&self, f: &fn() -> T) -> T {
do atomically {
rust_lock_little_lock(self.l);
@ -169,7 +169,7 @@ impl<T:Owned> Exclusive<T> {
// Currently, scheduling operations (i.e., yielding, receiving on a pipe,
// accessing the provided condition variable) are prohibited while inside
// the exclusive. Supporting that is a work in progress.
#[inline(always)]
#[inline]
pub unsafe fn with<U>(&self, f: &fn(x: &mut T) -> U) -> U {
let rec = self.x.get();
do (*rec).lock.lock {
@ -183,7 +183,7 @@ impl<T:Owned> Exclusive<T> {
}
}
#[inline(always)]
#[inline]
pub unsafe fn with_imm<U>(&self, f: &fn(x: &T) -> U) -> U {
do self.with |x| {
f(cast::transmute_immut(x))