Index: kernel/arch/ppc32/include/atomic.h
===================================================================
--- kernel/arch/ppc32/include/atomic.h	(revision b8230b99f42baed609487cfaa5737fbb11968cc0)
+++ kernel/arch/ppc32/include/atomic.h	(revision ba7371f9d732a62dea4383ffb0e509bccd9f4e4a)
@@ -42,11 +42,11 @@
 	asm volatile (
 		"1:\n"
-		"lwarx %0, 0, %2\n"
-		"addic %0, %0, 1\n"
-		"stwcx. %0, 0, %2\n"
-		"bne- 1b"
-		: "=&r" (tmp),
+		"	lwarx %[tmp], 0, %[count_ptr]\n"
+		"	addic %[tmp], %[tmp], 1\n"
+		"	stwcx. %[tmp], 0, %[count_ptr]\n"
+		"	bne- 1b"
+		: [tmp] "=&r" (tmp),
 		  "=m" (val->count)
-		: "r" (&val->count),
+		: [count_ptr] "r" (&val->count),
 		  "m" (val->count)
 		: "cc"
@@ -60,11 +60,11 @@
 	asm volatile (
 		"1:\n"
-		"lwarx %0, 0, %2\n"
-		"addic %0, %0, -1\n"
-		"stwcx. %0, 0, %2\n"
-		"bne- 1b"
-		: "=&r" (tmp),
+		"	lwarx %[tmp], 0, %[count_ptr]\n"
+		"	addic %[tmp], %[tmp], -1\n"
+		"	stwcx. %[tmp], 0, %[count_ptr]\n"
+		"	bne- 1b"
+		: [tmp] "=&r" (tmp),
 		  "=m" (val->count)
-		: "r" (&val->count),
+		: [count_ptr] "r" (&val->count),
 		  "m" (val->count)
 		: "cc"
Index: kernel/arch/ppc32/include/barrier.h
===================================================================
--- kernel/arch/ppc32/include/barrier.h	(revision b8230b99f42baed609487cfaa5737fbb11968cc0)
+++ kernel/arch/ppc32/include/barrier.h	(revision ba7371f9d732a62dea4383ffb0e509bccd9f4e4a)
@@ -27,5 +27,5 @@
  */
 
-/** @addtogroup ppc32	
+/** @addtogroup ppc32
  * @{
  */
@@ -36,10 +36,18 @@
 #define KERN_ppc32_BARRIER_H_
 
-#define CS_ENTER_BARRIER()	asm volatile ("" ::: "memory")
-#define CS_LEAVE_BARRIER()	asm volatile ("" ::: "memory")
+#define CS_ENTER_BARRIER()  asm volatile ("" ::: "memory")
+#define CS_LEAVE_BARRIER()  asm volatile ("" ::: "memory")
 
-#define memory_barrier() asm volatile ("sync" ::: "memory")
-#define read_barrier() asm volatile ("sync" ::: "memory")
-#define write_barrier() asm volatile ("eieio" ::: "memory")
+#define memory_barrier()  asm volatile ("sync" ::: "memory")
+#define read_barrier()    asm volatile ("sync" ::: "memory")
+#define write_barrier()   asm volatile ("eieio" ::: "memory")
+
+#define instruction_barrier() \
+	asm volatile ( \
+		"sync\n" \
+		"isync\n" \
+	)
+
+#define COHERENCE_INVAL_MIN  4
 
 /*
@@ -53,33 +61,32 @@
 {
 	asm volatile (
-		"dcbst 0, %0\n"
+		"dcbst 0, %[addr]\n"
 		"sync\n"
-		"icbi 0, %0\n"
+		"icbi 0, %[addr]\n"
 		"sync\n"
 		"isync\n"
-		:: "r" (addr)
+		:: [addr] "r" (addr)
 	);
 }
 
-#define COHERENCE_INVAL_MIN	4
-
-static inline void smc_coherence_block(void *addr, unsigned long len)
+static inline void smc_coherence_block(void *addr, unsigned int len)
 {
-	unsigned long i;
-
-	for (i = 0; i < len; i += COHERENCE_INVAL_MIN) {
-		asm volatile ("dcbst 0, %0\n" :: "r" (addr + i));
-	}
-
-	asm volatile ("sync");
-
-	for (i = 0; i < len; i += COHERENCE_INVAL_MIN) {
-		asm volatile ("icbi 0, %0\n" :: "r" (addr + i));
-	}
-
-	asm volatile (
-		"sync\n"
-		"isync\n"
-	);
+	unsigned int i;
+
+	for (i = 0; i < len; i += COHERENCE_INVAL_MIN)
+		asm volatile (
+			"dcbst 0, %[addr]\n"
+			:: [addr] "r" (addr + i)
+		);
+
+	memory_barrier();
+
+	for (i = 0; i < len; i += COHERENCE_INVAL_MIN)
+		asm volatile (
+			"icbi 0, %[addr]\n"
+			:: [addr] "r" (addr + i)
+		);
+
+	instruction_barrier();
 }
 
Index: kernel/arch/ppc32/include/cycle.h
===================================================================
--- kernel/arch/ppc32/include/cycle.h	(revision b8230b99f42baed609487cfaa5737fbb11968cc0)
+++ kernel/arch/ppc32/include/cycle.h	(revision ba7371f9d732a62dea4383ffb0e509bccd9f4e4a)
@@ -40,17 +40,16 @@
 	uint32_t lower;
 	uint32_t upper;
-	uint32_t upper2;
+	uint32_t tmp;
 	
-	asm volatile (
-		"1: mftbu %0\n"
-		"mftb %1\n"
-		"mftbu %2\n"
-		"cmpw %0, %2\n"
-		"bne- 1b\n"
-		: "=r" (upper),
-		  "=r" (lower),
-		  "=r" (upper2)
-		:: "cr0"
-	);
+	do {
+		asm volatile (
+			"mftbu %[upper]\n"
+			"mftb %[lower]\n"
+			"mftbu %[tmp]\n"
+			: [upper] "=r" (upper),
+			  [lower] "=r" (lower),
+			  [tmp] "=r" (tmp)
+		);
+	} while (upper != tmp);
 	
 	return ((uint64_t) upper << 32) + (uint64_t) lower;
Index: kernel/arch/ppc32/src/proc/scheduler.c
===================================================================
--- kernel/arch/ppc32/src/proc/scheduler.c	(revision b8230b99f42baed609487cfaa5737fbb11968cc0)
+++ kernel/arch/ppc32/src/proc/scheduler.c	(revision ba7371f9d732a62dea4383ffb0e509bccd9f4e4a)
@@ -39,17 +39,21 @@
 #include <arch.h>
 
-/** Perform ppc32 specific tasks needed before the new task is run. */
+/** Perform ppc32 specific tasks needed before the new task is run.
+ *
+ */
 void before_task_runs_arch(void)
 {
 }
 
-/** Perform ppc32 specific tasks needed before the new thread is scheduled. */
+/** Perform ppc32 specific tasks needed before the new thread is scheduled.
+ *
+ */
 void before_thread_runs_arch(void)
 {
 	tlb_invalidate_all();
+
 	asm volatile (
-		"mtsprg0 %0\n"
-		:
-		: "r" (KA2PA(&THREAD->kstack[THREAD_STACK_SIZE - SP_DELTA]))
+		"mtsprg0 %[ksp]\n"
+		:: [ksp] "r" (KA2PA(&THREAD->kstack[THREAD_STACK_SIZE - SP_DELTA]))
 	);
 }
