Index: kernel/arch/arm32/include/asm.h
===================================================================
--- kernel/arch/arm32/include/asm.h	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/include/asm.h	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -27,8 +27,8 @@
  */
 
-/** @addtogroup arm32	
+/** @addtogroup arm32
  * @{
  */
-/** @file 
+/** @file
  *  @brief Declarations of functions implemented in assembly.
  */
@@ -78,8 +78,9 @@
 
 /** Return base address of current stack.
- * 
+ *
  * Return the base address of the current stack.
  * The stack is assumed to be STACK_SIZE bytes long.
  * The stack must start on page boundary.
+ *
  */
 static inline uintptr_t get_stack_base(void)
@@ -87,7 +88,7 @@
 	uintptr_t v;
 	asm volatile (
-		"and %0, sp, %1\n" 
-		: "=r" (v) 
-		: "r" (~(STACK_SIZE - 1))
+		"and %[v], sp, %[size]\n"
+		: [v] "=r" (v)
+		: [size] "r" (~(STACK_SIZE - 1))
 	);
 	return v;
Index: kernel/arch/arm32/include/atomic.h
===================================================================
--- kernel/arch/arm32/include/atomic.h	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/include/atomic.h	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -27,8 +27,8 @@
  */
 
-/** @addtogroup arm32	
+/** @addtogroup arm32
  * @{
  */
-/** @file 
+/** @file
  *  @brief Atomic operations.
  */
@@ -43,4 +43,5 @@
  *
  * @return Value after addition.
+ *
  */
 static inline long atomic_add(atomic_t *val, int i)
@@ -48,19 +49,18 @@
 	int ret;
 	volatile long *mem = &(val->count);
-
+	
 	asm volatile (
-	"1:\n"
-		"ldr r2, [%1]       \n"
-		"add r3, r2, %2     \n"
-		"str r3, %0         \n"
-		"swp r3, r3, [%1]   \n"
-		"cmp r3, r2         \n"
-		"bne 1b             \n"
-
-		: "=m" (ret)
-		: "r" (mem), "r" (i)
+		"1:\n"
+			"ldr r2, [%[mem]]\n"
+			"add r3, r2, %[i]\n"
+			"str r3, %[ret]\n"
+			"swp r3, r3, [%[mem]]\n"
+			"cmp r3, r2\n"
+			"bne 1b\n"
+		: [ret] "=m" (ret)
+		: [mem] "r" (mem), [i] "r" (i)
 		: "r3", "r2"
 	);
-
+	
 	return ret;
 }
Index: kernel/arch/arm32/include/barrier.h
===================================================================
--- kernel/arch/arm32/include/barrier.h	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/include/barrier.h	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -27,5 +27,5 @@
  */
 
-/** @addtogroup arm32	
+/** @addtogroup arm32
  * @{
  */
@@ -40,10 +40,10 @@
  * TODO: implement true ARM memory barriers for macros below.
  */
-#define CS_ENTER_BARRIER()	asm volatile ("" ::: "memory")
-#define CS_LEAVE_BARRIER()	asm volatile ("" ::: "memory")
+#define CS_ENTER_BARRIER()  asm volatile ("" ::: "memory")
+#define CS_LEAVE_BARRIER()  asm volatile ("" ::: "memory")
 
-#define memory_barrier()        asm volatile ("" ::: "memory")
-#define read_barrier()          asm volatile ("" ::: "memory")
-#define write_barrier()         asm volatile ("" ::: "memory")
+#define memory_barrier()  asm volatile ("" ::: "memory")
+#define read_barrier()    asm volatile ("" ::: "memory")
+#define write_barrier()   asm volatile ("" ::: "memory")
 
 #define smc_coherence(a)
Index: kernel/arch/arm32/include/mm/page.h
===================================================================
--- kernel/arch/arm32/include/mm/page.h	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/include/mm/page.h	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -194,7 +194,6 @@
 {
 	asm volatile (
-		"mcr p15, 0, %0, c2, c0, 0 \n"
-		:
-		: "r"(pt)
+		"mcr p15, 0, %[pt], c2, c0, 0\n"
+		:: [pt] "r" (pt)
 	);
 }
Index: kernel/arch/arm32/include/regutils.h
===================================================================
--- kernel/arch/arm32/include/regutils.h	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/include/regutils.h	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -27,5 +27,5 @@
  */
 
-/** @addtogroup arm32	
+/** @addtogroup arm32
  * @{
  */
@@ -58,5 +58,8 @@
 { \
 	uint32_t retval; \
-	asm volatile("mrs %0, " #reg : "=r" (retval)); \
+	asm volatile( \
+		"mrs %[retval], " #reg \
+		: [retval] "=r" (retval) \
+	); \
 	return retval; \
 }
@@ -65,5 +68,8 @@
 static inline void nm## _status_reg_ ##fieldname## _write(uint32_t value) \
 { \
-	asm volatile("msr " #reg "_" #field ", %0" : : "r" (value)); \
+	asm volatile( \
+		"msr " #reg "_" #field ", %[value]" \
+		:: [value] "r" (value) \
+	); \
 }
 
Index: kernel/arch/arm32/src/cpu/cpu.c
===================================================================
--- kernel/arch/arm32/src/cpu/cpu.c	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/src/cpu/cpu.c	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -37,5 +37,5 @@
 #include <cpu.h>
 #include <arch.h>
-#include <print.h>	
+#include <print.h>
 
 /** Number of indexes left out in the #imp_data array */
@@ -83,8 +83,8 @@
 	uint32_t ident;
 	asm volatile (
-		"mrc p15, 0, %0, c0, c0, 0\n"
-		: "=r" (ident)
+		"mrc p15, 0, %[ident], c0, c0, 0\n"
+		: [ident] "=r" (ident)
 	);
-
+	
 	cpu->imp_num = ident >> 24;
 	cpu->variant_num = (ident << 8) >> 28;
Index: kernel/arch/arm32/src/exception.c
===================================================================
--- kernel/arch/arm32/src/exception.c	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/src/exception.c	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -64,55 +64,58 @@
  * Temporary exception stack is used to save a few registers
  * before stack switch takes place.
+ *
  */
 inline static void setup_stack_and_save_regs()
 {
-	asm volatile(
-		"ldr r13, =exc_stack		\n"
-		"stmfd r13!, {r0}		\n"
-		"mrs r0, spsr			\n"
-		"and r0, r0, #0x1f		\n"
-		"cmp r0, #0x10			\n"
-		"bne 1f				\n"
-
+	asm volatile (
+		"ldr r13, =exc_stack\n"
+		"stmfd r13!, {r0}\n"
+		"mrs r0, spsr\n"
+		"and r0, r0, #0x1f\n"
+		"cmp r0, #0x10\n"
+		"bne 1f\n"
+		
 		/* prev mode was usermode */
-		"ldmfd r13!, {r0}		\n"
-		"ldr r13, =supervisor_sp	\n"
-		"ldr r13, [r13]			\n"
-		"stmfd r13!, {lr}		\n"
-		"stmfd r13!, {r0-r12}		\n"
-		"stmfd r13!, {r13, lr}^		\n"
-		"mrs r0, spsr			\n"
-		"stmfd r13!, {r0}		\n"
-		"b 2f				\n"
-
+		"ldmfd r13!, {r0}\n"
+		"ldr r13, =supervisor_sp\n"
+		"ldr r13, [r13]\n"
+		"stmfd r13!, {lr}\n"
+		"stmfd r13!, {r0-r12}\n"
+		"stmfd r13!, {r13, lr}^\n"
+		"mrs r0, spsr\n"
+		"stmfd r13!, {r0}\n"
+		"b 2f\n"
+		
 		/* mode was not usermode */
-	"1:\n"
-		"stmfd r13!, {r1, r2, r3}	\n"
-		"mrs r1, cpsr			\n"
-		"mov r2, lr			\n"
-		"bic r1, r1, #0x1f		\n"
-		"orr r1, r1, r0			\n"
-		"mrs r0, cpsr			\n"
-		"msr cpsr_c, r1			\n"
-
-		"mov r3, r13			\n"
-		"stmfd r13!, {r2}		\n"
-		"mov r2, lr			\n"
-		"stmfd r13!, {r4-r12}		\n"
-		"mov r1, r13			\n"
-		/* the following two lines are for debugging */
-		"mov sp, #0			\n"
-		"mov lr, #0			\n"
-		"msr cpsr_c, r0			\n"
-
-		"ldmfd r13!, {r4, r5, r6, r7}	\n"
-		"stmfd r1!, {r4, r5, r6}	\n"
-		"stmfd r1!, {r7}		\n"
-		"stmfd r1!, {r2}		\n"
-		"stmfd r1!, {r3}		\n"
-		"mrs r0, spsr			\n"
-		"stmfd r1!, {r0}		\n"
-		"mov r13, r1			\n"
-	"2:\n"
+		"1:\n"
+			"stmfd r13!, {r1, r2, r3}\n"
+			"mrs r1, cpsr\n"
+			"mov r2, lr\n"
+			"bic r1, r1, #0x1f\n"
+			"orr r1, r1, r0\n"
+			"mrs r0, cpsr\n"
+			"msr cpsr_c, r1\n"
+			
+			"mov r3, r13\n"
+			"stmfd r13!, {r2}\n"
+			"mov r2, lr\n"
+			"stmfd r13!, {r4-r12}\n"
+			"mov r1, r13\n"
+			
+			/* the following two lines are for debugging */
+			"mov sp, #0\n"
+			"mov lr, #0\n"
+			"msr cpsr_c, r0\n"
+			
+			"ldmfd r13!, {r4, r5, r6, r7}\n"
+			"stmfd r1!, {r4, r5, r6}\n"
+			"stmfd r1!, {r7}\n"
+			"stmfd r1!, {r2}\n"
+			"stmfd r1!, {r3}\n"
+			"mrs r0, spsr\n"
+			"stmfd r1!, {r0}\n"
+			"mov r13, r1\n"
+			
+		"2:\n"
 	);
 }
@@ -190,8 +193,11 @@
 
 /** Calls exception dispatch routine. */
-#define CALL_EXC_DISPATCH(exception)		\
-	asm("mov r0, %0" : : "i" (exception));	\
-	asm("mov r1, r13");			\
-	asm("bl exc_dispatch");		
+#define CALL_EXC_DISPATCH(exception) \
+	asm volatile ( \
+		"mov r0, %[exc]\n" \
+		"mov r1, r13\n" \
+		"bl exc_dispatch\n" \
+		:: [exc] "i" (exception) \
+	);
 
 /** General exception handler.
@@ -202,7 +208,7 @@
  *  @param exception Exception number.
  */
-#define PROCESS_EXCEPTION(exception)		\
-	setup_stack_and_save_regs();		\
-	CALL_EXC_DISPATCH(exception)		\
+#define PROCESS_EXCEPTION(exception) \
+	setup_stack_and_save_regs(); \
+	CALL_EXC_DISPATCH(exception) \
 	load_regs();
 
@@ -334,15 +340,21 @@
 	uint32_t control_reg;
 	
-	asm volatile("mrc p15, 0, %0, c1, c1" : "=r" (control_reg));
+	asm volatile (
+		"mrc p15, 0, %[control_reg], c1, c1"
+		: [control_reg] "=r" (control_reg)
+	);
 	
 	/* switch on the high vectors bit */
 	control_reg |= CP15_R1_HIGH_VECTORS_BIT;
 	
-	asm volatile("mcr p15, 0, %0, c1, c1" : : "r" (control_reg));
+	asm volatile (
+		"mcr p15, 0, %[control_reg], c1, c1"
+		:: [control_reg] "r" (control_reg)
+	);
 }
 #endif
 
 /** Initializes exception handling.
- * 
+ *
  * Installs low-level exception handlers and then registers
  * exceptions and their handlers to kernel exception dispatcher.
Index: kernel/arch/arm32/src/mm/page_fault.c
===================================================================
--- kernel/arch/arm32/src/mm/page_fault.c	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/src/mm/page_fault.c	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -50,10 +50,11 @@
 {
 	fault_status_union_t fsu;
-
+	
 	/* fault status is stored in CP15 register 5 */
 	asm volatile (
-		"mrc p15, 0, %0, c5, c0, 0"
-		: "=r"(fsu.dummy)
+		"mrc p15, 0, %[dummy], c5, c0, 0"
+		: [dummy] "=r" (fsu.dummy)
 	);
+	
 	return fsu.fs;
 }
@@ -62,15 +63,16 @@
  *
  * @return FAR (fault address register) content (address that caused a page
- * 	   fault)
+ *         fault)
  */
 static inline uintptr_t read_fault_address_register(void)
 {
 	uintptr_t ret;
-
+	
 	/* fault adress is stored in CP15 register 6 */
 	asm volatile (
-		"mrc p15, 0, %0, c6, c0, 0"
-		: "=r"(ret)
+		"mrc p15, 0, %[ret], c6, c0, 0"
+		: [ret] "=r" (ret)
 	);
+	
 	return ret;
 }
@@ -81,27 +83,24 @@
  *
  * @return true when instruction is load/store, false otherwise
+ *
  */
 static inline bool is_load_store_instruction(instruction_t instr)
 {
 	/* load store immediate offset */
-	if (instr.type == 0x2) {
-		return true;
-	}
-
+	if (instr.type == 0x2)
+		return true;
+	
 	/* load store register offset */
-	if (instr.type == 0x3 && instr.bit4 == 0) {
-		return true;
-	}
-
+	if ((instr.type == 0x3) && (instr.bit4 == 0))
+		return true;
+	
 	/* load store multiple */
-	if (instr.type == 0x4) {
-		return true;
-	}
-
+	if (instr.type == 0x4)
+		return true;
+	
 	/* oprocessor load/store */
-	if (instr.type == 0x6) {
-		return true;
-	}
-
+	if (instr.type == 0x6)
+		return true;
+	
 	return false;
 }
@@ -116,10 +115,9 @@
 {
 	/* swap, swapb instruction */
-	if (instr.type == 0x0 &&
-	    (instr.opcode == 0x8 || instr.opcode == 0xa) &&
-	    instr.access == 0x0 && instr.bits567 == 0x4 && instr.bit4 == 1) {
-		return true;
-	}
-
+	if ((instr.type == 0x0) &&
+	    ((instr.opcode == 0x8) || (instr.opcode == 0xa)) &&
+	    (instr.access == 0x0) && (instr.bits567 == 0x4) && (instr.bit4 == 1))
+		return true;
+	
 	return false;
 }
Index: kernel/arch/arm32/src/mm/tlb.c
===================================================================
--- kernel/arch/arm32/src/mm/tlb.c	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/src/mm/tlb.c	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -49,5 +49,5 @@
 		"eor r1, r1\n"
 		"mcr p15, 0, r1, c8, c7, 0\n"
-		: : : "r1"
+		::: "r1"
 	);
 }
@@ -69,7 +69,6 @@
 {
 	asm volatile (
-		"mcr p15, 0, %0, c8, c7, 1"		
-		:
-		: "r" (page)
+		"mcr p15, 0, %[page], c8, c7, 1\n"
+		:: [page] "r" (page)
 	);
 }
Index: kernel/arch/arm32/src/userspace.c
===================================================================
--- kernel/arch/arm32/src/userspace.c	(revision f24d300581dcff87298e80ad8a95d552d34abead)
+++ kernel/arch/arm32/src/userspace.c	(revision da58187297a4ba3471d2a800ec2cfb016c16e6ae)
@@ -91,10 +91,9 @@
 	/* set user mode, set registers, jump */
 	asm volatile (
-		"mov sp, %0			\n"
-		"msr spsr_c, %1			\n"
-		"ldmfd sp!, {r0-r12, sp, lr}^	\n"
+		"mov sp, %[ustate]\n"
+		"msr spsr_c, %[user_mode]\n"
+		"ldmfd sp!, {r0-r12, sp, lr}^\n"
 		"ldmfd sp!, {pc}^\n"
-		:
-		: "r" (&ustate), "r" (user_mode)
+		:: [ustate] "r" (&ustate), [user_mode] "r" (user_mode)
 	);
 
