Index: kernel/arch/arm32/src/cpu/cpu.c
===================================================================
--- kernel/arch/arm32/src/cpu/cpu.c	(revision e49e2348e7647a4dd4d4e2879082790396c4f4f3)
+++ kernel/arch/arm32/src/cpu/cpu.c	(revision 5d8d71ebf89df6dbc2bd7f5a55aa3d4c73a286cd)
@@ -37,5 +37,5 @@
 #include <cpu.h>
 #include <arch.h>
-#include <print.h>	
+#include <print.h>
 
 /** Number of indexes left out in the #imp_data array */
@@ -83,8 +83,8 @@
 	uint32_t ident;
 	asm volatile (
-		"mrc p15, 0, %0, c0, c0, 0\n"
-		: "=r" (ident)
+		"mrc p15, 0, %[ident], c0, c0, 0\n"
+		: [ident] "=r" (ident)
 	);
-
+	
 	cpu->imp_num = ident >> 24;
 	cpu->variant_num = (ident << 8) >> 28;
Index: kernel/arch/arm32/src/exception.c
===================================================================
--- kernel/arch/arm32/src/exception.c	(revision e49e2348e7647a4dd4d4e2879082790396c4f4f3)
+++ kernel/arch/arm32/src/exception.c	(revision 5d8d71ebf89df6dbc2bd7f5a55aa3d4c73a286cd)
@@ -64,55 +64,58 @@
  * Temporary exception stack is used to save a few registers
  * before stack switch takes place.
+ *
  */
 inline static void setup_stack_and_save_regs()
 {
-	asm volatile(
-		"ldr r13, =exc_stack		\n"
-		"stmfd r13!, {r0}		\n"
-		"mrs r0, spsr			\n"
-		"and r0, r0, #0x1f		\n"
-		"cmp r0, #0x10			\n"
-		"bne 1f				\n"
-
+	asm volatile (
+		"ldr r13, =exc_stack\n"
+		"stmfd r13!, {r0}\n"
+		"mrs r0, spsr\n"
+		"and r0, r0, #0x1f\n"
+		"cmp r0, #0x10\n"
+		"bne 1f\n"
+		
 		/* prev mode was usermode */
-		"ldmfd r13!, {r0}		\n"
-		"ldr r13, =supervisor_sp	\n"
-		"ldr r13, [r13]			\n"
-		"stmfd r13!, {lr}		\n"
-		"stmfd r13!, {r0-r12}		\n"
-		"stmfd r13!, {r13, lr}^		\n"
-		"mrs r0, spsr			\n"
-		"stmfd r13!, {r0}		\n"
-		"b 2f				\n"
-
+		"ldmfd r13!, {r0}\n"
+		"ldr r13, =supervisor_sp\n"
+		"ldr r13, [r13]\n"
+		"stmfd r13!, {lr}\n"
+		"stmfd r13!, {r0-r12}\n"
+		"stmfd r13!, {r13, lr}^\n"
+		"mrs r0, spsr\n"
+		"stmfd r13!, {r0}\n"
+		"b 2f\n"
+		
 		/* mode was not usermode */
-	"1:\n"
-		"stmfd r13!, {r1, r2, r3}	\n"
-		"mrs r1, cpsr			\n"
-		"mov r2, lr			\n"
-		"bic r1, r1, #0x1f		\n"
-		"orr r1, r1, r0			\n"
-		"mrs r0, cpsr			\n"
-		"msr cpsr_c, r1			\n"
-
-		"mov r3, r13			\n"
-		"stmfd r13!, {r2}		\n"
-		"mov r2, lr			\n"
-		"stmfd r13!, {r4-r12}		\n"
-		"mov r1, r13			\n"
-		/* the following two lines are for debugging */
-		"mov sp, #0			\n"
-		"mov lr, #0			\n"
-		"msr cpsr_c, r0			\n"
-
-		"ldmfd r13!, {r4, r5, r6, r7}	\n"
-		"stmfd r1!, {r4, r5, r6}	\n"
-		"stmfd r1!, {r7}		\n"
-		"stmfd r1!, {r2}		\n"
-		"stmfd r1!, {r3}		\n"
-		"mrs r0, spsr			\n"
-		"stmfd r1!, {r0}		\n"
-		"mov r13, r1			\n"
-	"2:\n"
+		"1:\n"
+			"stmfd r13!, {r1, r2, r3}\n"
+			"mrs r1, cpsr\n"
+			"mov r2, lr\n"
+			"bic r1, r1, #0x1f\n"
+			"orr r1, r1, r0\n"
+			"mrs r0, cpsr\n"
+			"msr cpsr_c, r1\n"
+			
+			"mov r3, r13\n"
+			"stmfd r13!, {r2}\n"
+			"mov r2, lr\n"
+			"stmfd r13!, {r4-r12}\n"
+			"mov r1, r13\n"
+			
+			/* the following two lines are for debugging */
+			"mov sp, #0\n"
+			"mov lr, #0\n"
+			"msr cpsr_c, r0\n"
+			
+			"ldmfd r13!, {r4, r5, r6, r7}\n"
+			"stmfd r1!, {r4, r5, r6}\n"
+			"stmfd r1!, {r7}\n"
+			"stmfd r1!, {r2}\n"
+			"stmfd r1!, {r3}\n"
+			"mrs r0, spsr\n"
+			"stmfd r1!, {r0}\n"
+			"mov r13, r1\n"
+			
+		"2:\n"
 	);
 }
@@ -190,8 +193,11 @@
 
 /** Calls exception dispatch routine. */
-#define CALL_EXC_DISPATCH(exception)		\
-	asm("mov r0, %0" : : "i" (exception));	\
-	asm("mov r1, r13");			\
-	asm("bl exc_dispatch");		
+#define CALL_EXC_DISPATCH(exception) \
+	asm volatile ( \
+		"mov r0, %[exc]\n" \
+		"mov r1, r13\n" \
+		"bl exc_dispatch\n" \
+		:: [exc] "i" (exception) \
+	);
 
 /** General exception handler.
@@ -202,7 +208,7 @@
  *  @param exception Exception number.
  */
-#define PROCESS_EXCEPTION(exception)		\
-	setup_stack_and_save_regs();		\
-	CALL_EXC_DISPATCH(exception)		\
+#define PROCESS_EXCEPTION(exception) \
+	setup_stack_and_save_regs(); \
+	CALL_EXC_DISPATCH(exception) \
 	load_regs();
 
@@ -334,15 +340,21 @@
 	uint32_t control_reg;
 	
-	asm volatile("mrc p15, 0, %0, c1, c1" : "=r" (control_reg));
+	asm volatile (
+		"mrc p15, 0, %[control_reg], c1, c1"
+		: [control_reg] "=r" (control_reg)
+	);
 	
 	/* switch on the high vectors bit */
 	control_reg |= CP15_R1_HIGH_VECTORS_BIT;
 	
-	asm volatile("mcr p15, 0, %0, c1, c1" : : "r" (control_reg));
+	asm volatile (
+		"mcr p15, 0, %[control_reg], c1, c1"
+		:: [control_reg] "r" (control_reg)
+	);
 }
 #endif
 
 /** Initializes exception handling.
- * 
+ *
  * Installs low-level exception handlers and then registers
  * exceptions and their handlers to kernel exception dispatcher.
Index: kernel/arch/arm32/src/mm/page_fault.c
===================================================================
--- kernel/arch/arm32/src/mm/page_fault.c	(revision e49e2348e7647a4dd4d4e2879082790396c4f4f3)
+++ kernel/arch/arm32/src/mm/page_fault.c	(revision 5d8d71ebf89df6dbc2bd7f5a55aa3d4c73a286cd)
@@ -50,10 +50,11 @@
 {
 	fault_status_union_t fsu;
-
+	
 	/* fault status is stored in CP15 register 5 */
 	asm volatile (
-		"mrc p15, 0, %0, c5, c0, 0"
-		: "=r"(fsu.dummy)
+		"mrc p15, 0, %[dummy], c5, c0, 0"
+		: [dummy] "=r" (fsu.dummy)
 	);
+	
 	return fsu.fs;
 }
@@ -62,15 +63,16 @@
  *
  * @return FAR (fault address register) content (address that caused a page
- * 	   fault)
+ *         fault)
  */
 static inline uintptr_t read_fault_address_register(void)
 {
 	uintptr_t ret;
-
+	
-	/* fault adress is stored in CP15 register 6 */
+	/* fault address is stored in CP15 register 6 */
 	asm volatile (
-		"mrc p15, 0, %0, c6, c0, 0"
-		: "=r"(ret)
+		"mrc p15, 0, %[ret], c6, c0, 0"
+		: [ret] "=r" (ret)
 	);
+	
 	return ret;
 }
@@ -81,27 +83,24 @@
  *
  * @return true when instruction is load/store, false otherwise
+ *
  */
 static inline bool is_load_store_instruction(instruction_t instr)
 {
 	/* load store immediate offset */
-	if (instr.type == 0x2) {
-		return true;
-	}
-
+	if (instr.type == 0x2)
+		return true;
+	
 	/* load store register offset */
-	if (instr.type == 0x3 && instr.bit4 == 0) {
-		return true;
-	}
-
+	if ((instr.type == 0x3) && (instr.bit4 == 0))
+		return true;
+	
 	/* load store multiple */
-	if (instr.type == 0x4) {
-		return true;
-	}
-
+	if (instr.type == 0x4)
+		return true;
+	
-	/* oprocessor load/store */
+	/* coprocessor load/store */
-	if (instr.type == 0x6) {
-		return true;
-	}
-
+	if (instr.type == 0x6)
+		return true;
+	
 	return false;
 }
@@ -116,10 +115,9 @@
 {
 	/* swap, swapb instruction */
-	if (instr.type == 0x0 &&
-	    (instr.opcode == 0x8 || instr.opcode == 0xa) &&
-	    instr.access == 0x0 && instr.bits567 == 0x4 && instr.bit4 == 1) {
-		return true;
-	}
-
+	if ((instr.type == 0x0) &&
+	    ((instr.opcode == 0x8) || (instr.opcode == 0xa)) &&
+	    (instr.access == 0x0) && (instr.bits567 == 0x4) && (instr.bit4 == 1))
+		return true;
+	
 	return false;
 }
Index: kernel/arch/arm32/src/mm/tlb.c
===================================================================
--- kernel/arch/arm32/src/mm/tlb.c	(revision e49e2348e7647a4dd4d4e2879082790396c4f4f3)
+++ kernel/arch/arm32/src/mm/tlb.c	(revision 5d8d71ebf89df6dbc2bd7f5a55aa3d4c73a286cd)
@@ -49,5 +49,5 @@
 		"eor r1, r1\n"
 		"mcr p15, 0, r1, c8, c7, 0\n"
-		: : : "r1"
+		::: "r1"
 	);
 }
@@ -69,7 +69,6 @@
 {
 	asm volatile (
-		"mcr p15, 0, %0, c8, c7, 1"		
-		:
-		: "r" (page)
+		"mcr p15, 0, %[page], c8, c7, 1\n"
+		:: [page] "r" (page)
 	);
 }
Index: kernel/arch/arm32/src/userspace.c
===================================================================
--- kernel/arch/arm32/src/userspace.c	(revision e49e2348e7647a4dd4d4e2879082790396c4f4f3)
+++ kernel/arch/arm32/src/userspace.c	(revision 5d8d71ebf89df6dbc2bd7f5a55aa3d4c73a286cd)
@@ -91,10 +91,9 @@
 	/* set user mode, set registers, jump */
 	asm volatile (
-		"mov sp, %0			\n"
-		"msr spsr_c, %1			\n"
-		"ldmfd sp!, {r0-r12, sp, lr}^	\n"
+		"mov sp, %[ustate]\n"
+		"msr spsr_c, %[user_mode]\n"
+		"ldmfd sp!, {r0-r12, sp, lr}^\n"
 		"ldmfd sp!, {pc}^\n"
-		:
-		: "r" (&ustate), "r" (user_mode)
+		:: [ustate] "r" (&ustate), [user_mode] "r" (user_mode)
 	);
 
