Index: uspace/lib/c/arch/amd64/include/atomic.h
===================================================================
--- uspace/lib/c/arch/amd64/include/atomic.h	(revision 7dfcc50a002e0bb06422fd836ca683dec57b9140)
+++ uspace/lib/c/arch/amd64/include/atomic.h	(revision 7d364fb8ea8ef4fe1f9f9a2a7f89c21619faf824)
@@ -44,16 +44,30 @@
 static inline void atomic_inc(atomic_t *val)
 {
+#ifdef __PCC__
 	asm volatile (
 		"lock incq %0\n"
 		: "+m" (val->count)
 	);
+#else
+	asm volatile (
+		"lock incq %[count]\n"
+		: [count] "+m" (val->count)
+	);
+#endif
 }
 
 static inline void atomic_dec(atomic_t *val)
 {
+#ifdef __PCC__
 	asm volatile (
 		"lock decq %0\n"
 		: "+m" (val->count)
 	);
+#else
+	asm volatile (
+		"lock decq %[count]\n"
+		: [count] "+m" (val->count)
+	);
+#endif
 }
 
@@ -62,4 +76,5 @@
 	atomic_count_t r = 1;
 	
+#ifdef __PCC__
 	asm volatile (
 		"lock xaddq %1, %0\n"
@@ -67,4 +82,11 @@
 		  "+r" (r)
 	);
+#else
+	asm volatile (
+		"lock xaddq %[r], %[count]\n"
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
+	);
+#endif
 	
 	return r;
@@ -75,4 +97,5 @@
 	atomic_count_t r = -1;
 	
+#ifdef __PCC__
 	asm volatile (
 		"lock xaddq %1, %0\n"
@@ -80,4 +103,11 @@
 		  "+r" (r)
 	);
+#else
+	asm volatile (
+		"lock xaddq %[r], %[count]\n"
+		: [count] "+m" (val->count),
+		  [r] "+r" (r)
+	);
+#endif
 	
 	return r;

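Note (not part of the patch): the reason for the two branches is that pcc's inline assembler only understands positional operand references (%0, %1), while GCC and Clang also accept the named [operand] syntax used in the new non-__PCC__ branches. Below is a standalone sketch that mirrors atomic_postinc() in both styles so they can be compared and tested outside HelenOS; the atomic_t definition and the main() harness are illustrative assumptions, not HelenOS code, and it needs a GNU-style compiler on amd64.

/* Sketch only: both helpers perform an atomic post-increment via lock xaddq. */
#include <stdint.h>
#include <stdio.h>

typedef struct {
	volatile uint64_t count;	/* stand-in for HelenOS atomic_count_t */
} atomic_t;

/* Positional operands (%0, %1): the form kept for __PCC__. */
static inline uint64_t postinc_positional(atomic_t *val)
{
	uint64_t r = 1;

	asm volatile (
		"lock xaddq %1, %0\n"
		: "+m" (val->count),
		  "+r" (r)
	);

	return r;	/* value before the increment */
}

/* Named operands ([count], [r]): the GCC/Clang form added by the patch. */
static inline uint64_t postinc_named(atomic_t *val)
{
	uint64_t r = 1;

	asm volatile (
		"lock xaddq %[r], %[count]\n"
		: [count] "+m" (val->count),
		  [r] "+r" (r)
	);

	return r;	/* value before the increment */
}

int main(void)
{
	atomic_t a = { .count = 41 };

	printf("old=%llu\n", (unsigned long long) postinc_positional(&a));
	printf("old=%llu\n", (unsigned long long) postinc_named(&a));
	printf("now=%llu\n", (unsigned long long) a.count);
	return 0;
}

Both helpers emit the same instruction; only the way the operands are referenced differs, which is exactly what the #ifdef __PCC__ split selects between.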