Index: kernel/arch/amd64/include/atomic.h
===================================================================
--- kernel/arch/amd64/include/atomic.h	(revision 3bb732b6ee1465b9afefb77533296351b0cb71ab)
+++ kernel/arch/amd64/include/atomic.h	(revision 2bcf6c654972c91913e02276dd74d51eb9229589)
@@ -140,4 +140,45 @@
 }
 
+
+#define _atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, prefix) \
+	asm volatile ( \
+		prefix " cmpxchgq %[newval], %[ptr]\n" \
+		: /* Output operands. */ \
+		/* Old/current value is returned in rax. */ \
+		[oldval] "=a" (old_val), \
+		/* (*ptr) will be read and written to, hence "+" */ \
+		[ptr] "+m" (*pptr) \
+		: /* Input operands. */ \
+		/* Expected value must be in rax. */ \
+		[expval] "a" (exp_val), \
+		/* The new value may be in any register. */ \
+		[newval] "r" (new_val) \
+		: "memory" \
+	)
+
+/** Atomically compares and swaps the pointer at pptr. */
+NO_TRACE static inline void * atomic_cas_ptr(void **pptr,
+	void *exp_val, void *new_val)
+{
+	void *old_val;
+	_atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "lock\n");
+	return old_val;
+}
+
+/** Compare-and-swap of a pointer; atomic w.r.t. the local cpu's interrupts.
+ *
+ * This function is NOT smp safe and is not atomic with respect to other cpus.
+ */
+NO_TRACE static inline void * atomic_cas_ptr_local(void **pptr,
+	void *exp_val, void *new_val)
+{
+	void *old_val;
+	_atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "");
+	return old_val;
+}
+
+#undef _atomic_cas_ptr_impl
+
+
 #endif
 
Index: kernel/arch/ia32/include/atomic.h
===================================================================
--- kernel/arch/ia32/include/atomic.h	(revision 3bb732b6ee1465b9afefb77533296351b0cb71ab)
+++ kernel/arch/ia32/include/atomic.h	(revision 2bcf6c654972c91913e02276dd74d51eb9229589)
@@ -113,4 +113,5 @@
 }
 
+
 /** ia32 specific fast spinlock */
 NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
@@ -142,4 +143,44 @@
 }
 
+
+#define _atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, prefix) \
+	asm volatile ( \
+		prefix " cmpxchgl %[newval], %[ptr]\n" \
+		: /* Output operands. */ \
+		/* Old/current value is returned in eax. */ \
+		[oldval] "=a" (old_val), \
+		/* (*ptr) will be read and written to, hence "+" */ \
+		[ptr] "+m" (*pptr) \
+		: /* Input operands. */ \
+		/* Expected value must be in eax. */ \
+		[expval] "a" (exp_val), \
+		/* The new value may be in any register. */ \
+		[newval] "r" (new_val) \
+		: "memory" \
+	)
+
+/** Atomically compares and swaps the pointer at pptr. */
+NO_TRACE static inline void * atomic_cas_ptr(void **pptr,
+	void *exp_val, void *new_val)
+{
+	void *old_val;
+	_atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "lock\n");
+	return old_val;
+}
+
+/** Compare-and-swap of a pointer; atomic w.r.t. the local cpu's interrupts.
+ *
+ * This function is NOT smp safe and is not atomic with respect to other cpus.
+ */
+NO_TRACE static inline void * atomic_cas_ptr_local(void **pptr,
+	void *exp_val, void *new_val)
+{
+	void *old_val;
+	_atomic_cas_ptr_impl(pptr, exp_val, new_val, old_val, "");
+	return old_val;
+}
+
+#undef _atomic_cas_ptr_impl
+
 #endif
 
Index: kernel/test/atomic/atomic1.c
===================================================================
--- kernel/test/atomic/atomic1.c	(revision 3bb732b6ee1465b9afefb77533296351b0cb71ab)
+++ kernel/test/atomic/atomic1.c	(revision 2bcf6c654972c91913e02276dd74d51eb9229589)
@@ -60,4 +60,15 @@
 		return "Failed atomic_get() after atomic_predec()";
 	
+	void *ptr = NULL;
+	void *a_ptr = &a;
+	if (atomic_cas_ptr(&ptr, NULL, a_ptr) != NULL)
+		return "Failed atomic_cas_ptr(): bad return value";
+	if (ptr != a_ptr)
+		return "Failed atomic_cas_ptr(): bad pointer value";
+	if (atomic_cas_ptr(&ptr, NULL, NULL) != a_ptr)
+		return "Failed atomic_cas_ptr(): indicated change";
+	if (ptr != a_ptr)
+		return "Failed atomic_cas_ptr(): changed the ptr";
+
 	return NULL;
 }
