Index: libc/arch/amd64/Makefile.inc
===================================================================
--- libc/arch/amd64/Makefile.inc	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/amd64/Makefile.inc	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -33,5 +33,6 @@
 TOOLCHAIN_DIR = /usr/local/amd64/bin
 
-ARCH_SOURCES += arch/$(ARCH)/src/syscall.S
+ARCH_SOURCES += arch/$(ARCH)/src/syscall.S \
+		arch/$(ARCH)/src/psthread.S
 
 LFLAGS += -N
Index: libc/arch/amd64/include/atomic.h
===================================================================
--- libc/arch/amd64/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/amd64/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2001-2004 Jakub Jermar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __amd64_ATOMIC_H__
+#define __amd64_ATOMIC_H__
+
+static inline void atomic_inc(atomic_t *val) {
+	__asm__ volatile ("lock incq %0\n" : "=m" (val->count));
+}
+
+static inline void atomic_dec(atomic_t *val) {
+	__asm__ volatile ("lock decq %0\n" : "=m" (val->count));
+}
+
+static inline long atomic_postinc(atomic_t *val) 
+{
+	long r;
+
+	__asm__ volatile (
+		"movq $1, %0\n"
+		"lock xaddq %0, %1\n"
+		: "=r" (r), "=m" (val->count)
+	);
+
+	return r;
+}
+
+static inline long atomic_postdec(atomic_t *val) 
+{
+	long r;
+	
+	__asm__ volatile (
+		"movq $-1, %0\n"
+		"lock xaddq %0, %1\n"
+		: "=r" (r), "=m" (val->count)
+	);
+	
+	return r;
+}
+
+#define atomic_preinc(val) (atomic_postinc(val)+1)
+#define atomic_predec(val) (atomic_postdec(val)-1)
+
+#endif
Index: libc/arch/amd64/include/atomic_arch.h
===================================================================
--- libc/arch/amd64/include/atomic_arch.h	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ 	(revision )
@@ -1,69 +1,0 @@
-/*
- * Copyright (C) 2001-2004 Jakub Jermar
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- * - Redistributions in binary form must reproduce the above copyright
- *   notice, this list of conditions and the following disclaimer in the
- *   documentation and/or other materials provided with the distribution.
- * - The name of the author may not be used to endorse or promote products
- *   derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
- * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
- * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef __amd64_ATOMIC_H__
-#define __amd64_ATOMIC_H__
-
-static inline void atomic_inc(atomic_t *val) {
-	__asm__ volatile ("lock incq %0\n" : "=m" (val->count));
-}
-
-static inline void atomic_dec(atomic_t *val) {
-	__asm__ volatile ("lock decq %0\n" : "=m" (val->count));
-}
-
-static inline long atomic_postinc(atomic_t *val) 
-{
-	long r;
-
-	__asm__ volatile (
-		"movq $1, %0\n"
-		"lock xaddq %0, %1\n"
-		: "=r" (r), "=m" (val->count)
-	);
-
-	return r;
-}
-
-static inline long atomic_postdec(atomic_t *val) 
-{
-	long r;
-	
-	__asm__ volatile (
-		"movq $-1, %0\n"
-		"lock xaddq %0, %1\n"
-		: "=r" (r), "=m" (val->count)
-	);
-	
-	return r;
-}
-
-#define atomic_preinc(val) (atomic_postinc(val)+1)
-#define atomic_predec(val) (atomic_postdec(val)-1)
-
-#endif
Index: libc/arch/amd64/include/context_offset.h
===================================================================
--- libc/arch/amd64/include/context_offset.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/amd64/include/context_offset.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,10 @@
+/* This file is automatically generated by gencontext.c. */
+#define OFFSET_SP  0x0
+#define OFFSET_PC  0x8
+#define OFFSET_RBX 0x10
+#define OFFSET_RBP 0x18
+#define OFFSET_R12 0x20
+#define OFFSET_R13 0x28
+#define OFFSET_R14 0x30
+#define OFFSET_R15 0x38
+#define OFFSET_TLS 0x40
Index: libc/arch/amd64/include/psthread.h
===================================================================
--- libc/arch/amd64/include/psthread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/amd64/include/psthread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2006 Ondrej Palkovsky
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LIBC__amd64PSTHREAD_H__
+#define __LIBC__amd64PSTHREAD_H__
+
+#include <types.h>
+
+/* According to the ABI, the stack MUST be aligned on a
+ * 16-byte boundary. If it is not, va_arg handling will
+ * fail sooner or later.
+ */
+#define SP_DELTA     16
+
+/* We include only registers that must be preserved
+ * during function call
+ */
+typedef struct {
+    uint64_t sp;
+    uint64_t pc;
+    
+    uint64_t rbx;
+    uint64_t rbp;
+
+    uint64_t r12;
+    uint64_t r13;
+    uint64_t r14;
+    uint64_t r15;
+
+    uint64_t tls;
+} context_t;
+
+#endif
Index: libc/arch/amd64/include/thread.h
===================================================================
--- libc/arch/amd64/include/thread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/amd64/include/thread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2006 Ondrej Palkovsky
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LIBC__amd64THREAD_H__
+#define __LIBC__amd64THREAD_H__
+
+#include <libc.h>
+
+static inline void __tls_set(void *tls)
+{
+	__SYSCALL1(SYS_TLS_SET, (sysarg_t) tls);
+}
+
+static inline void * __tls_get(void)
+{
+	void * retval;
+
+	__asm__ ("movq %%fs:0, %0" : "=r"(retval));
+	return retval;
+}
+
+#endif
Index: libc/arch/amd64/src/entry.s
===================================================================
--- libc/arch/amd64/src/entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/amd64/src/entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -37,4 +37,5 @@
 #
 __entry:
+	call __main
 	call main
 	call __exit
Index: libc/arch/amd64/src/psthread.S
===================================================================
--- libc/arch/amd64/src/psthread.S	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/amd64/src/psthread.S	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,87 @@
+#
+# Copyright (C) 2001-2004 Jakub Jermar
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# - Redistributions of source code must retain the above copyright
+#   notice, this list of conditions and the following disclaimer.
+# - Redistributions in binary form must reproduce the above copyright
+#   notice, this list of conditions and the following disclaimer in the
+#   documentation and/or other materials provided with the distribution.
+# - The name of the author may not be used to endorse or promote products
+#   derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+.text
+
+.global context_save
+.global context_restore
+
+#include <libarch/context_offset.h>
+
+## Save current CPU context
+#
+# Save CPU context to context_t variable
+# pointed by the 1st argument. Returns 1 in RAX.
+#
+context_save:
+	movq (%rsp), %rdx     # the caller's return %rip
+	# The 1st argument is passed in %rdi
+	movq %rdx, OFFSET_PC(%rdi)
+	movq %rsp, OFFSET_SP(%rdi)
+	
+	movq %rbx, OFFSET_RBX(%rdi)
+	movq %rbp, OFFSET_RBP(%rdi)
+	movq %r12, OFFSET_R12(%rdi)
+	movq %r13, OFFSET_R13(%rdi)
+	movq %r14, OFFSET_R14(%rdi)
+	movq %r15, OFFSET_R15(%rdi)
+
+	# Save TLS
+	movq %fs:0, %rax
+	movq %rax, OFFSET_TLS(%rdi)
+		
+	xorq %rax,%rax		# context_save returns 1
+	incq %rax
+	ret
+
+
+## Restore current CPU context
+#
+# Restore CPU context from context_t variable
+# pointed by the 1st argument. Returns 0 in RAX.
+#
+context_restore:
+	movq OFFSET_R15(%rdi), %r15
+	movq OFFSET_R14(%rdi), %r14
+	movq OFFSET_R13(%rdi), %r13
+	movq OFFSET_R12(%rdi), %r12
+	movq OFFSET_RBP(%rdi), %rbp
+	movq OFFSET_RBX(%rdi), %rbx	
+	
+	movq OFFSET_SP(%rdi), %rsp   # ctx->sp -> %rsp
+	
+	movq OFFSET_PC(%rdi), %rdx
+	movq %rdx,(%rsp)
+
+	# Set thread local storage
+	movq OFFSET_TLS(%rdi), %rdi   # Set arg1 to TLS addr
+	movq $1, %r8
+	syscall
+
+	xorq %rax,%rax		# context_restore returns 0
+	ret
Index: libc/arch/amd64/src/thread_entry.s
===================================================================
--- libc/arch/amd64/src/thread_entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/amd64/src/thread_entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -39,5 +39,5 @@
 	#
 	movq %rax, %rdi
-	call thread_main
+	call __thread_main
 	
 .end __thread_entry
Index: libc/arch/ia32/Makefile.inc
===================================================================
--- libc/arch/ia32/Makefile.inc	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/ia32/Makefile.inc	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -33,5 +33,6 @@
 TOOLCHAIN_DIR = /usr/local/i686/bin
 
-ARCH_SOURCES += arch/$(ARCH)/src/syscall.c
+ARCH_SOURCES += arch/$(ARCH)/src/syscall.c \
+		arch/$(ARCH)/src/psthread.S
 
 LFLAGS += -N
Index: libc/arch/ia32/include/atomic.h
===================================================================
--- libc/arch/ia32/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/ia32/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2001-2004 Jakub Jermar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __ia32_ATOMIC_H__
+#define __ia32_ATOMIC_H__
+
+static inline void atomic_inc(atomic_t *val) {
+	__asm__ volatile ("lock incl %0\n" : "=m" (val->count));
+}
+
+static inline void atomic_dec(atomic_t *val) {
+	__asm__ volatile ("lock decl %0\n" : "=m" (val->count));
+}
+
+static inline long atomic_postinc(atomic_t *val) 
+{
+	long r;
+
+	__asm__ volatile (
+		"movl $1, %0\n"
+		"lock xaddl %0, %1\n"
+		: "=r" (r), "=m" (val->count)
+	);
+
+	return r;
+}
+
+static inline long atomic_postdec(atomic_t *val) 
+{
+	long r;
+	
+	__asm__ volatile (
+		"movl $-1, %0\n"
+		"lock xaddl %0, %1\n"
+		: "=r" (r), "=m" (val->count)
+	);
+	
+	return r;
+}
+
+#define atomic_preinc(val) (atomic_postinc(val)+1)
+#define atomic_predec(val) (atomic_postdec(val)-1)
+
+#endif
Index: libc/arch/ia32/include/atomic_arch.h
===================================================================
--- libc/arch/ia32/include/atomic_arch.h	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ 	(revision )
@@ -1,69 +1,0 @@
-/*
- * Copyright (C) 2001-2004 Jakub Jermar
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- * - Redistributions in binary form must reproduce the above copyright
- *   notice, this list of conditions and the following disclaimer in the
- *   documentation and/or other materials provided with the distribution.
- * - The name of the author may not be used to endorse or promote products
- *   derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
- * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
- * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef __ia32_ATOMIC_H__
-#define __ia32_ATOMIC_H__
-
-static inline void atomic_inc(atomic_t *val) {
-	__asm__ volatile ("lock incl %0\n" : "=m" (val->count));
-}
-
-static inline void atomic_dec(atomic_t *val) {
-	__asm__ volatile ("lock decl %0\n" : "=m" (val->count));
-}
-
-static inline long atomic_postinc(atomic_t *val) 
-{
-	long r;
-
-	__asm__ volatile (
-		"movl $1, %0\n"
-		"lock xaddl %0, %1\n"
-		: "=r" (r), "=m" (val->count)
-	);
-
-	return r;
-}
-
-static inline long atomic_postdec(atomic_t *val) 
-{
-	long r;
-	
-	__asm__ volatile (
-		"movl $-1, %0\n"
-		"lock xaddl %0, %1\n"
-		: "=r" (r), "=m" (val->count)
-	);
-	
-	return r;
-}
-
-#define atomic_preinc(val) (atomic_postinc(val)+1)
-#define atomic_predec(val) (atomic_postdec(val)-1)
-
-#endif
Index: libc/arch/ia32/src/entry.s
===================================================================
--- libc/arch/ia32/src/entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/ia32/src/entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -41,6 +41,7 @@
 	mov %ax, %es
 	mov %ax, %fs
-	mov %ax, %gs
+	# Do not set %gs, it contains descriptor that can see TLS
 	
+	call __main	
 	call main
 	call __exit
Index: libc/arch/ia32/src/thread_entry.s
===================================================================
--- libc/arch/ia32/src/thread_entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/ia32/src/thread_entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -39,5 +39,5 @@
 	mov %dx, %es
 	mov %dx, %fs
-	mov %dx, %gs
+	# Do not set %gs, it contains descriptor that can see TLS
 
 	#
@@ -45,6 +45,5 @@
 	#
 	pushl %eax
-	call thread_main
-	addl $4, %esp
+	call __thread_main
 	
 	#
Index: libc/arch/ia64/include/atomic.h
===================================================================
--- libc/arch/ia64/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/ia64/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2005 Jakub Jermar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __ia64_ATOMIC_H__
+#define __ia64_ATOMIC_H__
+
+/** Atomic addition.
+ *
+ * @param val Atomic value.
+ * @param imm Value to add.
+ *
+ * @return Value before addition.
+ */
+static inline long atomic_add(atomic_t *val, int imm)
+{
+	long v;
+
+ 	__asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "+m" (val->count) : "i" (imm));
+ 
+	return v;
+}
+
+static inline void atomic_inc(atomic_t *val) { atomic_add(val, 1); }
+static inline void atomic_dec(atomic_t *val) { atomic_add(val, -1); }
+
+static inline long atomic_preinc(atomic_t *val) { return atomic_add(val, 1) + 1; }
+static inline long atomic_predec(atomic_t *val) { return atomic_add(val, -1) - 1; }
+
+static inline long atomic_postinc(atomic_t *val) { return atomic_add(val, 1); }
+static inline long atomic_postdec(atomic_t *val) { return atomic_add(val, -1); }
+
+#endif
Index: libc/arch/ia64/include/atomic_arch.h
===================================================================
--- libc/arch/ia64/include/atomic_arch.h	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ 	(revision )
@@ -1,57 +1,0 @@
-/*
- * Copyright (C) 2005 Jakub Jermar
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- * - Redistributions in binary form must reproduce the above copyright
- *   notice, this list of conditions and the following disclaimer in the
- *   documentation and/or other materials provided with the distribution.
- * - The name of the author may not be used to endorse or promote products
- *   derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
- * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
- * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef __ia64_ATOMIC_H__
-#define __ia64_ATOMIC_H__
-
-/** Atomic addition.
- *
- * @param val Atomic value.
- * @param imm Value to add.
- *
- * @return Value before addition.
- */
-static inline long atomic_add(atomic_t *val, int imm)
-{
-	long v;
-
- 	__asm__ volatile ("fetchadd8.rel %0 = %1, %2\n" : "=r" (v), "+m" (val->count) : "i" (imm));
- 
-	return v;
-}
-
-static inline void atomic_inc(atomic_t *val) { atomic_add(val, 1); }
-static inline void atomic_dec(atomic_t *val) { atomic_add(val, -1); }
-
-static inline long atomic_preinc(atomic_t *val) { return atomic_add(val, 1) + 1; }
-static inline long atomic_predec(atomic_t *val) { return atomic_add(val, -1) - 1; }
-
-static inline long atomic_postinc(atomic_t *val) { return atomic_add(val, 1); }
-static inline long atomic_postdec(atomic_t *val) { return atomic_add(val, -1); }
-
-#endif
Index: libc/arch/ia64/src/entry.s
===================================================================
--- libc/arch/ia64/src/entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/ia64/src/entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -39,4 +39,5 @@
 	alloc loc0 = ar.pfs, 0, 1, 2, 0
 	mov r1 = _gp 
+	{ br.call.sptk.many b0 = __main }
 	{ br.call.sptk.many b0 = main }
 	{ br.call.sptk.many b0 = __exit }
Index: libc/arch/ia64/src/thread_entry.s
===================================================================
--- libc/arch/ia64/src/thread_entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/ia64/src/thread_entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -44,5 +44,5 @@
 	
 	mov out0 = r8
-	{ br.call.sptk.many b0 = thread_main }
+	{ br.call.sptk.many b0 = __thread_main }
 	
 	#
Index: libc/arch/mips32/Makefile.inc
===================================================================
--- libc/arch/mips32/Makefile.inc	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/mips32/Makefile.inc	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -32,7 +32,8 @@
 TARGET = mipsel-linux-gnu
 TOOLCHAIN_DIR = /usr/local/mipsel/bin
-CFLAGS += -mno-abicalls -mips3 
+CFLAGS += -mno-abicalls -mips3 -ftls-model=global-dynamic
 
-ARCH_SOURCES += arch/$(ARCH)/src/syscall.c
+ARCH_SOURCES += arch/$(ARCH)/src/syscall.c \
+	arch/$(ARCH)/src/psthread.S
 
 LFLAGS += -N
Index: libc/arch/mips32/include/atomic.h
===================================================================
--- libc/arch/mips32/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/mips32/include/atomic.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2005 Ondrej Palkovsky
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __mips32_ATOMIC_H__
+#define __mips32_ATOMIC_H__
+
+#define atomic_inc(x)	((void) atomic_add(x, 1))
+#define atomic_dec(x)	((void) atomic_add(x, -1))
+
+#define atomic_postinc(x) (atomic_add(x, 1) - 1)
+#define atomic_postdec(x) (atomic_add(x, -1) + 1)
+
+#define atomic_preinc(x) atomic_add(x, 1)
+#define atomic_predec(x) atomic_add(x, -1)
+
+/* Atomic addition of immediate value.
+ *
+ * @param val Memory location to which will be the immediate value added.
+ * @param i Signed immediate that will be added to *val.
+ *
+ * @return Value after addition.
+ */
+static inline long atomic_add(atomic_t *val, int i)
+{
+	long tmp, v;
+
+	__asm__ volatile (
+		"1:\n"
+		"	ll %0, %1\n"
+		"	addiu %0, %0, %3\n"	/* same as addi, but never traps on overflow */
+		"       move %2, %0\n"
+		"	sc %0, %1\n"
+		"	beq %0, %4, 1b\n"	/* if the atomic operation failed, try again */
+		/*	nop	*/		/* nop for the branch delay slot is inserted automatically by the assembler */
+		: "=r" (tmp), "=m" (val->count), "=r" (v)
+		: "i" (i), "i" (0)
+		);
+
+	return v;
+}
+
+#endif
Index: libc/arch/mips32/include/atomic_arch.h
===================================================================
--- libc/arch/mips32/include/atomic_arch.h	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ 	(revision )
@@ -1,67 +1,0 @@
-/*
- * Copyright (C) 2005 Ondrej Palkovsky
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * - Redistributions of source code must retain the above copyright
- *   notice, this list of conditions and the following disclaimer.
- * - Redistributions in binary form must reproduce the above copyright
- *   notice, this list of conditions and the following disclaimer in the
- *   documentation and/or other materials provided with the distribution.
- * - The name of the author may not be used to endorse or promote products
- *   derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
- * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
- * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
- * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
- * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
- * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef __mips32_ATOMIC_H__
-#define __mips32_ATOMIC_H__
-
-#define atomic_inc(x)	((void) atomic_add(x, 1))
-#define atomic_dec(x)	((void) atomic_add(x, -1))
-
-#define atomic_postinc(x) (atomic_add(x, 1) - 1)
-#define atomic_postdec(x) (atomic_add(x, -1) + 1)
-
-#define atomic_preinc(x) atomic_add(x, 1)
-#define atomic_predec(x) atomic_add(x, -1)
-
-/* Atomic addition of immediate value.
- *
- * @param val Memory location to which will be the immediate value added.
- * @param i Signed immediate that will be added to *val.
- *
- * @return Value after addition.
- */
-static inline long atomic_add(atomic_t *val, int i)
-{
-	long tmp, v;
-
-	__asm__ volatile (
-		"1:\n"
-		"	ll %0, %1\n"
-		"	addiu %0, %0, %3\n"	/* same as addi, but never traps on overflow */
-		"       move %2, %0\n"
-		"	sc %0, %1\n"
-		"	beq %0, %4, 1b\n"	/* if the atomic operation failed, try again */
-		/*	nop	*/		/* nop is inserted automatically by compiler */
-		: "=r" (tmp), "=m" (val->count), "=r" (v)
-		: "i" (i), "i" (0)
-		);
-
-	return v;
-}
-
-#endif
Index: libc/arch/mips32/include/context_offset.h
===================================================================
--- libc/arch/mips32/include/context_offset.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/mips32/include/context_offset.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,15 @@
+/* This file is automatically generated by gencontext.c. */
+/* struct context */
+#define OFFSET_SP      0x0
+#define OFFSET_PC      0x4
+#define OFFSET_S0      0x8
+#define OFFSET_S1      0xc
+#define OFFSET_S2      0x10
+#define OFFSET_S3      0x14
+#define OFFSET_S4      0x18
+#define OFFSET_S5      0x1c
+#define OFFSET_S6      0x20
+#define OFFSET_S7      0x24
+#define OFFSET_S8      0x28
+#define OFFSET_GP      0x2c
+#define OFFSET_TLS     0x30
Index: libc/arch/mips32/include/psthread.h
===================================================================
--- libc/arch/mips32/include/psthread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/mips32/include/psthread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2006 Ondrej Palkovsky
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LIBC__mips32PSTHREAD_H__
+#define __LIBC__mips32PSTHREAD_H__
+
+#include <types.h>
+
+#define SP_DELTA	(8)
+
+typedef struct  {
+	uint32_t sp;
+	uint32_t pc;
+	
+	uint32_t s0;
+	uint32_t s1;
+	uint32_t s2;
+	uint32_t s3;
+	uint32_t s4;
+	uint32_t s5;
+	uint32_t s6;
+	uint32_t s7;
+	uint32_t s8;
+	uint32_t gp;
+	uint32_t tls; /* Thread-local storage (kept in the k1 register) */
+} context_t;
+
+#endif
Index: libc/arch/mips32/include/thread.h
===================================================================
--- libc/arch/mips32/include/thread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/mips32/include/thread.h	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2006 Ondrej Palkovsky
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ * - The name of the author may not be used to endorse or promote products
+ *   derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LIBC__mips32THREAD_H__
+#define __LIBC__mips32THREAD_H__
+
+
+static inline void __tls_set(void *tls)
+{
+	__asm__ volatile ("add $27, %0, $0" : : "r"(tls)); /* Move tls to K1 */
+}
+
+static inline void * __tls_get(void)
+{
+	void * retval;
+
+	__asm__ volatile("add %0, $27, $0" : "=r"(retval));
+	return retval;
+}
+
+#endif
Index: libc/arch/mips32/src/entry.s
===================================================================
--- libc/arch/mips32/src/entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/mips32/src/entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -37,12 +37,17 @@
 #
 #
+.ent __entry
 __entry:
 	lui $28, _gp
-
+	
+	# The MIPS o32 ABI may spill its arguments to the stack; make space
+	# so that this also works at -O0
+	addiu $sp, -16
+	
+	jal __main
+	
 	jal main
-	nop
 	
 	jal __exit
-	nop	
 	
 .end __entry
Index: libc/arch/mips32/src/psthread.S
===================================================================
--- libc/arch/mips32/src/psthread.S	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
+++ libc/arch/mips32/src/psthread.S	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -0,0 +1,88 @@
+#
+# Copyright (C) 2003-2004 Jakub Jermar
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# - Redistributions of source code must retain the above copyright
+#   notice, this list of conditions and the following disclaimer.
+# - Redistributions in binary form must reproduce the above copyright
+#   notice, this list of conditions and the following disclaimer in the
+#   documentation and/or other materials provided with the distribution.
+# - The name of the author may not be used to endorse or promote products
+#   derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+.text
+
+.set noat
+.set noreorder
+.set nomacro
+
+
+#include <arch/asm/regname.h>
+#include <libarch/context_offset.h>
+	
+.global context_save
+.global context_restore
+
+.macro CONTEXT_STORE r
+	sw $s0,OFFSET_S0(\r)
+	sw $s1,OFFSET_S1(\r)
+	sw $s2,OFFSET_S2(\r)
+	sw $s3,OFFSET_S3(\r)
+	sw $s4,OFFSET_S4(\r)
+	sw $s5,OFFSET_S5(\r)
+	sw $s6,OFFSET_S6(\r)
+	sw $s7,OFFSET_S7(\r)
+	sw $s8,OFFSET_S8(\r)
+	sw $gp,OFFSET_GP(\r)
+	sw $k1,OFFSET_TLS(\r)
+	
+	sw $ra,OFFSET_PC(\r)
+	sw $sp,OFFSET_SP(\r)
+.endm
+
+.macro CONTEXT_LOAD r
+	lw $s0,OFFSET_S0(\r)
+	lw $s1,OFFSET_S1(\r)
+	lw $s2,OFFSET_S2(\r)
+	lw $s3,OFFSET_S3(\r)
+	lw $s4,OFFSET_S4(\r)
+	lw $s5,OFFSET_S5(\r)
+	lw $s6,OFFSET_S6(\r)
+	lw $s7,OFFSET_S7(\r)
+	lw $s8,OFFSET_S8(\r)
+	lw $gp,OFFSET_GP(\r)
+	lw $k1,OFFSET_TLS(\r)
+	
+	lw $ra,OFFSET_PC(\r)
+	lw $sp,OFFSET_SP(\r)
+.endm
+	
+context_save:
+	CONTEXT_STORE $a0
+
+	# context_save returns 1
+	j $ra
+	li $v0, 1	
+	
+context_restore:
+	CONTEXT_LOAD $a0
+
+	# context_restore returns 0
+	j $ra
+	xor $v0, $v0	
Index: libc/arch/mips32/src/thread_entry.s
===================================================================
--- libc/arch/mips32/src/thread_entry.s	(revision 7e2988cf02679487ab306921d9313ff5c9c3add7)
+++ libc/arch/mips32/src/thread_entry.s	(revision 29a9f628c8583835e581747cfad7877e27f6d810)
@@ -38,4 +38,5 @@
 #
 #
+.ent __thread_entry
 __thread_entry:
 	lui $28, _gp
@@ -45,8 +46,12 @@
 	#
 	add $4, $2, 0
-	jal thread_main
+	# The MIPS o32 ABI may spill its arguments to the stack; make space
+	addiu $sp, -16
+	
+	j __thread_main
 	nop
-	
+		
 	#
 	# Not reached.
 	#
+.end __thread_entry
