source: mainline/kernel/arch/amd64/include/atomic.h@ b3b7e14a

lfn serial ticket/834-toolchain-update topic/msim-upgrade topic/simplify-dev-export
Last change on this file since b3b7e14a was d99c1d2, checked in by Martin Decky <martin@…>, 15 years ago

use [u]int{8|16|32|64}_t type definitions as detected by the autotool
replace direct usage of arch/types.h with typedefs.h

  • Property mode set to 100644
File size: 3.4 KB
RevLine 
[5753fbb]1/*
[df4ed85]2 * Copyright (c) 2001-2004 Jakub Jermar
[5753fbb]3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * - Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * - Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 * - The name of the author may not be used to endorse or promote products
15 * derived from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
[f24d300]29/** @addtogroup amd64
[b45c443]30 * @{
31 */
32/** @file
33 */
34
[06e1e95]35#ifndef KERN_amd64_ATOMIC_H_
36#define KERN_amd64_ATOMIC_H_
[5753fbb]37
[d99c1d2]38#include <typedefs.h>
[53f9821]39#include <arch/barrier.h>
40#include <preemption.h>
[5753fbb]41
[228666c]42static inline void atomic_inc(atomic_t *val)
43{
[5753fbb]44#ifdef CONFIG_SMP
[f24d300]45 asm volatile (
46 "lock incq %[count]\n"
47 : [count] "+m" (val->count)
48 );
[5753fbb]49#else
[f24d300]50 asm volatile (
51 "incq %[count]\n"
52 : [count] "+m" (val->count)
53 );
[5753fbb]54#endif /* CONFIG_SMP */
55}
56
[228666c]57static inline void atomic_dec(atomic_t *val)
58{
[5753fbb]59#ifdef CONFIG_SMP
[f24d300]60 asm volatile (
61 "lock decq %[count]\n"
62 : [count] "+m" (val->count)
63 );
[5753fbb]64#else
[f24d300]65 asm volatile (
66 "decq %[count]\n"
67 : [count] "+m" (val->count)
68 );
[5753fbb]69#endif /* CONFIG_SMP */
70}
71
[228666c]72static inline atomic_count_t atomic_postinc(atomic_t *val)
[5753fbb]73{
[228666c]74 atomic_count_t r = 1;
[f24d300]75
[e7b7be3f]76 asm volatile (
[f24d300]77 "lock xaddq %[r], %[count]\n"
[228666c]78 : [count] "+m" (val->count),
79 [r] "+r" (r)
[5753fbb]80 );
[f24d300]81
[5753fbb]82 return r;
83}
84
[228666c]85static inline atomic_count_t atomic_postdec(atomic_t *val)
[5753fbb]86{
[228666c]87 atomic_count_t r = -1;
[5753fbb]88
[e7b7be3f]89 asm volatile (
[f24d300]90 "lock xaddq %[r], %[count]\n"
[228666c]91 : [count] "+m" (val->count),
92 [r] "+r" (r)
[5753fbb]93 );
94
95 return r;
96}
97
/** Atomically increment and return the new (post-increment) value. */
#define atomic_preinc(val) (atomic_postinc(val) + 1)
/** Atomically decrement and return the new (post-decrement) value. */
#define atomic_predec(val) (atomic_postdec(val) - 1)
[5753fbb]100
[228666c]101static inline atomic_count_t test_and_set(atomic_t *val)
102{
[ba371e1]103 atomic_count_t v = 1;
[5753fbb]104
[e7b7be3f]105 asm volatile (
[f24d300]106 "xchgq %[v], %[count]\n"
[ba371e1]107 : [v] "+r" (v),
[228666c]108 [count] "+m" (val->count)
[5753fbb]109 );
110
111 return v;
112}
113
/** amd64 specific fast spinlock
 *
 * Busy-waits until val->count reads as zero (unlocked), then tries to
 * claim the lock by atomically exchanging in a nonzero value.  Disables
 * preemption for the duration of the critical section; the matching
 * unlock path is expected to re-enable it.
 *
 * @param val Lock word: zero means unlocked, nonzero means held.
 */
static inline void atomic_lock_arch(atomic_t *val)
{
	atomic_count_t tmp;
	
	preemption_disable();
	asm volatile (
		"0:\n"
		"pause\n"                       /* CPU hint: spin-wait loop */
		/* Plain (non-locked) read while the lock looks taken. */
		"mov %[count], %[tmp]\n"
		"testq %[tmp], %[tmp]\n"
		"jnz 0b\n"        /* lightweight looping on locked spinlock */
		
		/* tmp was observed zero; make it 1 and try to grab the lock. */
		"incq %[tmp]\n"   /* now use the atomic operation */
		"xchgq %[count], %[tmp]\n"
		/* tmp now holds the previous lock value; nonzero = we lost the race. */
		"testq %[tmp], %[tmp]\n"
		"jnz 0b\n"
		: [count] "+m" (val->count),
		  [tmp] "=&r" (tmp)
	);
	
	/*
	 * Prevent critical section code from bleeding out this way up.
	 */
	CS_ENTER_BARRIER();
}
[5753fbb]140
141#endif
[b45c443]142
[06e1e95]143/** @}
[b45c443]144 */
Note: See TracBrowser for help on using the repository browser.