source: mainline/kernel/arch/arm32/src/atomic.c

Last change on this file was 133461c, checked in by Jiří Zárevúcky <zarevucky.jiri@…>, 21 months ago

Align arm32 atomic op prototypes with compiler's expectations

Based on a patch by @vhotspur, but without explicit casts
between pointer types. Those are evil footguns.

/*
 * Copyright (c) 2012 Adam Hraska
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** @addtogroup kernel_arm32
 * @{
 */
/** @file
 * @brief Atomic operations emulation.
 */

#include <synch/spinlock.h>
#include <arch/barrier.h>
#include <arch/asm.h>

unsigned __atomic_fetch_add_4(volatile void *mem0, unsigned val, int model)
{
	volatile unsigned *mem = mem0;

	/*
	 * This implementation is for UP pre-ARMv6 systems where we do not have
	 * the LDREX and STREX instructions.
	 */
	ipl_t ipl = interrupts_disable();
	unsigned ret = *mem;
	*mem += val;
	interrupts_restore(ipl);
	return ret;
}
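
/*
 * For illustration only (kept out of the build): on ARMv6 and later, the same
 * fetch-and-add could be performed without disabling interrupts, using an
 * LDREX/STREX retry loop. This is a minimal sketch with a hypothetical helper
 * name, assuming GCC inline assembly; it is not what this file implements.
 */
#if 0
static unsigned ldrex_strex_fetch_add(volatile unsigned *mem, unsigned val)
{
	unsigned old, newval, status;

	asm volatile (
	    "1:	ldrex %0, [%3]\n"	/* old = *mem, marks exclusive access */
	    "	add %1, %0, %4\n"	/* newval = old + val */
	    "	strex %2, %1, [%3]\n"	/* try to store newval; status == 0 on success */
	    "	cmp %2, #0\n"
	    "	bne 1b\n"		/* exclusive access lost, retry */
	    : "=&r" (old), "=&r" (newval), "=&r" (status)
	    : "r" (mem), "Ir" (val)
	    : "cc", "memory");

	return old;
}
#endif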

unsigned __atomic_fetch_sub_4(volatile void *mem0, unsigned val, int model)
{
	volatile unsigned *mem = mem0;

	/* Same UP-only, interrupt-disabling approach as __atomic_fetch_add_4. */
	ipl_t ipl = interrupts_disable();
	unsigned ret = *mem;
	*mem -= val;
	interrupts_restore(ipl);
	return ret;
}

IRQ_SPINLOCK_STATIC_INITIALIZE_NAME(cas_lock, "arm-cas-lock");

/** Implements GCC's missing compare-and-swap intrinsic for ARM.
 *
 * Sets \a *ptr to \a new_val if it is equal to \a expected. In any case,
 * returns the previous value of \a *ptr.
 */
unsigned __sync_val_compare_and_swap_4(volatile void *ptr0, unsigned expected,
    unsigned new_val)
{
	volatile unsigned *ptr = ptr0;

	/*
	 * Using an interrupt-disabling spinlock might still lead to deadlock
	 * if CAS() is used in an exception handler. E.g. if a CAS() results
	 * in a page fault exception and the exception handler again tries
	 * to invoke CAS() (even for a different memory location), the spinlock
	 * would deadlock.
	 */
	irq_spinlock_lock(&cas_lock, true);

	unsigned cur_val = *ptr;

	if (cur_val == expected) {
		*ptr = new_val;
	}

	irq_spinlock_unlock(&cas_lock, true);

	return cur_val;
}
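
/*
 * Usage illustration (hypothetical, kept out of the build): callers typically
 * retry the CAS until the value they based their update on is still current.
 * The sketch below atomically raises a counter to a new maximum using the
 * function above.
 */
#if 0
static void atomic_store_max(volatile unsigned *ptr, unsigned val)
{
	unsigned cur = *ptr;

	/* Retry until we either install val or observe a value >= val. */
	while (cur < val) {
		unsigned prev = __sync_val_compare_and_swap_4(ptr, cur, val);
		if (prev == cur) {
			break;		/* our value was installed */
		}
		cur = prev;		/* someone else won, re-examine */
	}
}
#endif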

void __sync_synchronize(void)
{
	/* Full data synchronization barrier. */
	dsb();
}

/* Naive implementations of the newer intrinsics. */

_Bool __atomic_compare_exchange_4(volatile void *mem, void *expected0,
    unsigned desired, _Bool weak, int success, int failure)
{
	unsigned *expected = expected0;

	(void) weak;
	(void) success;
	(void) failure;

	unsigned old = *expected;
	unsigned new = __sync_val_compare_and_swap_4(mem, old, desired);
	if (old == new) {
		return 1;
	} else {
		*expected = new;
		return 0;
	}
}
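
/*
 * For context (illustrative only, kept out of the build): the compiler emits a
 * call to the function above whenever a 4-byte compare-exchange cannot be
 * inlined for the target CPU, e.g. for a hypothetical snippet like this:
 */
#if 0
#include <stdatomic.h>

static _Bool try_claim(atomic_uint *owner, unsigned self)
{
	unsigned expected = 0;

	/* Lowers to __atomic_compare_exchange_4(owner, &expected, self, ...) */
	return atomic_compare_exchange_strong(owner, &expected, self);
}
#endif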

unsigned __atomic_exchange_4(volatile void *mem0, unsigned val, int model)
{
	volatile unsigned *mem = mem0;

	(void) model;

	irq_spinlock_lock(&cas_lock, true);
	unsigned old = *mem;
	*mem = val;
	irq_spinlock_unlock(&cas_lock, true);

	return old;
}

/** @}
 */