/* Copyright (C) 2010-2014 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Maxim Kuvyrkov <maxim@codesourcery.com>, 2010.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library. If not, see
<http://www.gnu.org/licenses/>. */
#ifndef _BITS_ATOMIC_H
#define _BITS_ATOMIC_H 1
#include <stdint.h>
#include <sysdep.h>
#include <bits/m68k-vdso.h>
/* Coldfire has no atomic compare-and-exchange operation, but the
kernel provides userspace atomicity operations. Use them. */
/* 32-bit atomic types consumed by the generic glibc atomic framework.  */
typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;
/* Pointer-sized and maximum-width atomic types.  */
typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;
/* This configuration provides no 64-bit atomic operations.  */
#define __HAVE_64B_ATOMICS 0
/* Use the macros defined below, not compiler __atomic builtins.  */
#define USE_ATOMIC_COMPILER_BUILTINS 0
/* The only basic operation needed is compare and exchange. */
/* For ColdFire we'll have to trap into the kernel mode anyway,
so trap from the library rather then from the kernel wrapper. */
#ifdef SHARED
/* Compare *MEM against OLDVAL and, if equal, replace it with NEWVAL;
   evaluate to the previous contents of *MEM.  PIC (SHARED) variant:
   locate the kernel-provided __vdso_atomic_cmpxchg_32 helper through
   the GOT (the GOT address itself is computed pc-relatively into a
   scratch address register) and call it.  As the asm shows, the helper
   takes OLDVAL in %d0, NEWVAL in %d1 and the address in %a0, and the
   result is read back from %d0.  */
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
({ \
/* Use temporary variables to workaround call-clobberness of \
the registers. */ \
__typeof (mem) _mem = mem; \
__typeof (oldval) _oldval = oldval; \
__typeof (newval) _newval = newval; \
register __typeof (mem) _a0 asm ("a0") = _mem; \
register __typeof (oldval) _d0 asm ("d0") = _oldval; \
register __typeof (newval) _d1 asm ("d1") = _newval; \
void *tmp; \
\
asm ("movel #_GLOBAL_OFFSET_TABLE_@GOTPC, %2\n\t" \
"lea (-6, %%pc, %2), %2\n\t" \
"movel " STR_M68K_VDSO_SYMBOL (__vdso_atomic_cmpxchg_32) \
"@GOT(%2), %2\n\t" \
"movel (%2), %2\n\t" \
"jsr (%2)\n\t" \
: "+d" (_d0), "+m" (*_a0), "=&a" (tmp) \
: "a" (_a0), "d" (_d1)); \
_d0; \
})
#else
/* Compare *MEM against OLDVAL and, if equal, replace it with NEWVAL;
   evaluate to the previous contents of *MEM.  Static (non-SHARED)
   variant: trap straight into the kernel with the atomic_cmpxchg_32
   syscall number in %d0.  The asm binds the address to %a0, OLDVAL to
   %d2 and NEWVAL to %d1, and the previous value comes back in %d0.
   NOTE(review): register assignment differs from the vDSO variant
   above (OLDVAL in %d2, not %d0) because %d0 carries the syscall
   number — presumably matching the kernel trap ABI; confirm against
   the kernel sources.  */
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
({ \
/* Use temporary variables to workaround call-clobberness of \
the registers. */ \
__typeof (mem) _mem = mem; \
__typeof (oldval) _oldval = oldval; \
__typeof (newval) _newval = newval; \
register __typeof (oldval) _d0 asm ("d0") \
= (__typeof (oldval)) SYS_ify (atomic_cmpxchg_32); \
register __typeof (mem) _a0 asm ("a0") = _mem; \
register __typeof (oldval) _d2 asm ("d2") = _oldval; \
register __typeof (newval) _d1 asm ("d1") = _newval; \
\
asm ("trap #0" \
: "+d" (_d0), "+m" (*_a0) \
: "a" (_a0), "d" (_d2), "d" (_d1)); \
_d0; \
})
#endif
#ifdef SHARED
/* Full memory barrier, PIC (SHARED) variant: compute the GOT address
   pc-relatively into a scratch address register, load the
   __vdso_atomic_barrier function pointer from the GOT, and call it.
   FIX: the original wrote "%pc" with a single percent sign; in an
   extended-asm template that has operands ("%0" here), a literal '%'
   must be escaped as "%%" or GCC rejects the template — the cmpxchg
   macro above already does this correctly with "%%pc".  */
# define atomic_full_barrier() \
({ \
void *tmp; \
\
asm ("movel #_GLOBAL_OFFSET_TABLE_@GOTPC, %0\n\t" \
"lea (-6, %%pc, %0), %0\n\t" \
"movel " STR_M68K_VDSO_SYMBOL (__vdso_atomic_barrier) \
"@GOT(%0), %0\n\t" \
"movel (%0), %0\n\t" \
"jsr (%0)\n\t" \
: "=&a" (tmp)); \
})
#else
/* Full memory barrier, static (non-SHARED) variant: issue the
   atomic_barrier syscall directly via INTERNAL_SYSCALL.  The trailing
   (void) 0 gives the expression a void result, matching the SHARED
   variant.  */
# define atomic_full_barrier() \
(INTERNAL_SYSCALL (atomic_barrier, , 0), (void) 0)
#endif
#endif