/*
 *  linux/include/asm-arm/atomic.h
 *
 *  Copyright (C) 1996 Russell King.
 *  Copyright (C) 2002 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

/*
 * Counter is wrapped in a struct so callers cannot manipulate it with
 * plain integer operations; volatile forces the compiler to re-read
 * memory on every access.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t v = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }

/*
 * Plain load of the counter — relies on a single aligned word read
 * being indivisible on this architecture.
 * NOTE(review): defined before the includes below, presumably because
 * <asm/system.h> needs it — confirm before reordering.
 */
#define atomic_read(v) ((v)->counter)

#include <asm/system.h>
#include <asm/compiler.h>

/* Plain store of the counter — a single aligned word write. */
#define atomic_set(v,i) (((v)->counter) = (i))
/*
 * Atomically add @i to @v and return the new value.
 * Atomicity is provided by disabling interrupts around the
 * read-modify-write (uniprocessor implementation).
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long irq_state;
	int result;

	local_irq_save(irq_state);
	result = v->counter + i;
	v->counter = result;
	local_irq_restore(irq_state);

	return result;
}
/*
 * Atomically subtract @i from @v and return the new value.
 * Atomicity is provided by disabling interrupts around the
 * read-modify-write (uniprocessor implementation).
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long irq_state;
	int result;

	local_irq_save(irq_state);
	result = v->counter - i;
	v->counter = result;
	local_irq_restore(irq_state);

	return result;
}
/*
 * Compare-and-exchange: if @v holds @old, store @new into it.
 * Always returns the value @v held before the operation, so the
 * caller detects success by comparing the result against @old.
 * IRQ-disable based (uniprocessor implementation).
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	unsigned long irq_state;
	int prev;

	local_irq_save(irq_state);
	prev = v->counter;
	if (likely(prev == old))
		v->counter = new;
	local_irq_restore(irq_state);

	return prev;
}
/*
 * Atomically clear the bits in @mask from the word at @addr.
 * IRQ-disable based (uniprocessor implementation).
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long irq_state;

	local_irq_save(irq_state);
	*addr &= ~mask;
	local_irq_restore(irq_state);
}
/*
 * Store @new into @v and return the previous value. Delegates to the
 * generic xchg() — presumably provided by <asm/system.h>, included
 * above; confirm before moving this header's includes.
 */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/*
 * Add @a to @v, but only if @v is not already @u.
 * Returns non-zero if the addition was performed, zero if @v == @u.
 * Implemented as a cmpxchg retry loop; never calls cmpxchg once the
 * observed value equals @u.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int cur = atomic_read(v);

	for (;;) {
		int seen;

		if (cur == u)
			break;
		seen = atomic_cmpxchg(v, cur, cur + a);
		if (seen == cur)
			break;
		cur = seen;
	}
	return cur != u;
}
/* Increment @v unless it is zero; non-zero result means it was incremented. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Void-returning arithmetic, built on the *_return primitives above. */
#define atomic_add(i, v) (void) atomic_add_return(i, v)
#define atomic_inc(v) (void) atomic_add_return(1, v)
#define atomic_sub(i, v) (void) atomic_sub_return(i, v)
#define atomic_dec(v) (void) atomic_sub_return(1, v)

/* Operate, then test the resulting value. */
#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

/* True when the result of the addition is negative. */
#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

/* Atomic operations are already serializing on ARM */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()

#endif /* __ASM_ARM_ATOMIC_H */
|