Apache HTTPD
framework
httpd-2.4.62
srclib
apr
atomic
unix
builtins64.c
Go to the documentation of this file.
1
/* Licensed to the Apache Software Foundation (ASF) under one or more
2
* contributor license agreements. See the NOTICE file distributed with
3
* this work for additional information regarding copyright ownership.
4
* The ASF licenses this file to You under the Apache License, Version 2.0
5
* (the "License"); you may not use this file except in compliance with
6
* the License. You may obtain a copy of the License at
7
*
8
* http://www.apache.org/licenses/LICENSE-2.0
9
*
10
* Unless required by applicable law or agreed to in writing, software
11
* distributed under the License is distributed on an "AS IS" BASIS,
12
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
* See the License for the specific language governing permissions and
14
* limitations under the License.
15
*/
16
17
#include "
apr_arch_atomic.h
"
18
19
#ifdef USE_ATOMICS_BUILTINS64
20
21
#if defined(__i386__) || defined(__x86_64__) \
22
|| defined(__s390__) || defined(__s390x__)
23
#define WEAK_MEMORY_ORDERING 0
24
#else
25
#define WEAK_MEMORY_ORDERING 1
26
#endif
27
28
/* Atomically load the 64-bit value at *mem with sequentially consistent
 * ordering (or the strongest ordering the selected builtin family offers).
 *
 * mem: pointer to the 64-bit atomic variable; returns its current value.
 */
APR_DECLARE(apr_uint64_t) apr_atomic_read64(volatile apr_uint64_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    /* C11-style builtin: a direct seq-cst atomic load. */
    return __atomic_load_n(mem, __ATOMIC_SEQ_CST);
#elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8
    /* No __sync_load() available => apr_atomic_add64(mem, 0) */
    /* Adding 0 is a read-modify-write no-op that gives a full barrier,
     * and stays atomic even when a plain 64-bit load would not be
     * (32-bit pointer targets). */
    return __sync_fetch_and_add(mem, 0);
#else
    /* Strongly ordered CPU (x86/s390) with 64-bit pointers: an aligned
     * plain load is atomic and needs no extra fencing for a read. */
    return *mem;
#endif
}
39
40
/* Atomically store val into *mem with release-or-stronger ordering.
 *
 * mem: pointer to the 64-bit atomic variable.
 * val: new value to publish.
 */
APR_DECLARE(void) apr_atomic_set64(volatile apr_uint64_t *mem, apr_uint64_t val)
{
#if HAVE__ATOMIC_BUILTINS
    __atomic_store_n(mem, val, __ATOMIC_SEQ_CST);
#elif WEAK_MEMORY_ORDERING || APR_SIZEOF_VOIDP < 8
    /* No __sync_store() available => apr_atomic_xchg64(mem, val) */
    /* __sync_lock_test_and_set is only an acquire barrier, so a full
     * barrier must be issued first to order prior writes before the
     * store; the old value is deliberately discarded. */
    __sync_synchronize();
    __sync_lock_test_and_set(mem, val);
#else
    /* Strongly ordered CPU with 64-bit pointers: an aligned plain
     * store is atomic. */
    *mem = val;
#endif
}
52
53
/* Atomically add val to *mem.
 *
 * mem: pointer to the 64-bit atomic variable.
 * val: amount to add.
 * Returns the value *mem held immediately before the addition.
 */
APR_DECLARE(apr_uint64_t) apr_atomic_add64(volatile apr_uint64_t *mem, apr_uint64_t val)
{
    apr_uint64_t prev;

#if HAVE__ATOMIC_BUILTINS
    prev = __atomic_fetch_add(mem, val, __ATOMIC_SEQ_CST);
#else
    prev = __sync_fetch_and_add(mem, val);
#endif
    return prev;
}
61
62
/* Atomically subtract val from *mem; no value is returned.
 *
 * mem: pointer to the 64-bit atomic variable.
 * val: amount to subtract.
 */
APR_DECLARE(void) apr_atomic_sub64(volatile apr_uint64_t *mem, apr_uint64_t val)
{
#if HAVE__ATOMIC_BUILTINS
    /* The builtin's old-value result is intentionally unused. */
    (void)__atomic_fetch_sub(mem, val, __ATOMIC_SEQ_CST);
#else
    (void)__sync_fetch_and_sub(mem, val);
#endif
}
70
71
/* Atomically increment *mem by one.
 *
 * mem: pointer to the 64-bit atomic variable.
 * Returns the value *mem held immediately before the increment
 * (fetch-then-add semantics, matching apr_atomic_inc32).
 */
APR_DECLARE(apr_uint64_t) apr_atomic_inc64(volatile apr_uint64_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_fetch_add(mem, 1, __ATOMIC_SEQ_CST);
#else
    return __sync_fetch_and_add(mem, 1);
#endif
}
79
80
/* Atomically decrement *mem by one.
 *
 * mem: pointer to the 64-bit atomic variable.
 * Returns zero if the value becomes zero as a result of this
 * decrement, non-zero otherwise (the APR atomic dec contract).
 *
 * BUG FIX: the builtins yield the new 64-bit value, but this function
 * returns int. Returning the value directly truncates it, so a new
 * value that is a non-zero multiple of 2^32 (e.g. 0x100000000) would
 * be reported as 0 — falsely signaling that the counter reached zero.
 * Comparing against zero in the full 64-bit domain before narrowing
 * preserves the documented zero/non-zero contract for all values.
 */
APR_DECLARE(int) apr_atomic_dec64(volatile apr_uint64_t *mem)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_sub_fetch(mem, 1, __ATOMIC_SEQ_CST) != 0;
#else
    return __sync_sub_and_fetch(mem, 1) != 0;
#endif
}
88
89
/* Atomic 64-bit compare-and-swap: if *mem equals cmp, store val.
 *
 * mem: pointer to the 64-bit atomic variable.
 * val: value to store on a successful comparison.
 * cmp: value *mem is expected to hold.
 * Returns the value *mem held before the operation; equal to cmp
 * exactly when the swap succeeded.
 */
APR_DECLARE(apr_uint64_t) apr_atomic_cas64(volatile apr_uint64_t *mem, apr_uint64_t val,
                                           apr_uint64_t cmp)
{
#if HAVE__ATOMIC_BUILTINS
    /* The builtin writes the observed value back through its second
     * argument, so use an explicit in/out slot seeded with cmp. */
    apr_uint64_t expected = cmp;
    (void)__atomic_compare_exchange_n(mem, &expected, val, 0,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    return expected;
#else
    return __sync_val_compare_and_swap(mem, cmp, val);
#endif
}
99
100
/* Atomically exchange *mem with val.
 *
 * mem: pointer to the 64-bit atomic variable.
 * val: new value to store.
 * Returns the value *mem held immediately before the exchange.
 */
APR_DECLARE(apr_uint64_t) apr_atomic_xchg64(volatile apr_uint64_t *mem, apr_uint64_t val)
{
#if HAVE__ATOMIC_BUILTINS
    return __atomic_exchange_n(mem, val, __ATOMIC_SEQ_CST);
#else
    /* __sync_lock_test_and_set is only an acquire barrier, so a full
     * barrier must precede it to order earlier writes before the swap. */
    __sync_synchronize();
    return __sync_lock_test_and_set(mem, val);
#endif
}
109
110
#endif
/* USE_ATOMICS_BUILTINS64 */
apr_arch_atomic.h
APR_DECLARE
const void apr_status_t(*) apr_status_t(* APR_DECLARE)(void) apr_pool_pre_cleanup_register(apr_pool_t *p
Definition
apr_pools.h:646
size
apr_size_t size
Definition
apr_allocator.h:115
cmp
apr_uint32_t apr_uint32_t cmp
Definition
apr_atomic.h:106
val
apr_uint32_t val
Definition
apr_atomic.h:66
mem
void * mem
Definition
apr_skiplist.h:88
Generated by
1.9.8