Blender V2.93 — atomic_ops_msvc.h: MSVC (Windows) implementations of Blender's atomic operations API.
/*
 * Adopted from jemalloc with this license:
 *
 * Copyright (C) 2002-2013 Jason Evans <jasone@canonware.com>.
 * All rights reserved.
 * Copyright (C) 2007-2012 Mozilla Foundation. All rights reserved.
 * Copyright (C) 2009-2013 Facebook, Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 1. Redistributions of source code must retain the above copyright notice(s),
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice(s),
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
28 
29 #ifndef __ATOMIC_OPS_MSVC_H__
30 #define __ATOMIC_OPS_MSVC_H__
31 
32 #include "atomic_ops_utils.h"
33 
34 #define NOGDI
35 #ifndef NOMINMAX
36 # define NOMINMAX
37 #endif
38 #define WIN32_LEAN_AND_MEAN
39 
40 #include <intrin.h>
41 #include <windows.h>
42 
43 #if defined(__clang__)
44 # pragma GCC diagnostic push
45 # pragma GCC diagnostic ignored "-Wincompatible-pointer-types"
46 #endif
47 
48 /* 64-bit operations. */
49 /* Unsigned */
51 {
52  return InterlockedExchangeAdd64((int64_t *)p, (int64_t)x) + x;
53 }
54 
56 {
57  return InterlockedExchangeAdd64((int64_t *)p, -((int64_t)x)) - x;
58 }
59 
61 {
62  return InterlockedCompareExchange64((int64_t *)v, _new, old);
63 }
64 
66 {
67  return InterlockedExchangeAdd64((int64_t *)p, (int64_t)x);
68 }
69 
71 {
72  return InterlockedExchangeAdd64((int64_t *)p, -((int64_t)x));
73 }
74 
75 /* Signed */
77 {
78  return InterlockedExchangeAdd64(p, x) + x;
79 }
80 
82 {
83  return InterlockedExchangeAdd64(p, -x) - x;
84 }
85 
87 {
88  return InterlockedCompareExchange64(v, _new, old);
89 }
90 
92 {
93  return InterlockedExchangeAdd64(p, x);
94 }
95 
97 {
98  return InterlockedExchangeAdd64(p, -x);
99 }
100 
101 /******************************************************************************/
102 /* 32-bit operations. */
103 /* Unsigned */
105 {
106  return InterlockedExchangeAdd(p, x) + x;
107 }
108 
110 {
111  return InterlockedExchangeAdd(p, -((int32_t)x)) - x;
112 }
113 
115 {
116  return InterlockedCompareExchange((long *)v, _new, old);
117 }
118 
120 {
121  return InterlockedExchangeAdd(p, x);
122 }
123 
125 {
126  return InterlockedOr((long *)p, x);
127 }
128 
130 {
131  return InterlockedAnd((long *)p, x);
132 }
133 
134 /* Signed */
136 {
137  return InterlockedExchangeAdd((long *)p, x) + x;
138 }
139 
141 {
142  return InterlockedExchangeAdd((long *)p, -x) - x;
143 }
144 
146 {
147  return InterlockedCompareExchange((long *)v, _new, old);
148 }
149 
151 {
152  return InterlockedExchangeAdd((long *)p, x);
153 }
154 
156 {
157  return InterlockedOr((long *)p, x);
158 }
159 
161 {
162  return InterlockedAnd((long *)p, x);
163 }
164 
165 /******************************************************************************/
166 /* 16-bit operations. */
167 
168 /* Signed */
170 {
171  return InterlockedOr16((short *)p, x);
172 }
173 
175 {
176  return InterlockedAnd16((short *)p, x);
177 }
178 
179 /******************************************************************************/
180 /* 8-bit operations. */
181 
182 /* Unsigned */
183 #pragma intrinsic(_InterlockedAnd8)
185 {
186 #if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
187  return InterlockedAnd8((char *)p, (char)b);
188 #else
189  return _InterlockedAnd8((char *)p, (char)b);
190 #endif
191 }
192 
193 #pragma intrinsic(_InterlockedOr8)
195 {
196 #if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
197  return InterlockedOr8((char *)p, (char)b);
198 #else
199  return _InterlockedOr8((char *)p, (char)b);
200 #endif
201 }
202 
203 /* Signed */
204 #pragma intrinsic(_InterlockedAnd8)
206 {
207 #if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
208  return InterlockedAnd8((char *)p, (char)b);
209 #else
210  return _InterlockedAnd8((char *)p, (char)b);
211 #endif
212 }
213 
214 #pragma intrinsic(_InterlockedOr8)
216 {
217 #if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
218  return InterlockedOr8((char *)p, (char)b);
219 #else
220  return _InterlockedOr8((char *)p, (char)b);
221 #endif
222 }
223 
224 #if defined(__clang__)
225 # pragma GCC diagnostic pop
226 #endif
227 
228 #endif /* __ATOMIC_OPS_MSVC_H__ */
ATOMIC_INLINE uint32_t atomic_fetch_and_or_uint32(uint32_t *p, uint32_t x)
ATOMIC_INLINE int32_t atomic_add_and_fetch_int32(int32_t *p, int32_t x)
ATOMIC_INLINE uint64_t atomic_fetch_and_sub_uint64(uint64_t *p, uint64_t x)
ATOMIC_INLINE int64_t atomic_sub_and_fetch_int64(int64_t *p, int64_t x)
ATOMIC_INLINE uint8_t atomic_fetch_and_and_uint8(uint8_t *p, uint8_t b)
ATOMIC_INLINE int64_t atomic_cas_int64(int64_t *v, int64_t old, int64_t _new)
ATOMIC_INLINE uint32_t atomic_fetch_and_add_uint32(uint32_t *p, uint32_t x)
ATOMIC_INLINE uint8_t atomic_fetch_and_or_uint8(uint8_t *p, uint8_t b)
ATOMIC_INLINE int32_t atomic_fetch_and_or_int32(int32_t *p, int32_t x)
ATOMIC_INLINE uint32_t atomic_fetch_and_and_uint32(uint32_t *p, uint32_t x)
ATOMIC_INLINE int64_t atomic_fetch_and_add_int64(int64_t *p, int64_t x)
ATOMIC_INLINE uint32_t atomic_add_and_fetch_uint32(uint32_t *p, uint32_t x)
ATOMIC_INLINE int32_t atomic_fetch_and_add_int32(int32_t *p, int32_t x)
ATOMIC_INLINE uint64_t atomic_cas_uint64(uint64_t *v, uint64_t old, uint64_t _new)
ATOMIC_INLINE uint64_t atomic_fetch_and_add_uint64(uint64_t *p, uint64_t x)
ATOMIC_INLINE uint32_t atomic_sub_and_fetch_uint32(uint32_t *p, uint32_t x)
ATOMIC_INLINE int64_t atomic_add_and_fetch_int64(int64_t *p, int64_t x)
ATOMIC_INLINE uint64_t atomic_add_and_fetch_uint64(uint64_t *p, uint64_t x)
ATOMIC_INLINE uint64_t atomic_sub_and_fetch_uint64(uint64_t *p, uint64_t x)
ATOMIC_INLINE int32_t atomic_cas_int32(int32_t *v, int32_t old, int32_t _new)
ATOMIC_INLINE int32_t atomic_fetch_and_and_int32(int32_t *p, int32_t x)
ATOMIC_INLINE int8_t atomic_fetch_and_or_int8(int8_t *p, int8_t b)
ATOMIC_INLINE int16_t atomic_fetch_and_and_int16(int16_t *p, int16_t x)
ATOMIC_INLINE int32_t atomic_sub_and_fetch_int32(int32_t *p, int32_t x)
ATOMIC_INLINE int16_t atomic_fetch_and_or_int16(int16_t *p, int16_t x)
ATOMIC_INLINE int8_t atomic_fetch_and_and_int8(int8_t *p, int8_t b)
ATOMIC_INLINE int64_t atomic_fetch_and_sub_int64(int64_t *p, int64_t x)
ATOMIC_INLINE uint32_t atomic_cas_uint32(uint32_t *v, uint32_t old, uint32_t _new)
#define ATOMIC_INLINE
ATTR_WARN_UNUSED_RESULT const BMVert * v
signed short int16_t
Definition: stdint.h:79
unsigned int uint32_t
Definition: stdint.h:83
__int64 int64_t
Definition: stdint.h:92
signed int int32_t
Definition: stdint.h:80
unsigned char uint8_t
Definition: stdint.h:81
unsigned __int64 uint64_t
Definition: stdint.h:93
signed char int8_t
Definition: stdint.h:78