/* Atomic functions: similar to pycore_atomic.h, but don't need
   to declare variables as atomic.

   Py_ssize_t type:

   * value = _Py_atomic_size_get(&var)
   * _Py_atomic_size_set(&var, value)

   Use sequentially-consistent ordering (__ATOMIC_SEQ_CST memory order):
   enforce total ordering with all other atomic functions.
*/
#ifndef Py_ATOMIC_FUNC_H
#define Py_ATOMIC_FUNC_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#if defined(_MSC_VER)
#  include <intrin.h>             // _InterlockedExchange()
#endif


// Use builtin atomic operations in GCC >= 4.7 and clang
#ifdef HAVE_BUILTIN_ATOMIC

static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
{
    return __atomic_load_n(var, __ATOMIC_SEQ_CST);
}

static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
{
    __atomic_store_n(var, value, __ATOMIC_SEQ_CST);
}

#elif defined(_MSC_VER)

static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
{
#if SIZEOF_VOID_P == 8
    Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
    volatile __int64 *volatile_var = (volatile __int64 *)var;
    __int64 old;
    do {
        old = *volatile_var;
    } while(_InterlockedCompareExchange64(volatile_var, old, old) != old);
#else
    Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
    volatile long *volatile_var = (volatile long *)var;
    long old;
    do {
        old = *volatile_var;
    } while(_InterlockedCompareExchange(volatile_var, old, old) != old);
#endif
    return old;
}

static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
{
#if SIZEOF_VOID_P == 8
    Py_BUILD_ASSERT(sizeof(__int64) == sizeof(*var));
    volatile __int64 *volatile_var = (volatile __int64 *)var;
    _InterlockedExchange64(volatile_var, value);
#else
    Py_BUILD_ASSERT(sizeof(long) == sizeof(*var));
    volatile long *volatile_var = (volatile long *)var;
    _InterlockedExchange(volatile_var, value);
#endif
}

#else
// Fallback implementation using volatile

static inline Py_ssize_t _Py_atomic_size_get(Py_ssize_t *var)
{
    volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
    return *volatile_var;
}

static inline void _Py_atomic_size_set(Py_ssize_t *var, Py_ssize_t value)
{
    volatile Py_ssize_t *volatile_var = (volatile Py_ssize_t *)var;
    *volatile_var = value;
}
#endif

#ifdef __cplusplus
}
#endif
#endif  /* Py_ATOMIC_FUNC_H */
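
/* Usage sketch (illustrative only, not part of the original header).
   It assumes a Py_BUILD_CORE translation unit that includes this header;
   the counter name and surrounding function are hypothetical.  It only
   demonstrates the two functions declared above. */
#if 0
static Py_ssize_t example_counter = 0;   /* shared between threads */

static void
example_bump_counter(void)
{
    /* Sequentially-consistent atomic load of the shared counter. */
    Py_ssize_t value = _Py_atomic_size_get(&example_counter);

    /* Sequentially-consistent atomic store.  Note that a separate
       get followed by set is NOT an atomic increment: another thread
       may update the counter in between.  This header only provides
       plain atomic load and store, not read-modify-write operations. */
    _Py_atomic_size_set(&example_counter, value + 1);
}
#endif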