29 #ifndef _GLIBCXX_MUTEX
30 #define _GLIBCXX_MUTEX 1
32 #pragma GCC system_header
34 #if __cplusplus < 201103L
45 #if ! _GTHREAD_USE_MUTEX_TIMEDLOCK
49 #ifndef _GLIBCXX_HAVE_TLS
53 namespace std _GLIBCXX_VISIBILITY(default)
55 _GLIBCXX_BEGIN_NAMESPACE_VERSION
62 #ifdef _GLIBCXX_HAS_GTHREADS
// Common base for recursive_mutex: owns the native gthreads recursive
// mutex handle and manages its initialization/destruction.
65 class __recursive_mutex_base
68 typedef __gthread_recursive_mutex_t __native_type;
// Mutexes are neither copyable nor assignable.
70 __recursive_mutex_base(
const __recursive_mutex_base&) =
delete;
71 __recursive_mutex_base& operator=(
const __recursive_mutex_base&) =
delete;
// When a static initializer macro exists, the mutex can be
// default-constructed and needs no explicit init/destroy.
73 #ifdef __GTHREAD_RECURSIVE_MUTEX_INIT
74 __native_type _M_mutex = __GTHREAD_RECURSIVE_MUTEX_INIT;
76 __recursive_mutex_base() =
default;
// Otherwise initialize via the gthreads init function and destroy
// the native mutex in the destructor.
78 __native_type _M_mutex;
80 __recursive_mutex_base()
83 __GTHREAD_RECURSIVE_MUTEX_INIT_FUNCTION(&_M_mutex);
86 ~__recursive_mutex_base()
87 { __gthread_recursive_mutex_destroy(&_M_mutex); }
// recursive_mutex members (the class head is elided in this view):
// thin forwarding wrappers over the gthreads recursive-mutex API.
95 typedef __native_type* native_handle_type;
// lock(): a non-zero return from the native call is reported by
// throwing std::system_error.
106 int __e = __gthread_recursive_mutex_lock(&_M_mutex);
110 __throw_system_error(__e);
// try_lock(): the native trylock returns 0 on success, hence the
// negation to yield bool.
117 return !__gthread_recursive_mutex_trylock(&_M_mutex);
// unlock(): presumably only valid while held by this thread --
// standard mutex precondition (elided here).
124 __gthread_recursive_mutex_unlock(&_M_mutex);
// native_handle(): exposes a pointer to the underlying native mutex.
128 native_handle() noexcept
129 {
return &_M_mutex; }
132 #if _GTHREAD_USE_MUTEX_TIMEDLOCK
// CRTP helper shared by timed_mutex and recursive_timed_mutex.
// Implements the try_lock_for/try_lock_until logic on top of the
// derived class's _M_timedlock (and, where pthread_mutex_clocklock
// is available, _M_clocklock) hooks.
133 template<
typename _Derived>
134 class __timed_mutex_impl
// _M_try_lock_for: convert a relative timeout into an absolute
// deadline on __clock (selection of __clock is in elided lines).
137 template<
typename _Rep,
typename _Period>
141 #if _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
147 auto __rt = chrono::duration_cast<__clock::duration>(__rtime);
// NOTE(review): the round-up when the duration_cast truncates is
// in elided lines -- confirm against the full source.
150 return _M_try_lock_until(__clock::now() + __rt);
// Overload for system_clock deadlines: split the time point into
// whole seconds plus nanoseconds to build the POSIX-style timespec.
153 template<
typename _Duration>
155 _M_try_lock_until(
const chrono::time_point<chrono::system_clock,
158 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
159 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
161 __gthread_time_t __ts = {
162 static_cast<std::time_t
>(__s.time_since_epoch().count()),
163 static_cast<long>(__ns.count())
// Downcast to the derived mutex type to perform the timed lock.
166 return static_cast<_Derived*
>(
this)->_M_timedlock(__ts);
// Overload for steady_clock deadlines, available only when
// pthread_mutex_clocklock exists: lock against CLOCK_MONOTONIC so
// the wait tracks the steady clock directly.
169 #ifdef _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
170 template<
typename _Duration>
172 _M_try_lock_until(
const chrono::time_point<chrono::steady_clock,
175 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
176 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
178 __gthread_time_t __ts = {
179 static_cast<std::time_t
>(__s.time_since_epoch().count()),
180 static_cast<long>(__ns.count())
183 return static_cast<_Derived*
>(
this)->_M_clocklock(CLOCK_MONOTONIC,
// Generic overload for user-defined clocks: repeatedly wait for
// the remaining relative time, re-reading _Clock::now() each pass,
// because the user's clock need not advance in step with the clock
// used for the underlying wait.
188 template<
typename _Clock,
typename _Duration>
190 _M_try_lock_until(
const chrono::time_point<_Clock, _Duration>& __atime)
195 auto __now = _Clock::now();
197 auto __rtime = __atime - __now;
198 if (_M_try_lock_for(__rtime))
200 __now = _Clock::now();
201 }
while (__atime > __now);
208 :
private __mutex_base,
public __timed_mutex_impl<timed_mutex>
211 typedef __native_type* native_handle_type;
213 timed_mutex() =
default;
214 ~timed_mutex() =
default;
216 timed_mutex(
const timed_mutex&) =
delete;
217 timed_mutex& operator=(
const timed_mutex&) =
delete;
222 int __e = __gthread_mutex_lock(&_M_mutex);
226 __throw_system_error(__e);
233 return !__gthread_mutex_trylock(&_M_mutex);
236 template <
class _Rep,
class _Period>
238 try_lock_for(
const chrono::duration<_Rep, _Period>& __rtime)
239 {
return _M_try_lock_for(__rtime); }
241 template <
class _Clock,
class _Duration>
243 try_lock_until(
const chrono::time_point<_Clock, _Duration>& __atime)
244 {
return _M_try_lock_until(__atime); }
250 __gthread_mutex_unlock(&_M_mutex);
254 native_handle() noexcept
255 {
return &_M_mutex; }
258 friend class __timed_mutex_impl<timed_mutex>;
261 _M_timedlock(
const __gthread_time_t& __ts)
262 {
return !__gthread_mutex_timedlock(&_M_mutex, &__ts); }
264 #if _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
266 _M_clocklock(clockid_t clockid,
const __gthread_time_t& __ts)
267 {
return !pthread_mutex_clocklock(&_M_mutex, clockid, &__ts); }
// recursive_timed_mutex: same structure as timed_mutex but built on
// the recursive mutex base, so the owning thread may re-lock.
272 class recursive_timed_mutex
273 :
private __recursive_mutex_base,
274 public __timed_mutex_impl<recursive_timed_mutex>
277 typedef __native_type* native_handle_type;
279 recursive_timed_mutex() =
default;
280 ~recursive_timed_mutex() =
default;
// Non-copyable, non-assignable.
282 recursive_timed_mutex(
const recursive_timed_mutex&) =
delete;
283 recursive_timed_mutex& operator=(
const recursive_timed_mutex&) =
delete;
// lock(): throws std::system_error on a non-zero native result.
288 int __e = __gthread_recursive_mutex_lock(&_M_mutex);
292 __throw_system_error(__e);
// try_lock(): the native trylock returns 0 on success.
299 return !__gthread_recursive_mutex_trylock(&_M_mutex);
// Timed operations delegate to __timed_mutex_impl.
302 template <
class _Rep,
class _Period>
304 try_lock_for(
const chrono::duration<_Rep, _Period>& __rtime)
305 {
return _M_try_lock_for(__rtime); }
307 template <
class _Clock,
class _Duration>
309 try_lock_until(
const chrono::time_point<_Clock, _Duration>& __atime)
310 {
return _M_try_lock_until(__atime); }
316 __gthread_recursive_mutex_unlock(&_M_mutex);
320 native_handle() noexcept
321 {
return &_M_mutex; }
// Grant the CRTP helper access to the private hooks below.
324 friend class __timed_mutex_impl<recursive_timed_mutex>;
// Hooks for __timed_mutex_impl; true on success (native call == 0).
327 _M_timedlock(
const __gthread_time_t& __ts)
328 {
return !__gthread_recursive_mutex_timedlock(&_M_mutex, &__ts); }
330 #ifdef _GLIBCXX_USE_PTHREAD_MUTEX_CLOCKLOCK
332 _M_clocklock(clockid_t clockid,
const __gthread_time_t& __ts)
333 {
return !pthread_mutex_clocklock(&_M_mutex, clockid, &__ts); }
337 #else // !_GTHREAD_USE_MUTEX_TIMEDLOCK
// Fallback implementations for targets with no native timedlock:
// timed_mutex and recursive_timed_mutex are built from a mutex and
// a condition_variable (member declarations mostly elided here).
// timed_mutex state: a simple "is locked" flag guarded by the
// condition variable.
344 bool _M_locked =
false;
// lock(): block until the flag is clear (setting it happens in
// elided lines).
358 _M_cv.wait(__lk, [&]{
return !_M_locked; });
// try_lock_for/until: timed condvar waits with the same predicate;
// returning false (still locked at timeout) means failure.
372 template<
typename _Rep,
typename _Period>
377 if (!_M_cv.wait_for(__lk, __rtime, [&]{ return !_M_locked; }))
383 template<
typename _Clock,
typename _Duration>
388 if (!_M_cv.wait_until(__lk, __atime, [&]{ return !_M_locked; }))
// unlock() precondition: must currently be locked.
398 __glibcxx_assert( _M_locked );
// recursive_timed_mutex state: an owner id (declared in elided
// lines) plus a recursion count; count == 0 means unowned.
410 unsigned _M_count = 0;
// _Can_lock predicate: acquisition is possible when the mutex is
// free or already owned by the calling thread (recursive case).
417 operator()()
const noexcept
418 {
return _M_mx->_M_count == 0 || _M_mx->_M_owner == _M_caller; }
// lock(): wait until _Can_lock holds for this thread.
436 _Can_lock __can_lock{
this, __id};
438 _M_cv.wait(__lk, __can_lock);
// A saturated recursion counter cannot be incremented again.
440 __throw_system_error(EAGAIN);
// try_lock(): same predicate, but without blocking (elided).
449 _Can_lock __can_lock{
this, __id};
// try_lock_for/until: timed condvar waits on _Can_lock; a timeout
// while the predicate is still false means failure.
460 template<
typename _Rep,
typename _Period>
465 _Can_lock __can_lock{
this, __id};
467 if (!_M_cv.wait_for(__lk, __rtime, __can_lock))
476 template<
typename _Clock,
typename _Duration>
481 _Can_lock __can_lock{
this, __id};
483 if (!_M_cv.wait_until(__lk, __atime, __can_lock))
// unlock() precondition: held at least once by some thread.
497 __glibcxx_assert( _M_count > 0 );
507 #endif // _GLIBCXX_HAS_GTHREADS
// Helper for std::try_lock/std::lock: wrap a lockable in a
// unique_lock using try-to-lock semantics (body elided).
510 template<
typename _Lock>
512 __try_to_lock(_Lock& __l)
// Compile-time recursive implementation of std::try_lock: each
// level try-locks the _Idx-th element of the tuple, then recurses
// to _Idx + 1.  _Continue=false terminates the recursion.
515 template<
int _Idx,
bool _Continue = true>
516 struct __try_lock_impl
518 template<
typename... _Lock>
520 __do_try_lock(tuple<_Lock&...>& __locks,
int& __idx)
523 auto __lock = std::__try_to_lock(std::get<_Idx>(__locks));
524 if (__lock.owns_lock())
// Recurse while at least two lockables remain beyond _Idx;
// otherwise dispatch to the terminating specialization.
526 constexpr
bool __cont = _Idx + 2 <
sizeof...(_Lock);
527 using __try_locker = __try_lock_impl<_Idx + 1, __cont>;
528 __try_locker::__do_try_lock(__locks, __idx);
// Terminating specialization: handles the last lockable only.
536 struct __try_lock_impl<_Idx, false>
538 template<
typename... _Lock>
540 __do_try_lock(tuple<_Lock&...>& __locks,
int& __idx)
543 auto __lock = std::__try_to_lock(std::get<_Idx>(__locks));
544 if (__lock.owns_lock())
// std::try_lock: try to lock every argument without blocking.
// __idx reports the index of the first failure (management of
// __idx and ownership release are in elided lines).
563 template<
typename _Lock1,
typename _Lock2,
typename... _Lock3>
565 try_lock(_Lock1& __l1, _Lock2& __l2, _Lock3&... __l3)
568 auto __locks =
std::tie(__l1, __l2, __l3...);
569 __try_lock_impl<0>::__do_try_lock(__locks, __idx);
// std::lock: deadlock-avoiding lock of all arguments -- block on
// the first, then try-lock the rest; the retry loop that rotates
// on failure is in elided lines.
584 template<
typename _L1,
typename _L2,
typename... _L3>
586 lock(_L1& __l1, _L2& __l2, _L3&... __l3)
590 using __try_locker = __try_lock_impl<0,
sizeof...(_L3) != 0>;
593 auto __locks =
std::tie(__l2, __l3...);
594 __try_locker::__do_try_lock(__locks, __idx);
603 #if __cplusplus >= 201703L
604 #define __cpp_lib_scoped_lock 201703
// C++17 std::scoped_lock: RAII wrapper that locks any number of
// mutexes for its lifetime.  The primary template stores references
// to all mutexes in a tuple (locking via std::lock is elided).
610 template<
typename... _MutexTypes>
614 explicit scoped_lock(_MutexTypes&... __m) : _M_devices(
std::
tie(__m...))
// adopt_lock_t overload: adopt ownership of already-locked mutexes.
617 explicit scoped_lock(adopt_lock_t, _MutexTypes&... __m) noexcept
// Destructor: unlock every stored mutex via a fold expression.
622 { std::apply([](
auto&... __m) { (__m.unlock(), ...); }, _M_devices); }
// Non-copyable, non-assignable.
624 scoped_lock(
const scoped_lock&) =
delete;
625 scoped_lock& operator=(
const scoped_lock&) =
delete;
628 tuple<_MutexTypes&...> _M_devices;
// Explicit specialization for zero mutexes (elided head): nothing
// to lock, store, or unlock.
635 explicit scoped_lock() =
default;
636 explicit scoped_lock(adopt_lock_t) noexcept { }
637 ~scoped_lock() =
default;
639 scoped_lock(
const scoped_lock&) =
delete;
640 scoped_lock& operator=(
const scoped_lock&) =
delete;
// Single-mutex specialization: stores a plain reference and, like
// lock_guard, exposes the nested mutex_type alias.
643 template<
typename _Mutex>
644 class scoped_lock<_Mutex>
647 using mutex_type = _Mutex;
649 explicit scoped_lock(mutex_type& __m) : _M_device(__m)
650 { _M_device.lock(); }
// adopt_lock_t overload: the mutex is assumed already locked.
652 explicit scoped_lock(adopt_lock_t, mutex_type& __m) noexcept
// Destructor (head elided): release the single mutex.
657 { _M_device.unlock(); }
659 scoped_lock(
const scoped_lock&) =
delete;
660 scoped_lock& operator=(
const scoped_lock&) =
delete;
663 mutex_type& _M_device;
667 #ifdef _GLIBCXX_HAS_GTHREADS
// once_flag: wraps the native gthreads once-control object.
672 typedef __gthread_once_t __native_type;
673 __native_type _M_once = __GTHREAD_ONCE_INIT;
677 constexpr
once_flag() noexcept =
default;
684 template<
typename _Callable,
typename... _Args>
// Machinery for passing the C++ callable into the C-callable once
// proxy.  With TLS: thread-local pointers to the callable and a
// type-erased invoker.  Without TLS: a global functor protected by
// a lock set through __set_once_functor_lock_ptr.
690 #ifdef _GLIBCXX_HAVE_TLS
691 extern __thread
void* __once_callable;
692 extern __thread void (*__once_call)();
694 extern function<void()> __once_functor;
// C-linkage trampoline handed to __gthread_once.
703 extern "C" void __once_proxy(
void);
// std::call_once: invoke the callable exactly once across threads.
707 template<
typename _Callable,
typename... _Args>
// Bind the function and arguments into a local lambda.
713 auto __callable = [&] {
715 std::forward<_Args>(__args)...);
717 #ifdef _GLIBCXX_HAVE_TLS
// TLS path: __once_call reads the callable back through
// __once_callable (the address is stored in elided lines) and
// invokes it from inside __once_proxy.
719 __once_call = []{ (*(decltype(__callable)*)__once_callable)(); };
// Non-TLS path: copy the callable into the global functor and
// install the functor lock before running the once proxy.
722 __once_functor = __callable;
723 __set_once_functor_lock_ptr(&__functor_lock);
726 int __e = __gthread_once(&__once._M_once, &__once_proxy);
728 #ifndef _GLIBCXX_HAVE_TLS
// Release the functor lock once __gthread_once has returned.
730 __set_once_functor_lock_ptr(0);
// Pacify the static analyzer: the thread-local slots are dead
// after the call.
733 #ifdef __clang_analyzer__
735 __once_callable =
nullptr;
736 __once_call =
nullptr;
// A non-zero result from __gthread_once is reported as system_error.
740 __throw_system_error(__e);
742 #endif // _GLIBCXX_HAS_GTHREADS
745 _GLIBCXX_END_NAMESPACE_VERSION
750 #endif // _GLIBCXX_MUTEX