Lines matching refs:lock in OpenSSL's crypto/threads_win.c

41     SRWLOCK lock;  member
78 CRYPTO_RCU_LOCK *lock; member
148 static struct rcu_qp *allocate_new_qp_group(struct rcu_lock_st *lock, in allocate_new_qp_group() argument
154 lock->group_count = count; in allocate_new_qp_group()
203 void ossl_rcu_lock_free(CRYPTO_RCU_LOCK *lock) in ossl_rcu_lock_free() argument
205 CRYPTO_THREAD_lock_free(lock->rw_lock); in ossl_rcu_lock_free()
206 OPENSSL_free(lock->qp_group); in ossl_rcu_lock_free()
207 ossl_crypto_condvar_free(&lock->alloc_signal); in ossl_rcu_lock_free()
208 ossl_crypto_condvar_free(&lock->prior_signal); in ossl_rcu_lock_free()
209 ossl_crypto_mutex_free(&lock->alloc_lock); in ossl_rcu_lock_free()
210 ossl_crypto_mutex_free(&lock->prior_lock); in ossl_rcu_lock_free()
211 ossl_crypto_mutex_free(&lock->write_lock); in ossl_rcu_lock_free()
212 OPENSSL_free(lock); in ossl_rcu_lock_free()
216 static ossl_inline struct rcu_qp *get_hold_current_qp(CRYPTO_RCU_LOCK *lock) in get_hold_current_qp() argument
224-225 CRYPTO_atomic_load_int((int *)&lock->reader_idx, (int *)&qp_idx, lock->rw_lock); in get_hold_current_qp()
226-227 CRYPTO_atomic_add64(&lock->qp_group[qp_idx].users, VAL_READER, &tmp64, lock->rw_lock); in get_hold_current_qp()
228-229 CRYPTO_atomic_load_int((int *)&lock->reader_idx, (int *)&tmp, lock->rw_lock); in get_hold_current_qp()
232-233 CRYPTO_atomic_add64(&lock->qp_group[qp_idx].users, -VAL_READER, &tmp64, lock->rw_lock); in get_hold_current_qp()
236 return &lock->qp_group[qp_idx]; in get_hold_current_qp()
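The four matched statements above form the reader's quiescence-point (qp) acquisition loop: load reader_idx, bump that qp's user count, then re-check reader_idx and back out if a writer moved it in between. A minimal sketch of the same pattern using C11 atomics rather than the CRYPTO_atomic_* wrappers; the struct names and layout here are illustrative only, not the real lock state:

    #include <stdatomic.h>
    #include <stdint.h>

    #define VAL_READER ((uint64_t)1)

    struct qp_model {
        _Atomic uint64_t users;             /* reader count plus writer id bits */
    };

    struct rcu_lock_model {
        _Atomic int reader_idx;             /* qp that new readers should join */
        struct qp_model *qp_group;
    };

    static struct qp_model *model_get_hold_current_qp(struct rcu_lock_model *lock)
    {
        for (;;) {
            int qp_idx = atomic_load(&lock->reader_idx);

            /* optimistically register on the qp we just observed */
            atomic_fetch_add(&lock->qp_group[qp_idx].users, VAL_READER);

            /* if a writer swapped reader_idx in the meantime, back out and retry */
            if (atomic_load(&lock->reader_idx) == qp_idx)
                return &lock->qp_group[qp_idx];

            atomic_fetch_sub(&lock->qp_group[qp_idx].users, VAL_READER);
        }
    }
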
248 void ossl_rcu_read_lock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_read_lock() argument
253 CRYPTO_THREAD_LOCAL *lkey = ossl_lib_ctx_get_rcukey(lock->ctx); in ossl_rcu_read_lock()
265 ossl_init_thread_start(NULL, lock->ctx, ossl_rcu_free_local_data); in ossl_rcu_read_lock()
272 if (data->thread_qps[i].lock == lock) in ossl_rcu_read_lock()
281 data->thread_qps[available_qp].qp = get_hold_current_qp(lock); in ossl_rcu_read_lock()
283 data->thread_qps[available_qp].lock = lock; in ossl_rcu_read_lock()
286 void ossl_rcu_write_lock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_write_lock() argument
288 ossl_crypto_mutex_lock(lock->write_lock); in ossl_rcu_write_lock()
291 void ossl_rcu_write_unlock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_write_unlock() argument
293 ossl_crypto_mutex_unlock(lock->write_lock); in ossl_rcu_write_unlock()
296 void ossl_rcu_read_unlock(CRYPTO_RCU_LOCK *lock) in ossl_rcu_read_unlock() argument
298 CRYPTO_THREAD_LOCAL *lkey = ossl_lib_ctx_get_rcukey(lock->ctx); in ossl_rcu_read_unlock()
306 if (data->thread_qps[i].lock == lock) { in ossl_rcu_read_unlock()
311 lock->rw_lock); in ossl_rcu_read_unlock()
314 data->thread_qps[i].lock = NULL; in ossl_rcu_read_unlock()
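Read-side callers simply bracket the critical section with ossl_rcu_read_lock()/ossl_rcu_read_unlock(); the per-thread qp bookkeeping above is hidden behind those calls. A hedged usage sketch, assuming the internal rcu.h header declares this API and using a hypothetical shared struct cfg (real callers would read the pointer through the library's RCU dereference helper rather than a plain load):

    #include <openssl/crypto.h>         /* OPENSSL_free(), used in the later sketches */
    #include "internal/rcu.h"           /* assumed location of the CRYPTO_RCU_LOCK API */

    struct cfg { int value; };          /* hypothetical shared object */

    static struct cfg *shared_cfg;      /* published by writers */

    static int read_value(CRYPTO_RCU_LOCK *rcu)
    {
        int v;

        ossl_rcu_read_lock(rcu);
        v = shared_cfg->value;          /* stable until the matching unlock */
        ossl_rcu_read_unlock(rcu);
        return v;
    }
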
325 static struct rcu_qp *update_qp(CRYPTO_RCU_LOCK *lock) in update_qp() argument
332 ossl_crypto_mutex_lock(lock->alloc_lock); in update_qp()
338 while (lock->group_count - lock->writers_alloced < 2) in update_qp()
340 ossl_crypto_condvar_wait(lock->alloc_signal, lock->alloc_lock); in update_qp()
342 current_idx = lock->current_alloc_idx; in update_qp()
345 lock->writers_alloced++; in update_qp()
348-349 lock->current_alloc_idx = (lock->current_alloc_idx + 1) % lock->group_count; in update_qp()
352 new_id = VAL_ID(lock->id_ctr); in update_qp()
353 lock->id_ctr++; in update_qp()
361-362 CRYPTO_atomic_and(&lock->qp_group[current_idx].users, ID_MASK, &tmp64, lock->rw_lock); in update_qp()
363-364 CRYPTO_atomic_add64(&lock->qp_group[current_idx].users, new_id, &tmp64, lock->rw_lock); in update_qp()
367 tmp = lock->current_alloc_idx; in update_qp()
368 InterlockedExchange((LONG volatile *)&lock->reader_idx, tmp); in update_qp()
371 ossl_crypto_condvar_broadcast(lock->alloc_signal); in update_qp()
372 ossl_crypto_mutex_unlock(lock->alloc_lock); in update_qp()
373 return &lock->qp_group[current_idx]; in update_qp()
376 static void retire_qp(CRYPTO_RCU_LOCK *lock, in retire_qp() argument
379 ossl_crypto_mutex_lock(lock->alloc_lock); in retire_qp()
380 lock->writers_alloced--; in retire_qp()
381 ossl_crypto_condvar_broadcast(lock->alloc_signal); in retire_qp()
382 ossl_crypto_mutex_unlock(lock->alloc_lock); in retire_qp()
386 void ossl_synchronize_rcu(CRYPTO_RCU_LOCK *lock) in ossl_synchronize_rcu() argument
393 cb_items = InterlockedExchangePointer((void * volatile *)&lock->cb_items, in ossl_synchronize_rcu()
396 qp = update_qp(lock); in ossl_synchronize_rcu()
400 CRYPTO_atomic_load(&qp->users, &count, lock->rw_lock); in ossl_synchronize_rcu()
404 ossl_crypto_mutex_lock(lock->prior_lock); in ossl_synchronize_rcu()
405 while (lock->next_to_retire != ID_VAL(count)) in ossl_synchronize_rcu()
406 ossl_crypto_condvar_wait(lock->prior_signal, lock->prior_lock); in ossl_synchronize_rcu()
408 lock->next_to_retire++; in ossl_synchronize_rcu()
409 ossl_crypto_condvar_broadcast(lock->prior_signal); in ossl_synchronize_rcu()
410 ossl_crypto_mutex_unlock(lock->prior_lock); in ossl_synchronize_rcu()
412 retire_qp(lock, qp); in ossl_synchronize_rcu()
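ossl_synchronize_rcu() is the grace-period wait: update_qp() redirects reader_idx to a fresh qp so new readers land elsewhere, the writer waits for the old qp's users count to drain, and the prior_lock/next_to_retire handshake keeps concurrent writers retiring in order. Continuing the reader sketch above, a hedged writer-side sketch (publication through a plain store here; real code would go through the RCU assign helper):

    static void replace_cfg(CRYPTO_RCU_LOCK *rcu, struct cfg *replacement)
    {
        struct cfg *old;

        ossl_rcu_write_lock(rcu);       /* serializes writers only */
        old = shared_cfg;
        shared_cfg = replacement;       /* publish the new version */
        ossl_rcu_write_unlock(rcu);

        ossl_synchronize_rcu(rcu);      /* wait until no reader can still hold old */
        OPENSSL_free(old);
    }
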
427 int ossl_rcu_call(CRYPTO_RCU_LOCK *lock, rcu_cb_fn cb, void *data) in ossl_rcu_call() argument
437 new->next = InterlockedExchangePointer((void * volatile *)&lock->cb_items, in ossl_rcu_call()
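ossl_rcu_call() queues a callback on lock->cb_items instead of blocking; the list is picked up by the InterlockedExchangePointer at line 393 and run once that grace period completes. A hedged sketch of the deferred-free variant, assuming rcu_cb_fn is the usual void (*)(void *) callback type and continuing the sketches above:

    static void free_cfg_cb(void *data)
    {
        OPENSSL_free(data);
    }

    static void replace_cfg_deferred(CRYPTO_RCU_LOCK *rcu, struct cfg *replacement)
    {
        struct cfg *old;

        ossl_rcu_write_lock(rcu);
        old = shared_cfg;
        shared_cfg = replacement;
        ossl_rcu_write_unlock(rcu);

        /* old is freed after a later grace period rather than here */
        ossl_rcu_call(rcu, free_cfg_cb, old);
    }
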
455 CRYPTO_RWLOCK *lock; in CRYPTO_THREAD_lock_new() local
459 if ((lock = OPENSSL_zalloc(sizeof(CRYPTO_win_rwlock))) == NULL) in CRYPTO_THREAD_lock_new()
462 rwlock = lock; in CRYPTO_THREAD_lock_new()
463 InitializeSRWLock(&rwlock->lock); in CRYPTO_THREAD_lock_new()
466 if ((lock = OPENSSL_zalloc(sizeof(CRITICAL_SECTION))) == NULL) in CRYPTO_THREAD_lock_new()
472 if (!InitializeCriticalSectionAndSpinCount(lock, 0x400)) { in CRYPTO_THREAD_lock_new()
473 OPENSSL_free(lock); in CRYPTO_THREAD_lock_new()
477 InitializeCriticalSection(lock); in CRYPTO_THREAD_lock_new()
481 return lock; in CRYPTO_THREAD_lock_new()
484 __owur int CRYPTO_THREAD_read_lock(CRYPTO_RWLOCK *lock) in CRYPTO_THREAD_read_lock() argument
487 CRYPTO_win_rwlock *rwlock = lock; in CRYPTO_THREAD_read_lock()
489 AcquireSRWLockShared(&rwlock->lock); in CRYPTO_THREAD_read_lock()
491 EnterCriticalSection(lock); in CRYPTO_THREAD_read_lock()
496 __owur int CRYPTO_THREAD_write_lock(CRYPTO_RWLOCK *lock) in CRYPTO_THREAD_write_lock() argument
499 CRYPTO_win_rwlock *rwlock = lock; in CRYPTO_THREAD_write_lock()
501 AcquireSRWLockExclusive(&rwlock->lock); in CRYPTO_THREAD_write_lock()
504 EnterCriticalSection(lock); in CRYPTO_THREAD_write_lock()
509 int CRYPTO_THREAD_unlock(CRYPTO_RWLOCK *lock) in CRYPTO_THREAD_unlock() argument
512 CRYPTO_win_rwlock *rwlock = lock; in CRYPTO_THREAD_unlock()
516 ReleaseSRWLockExclusive(&rwlock->lock); in CRYPTO_THREAD_unlock()
518 ReleaseSRWLockShared(&rwlock->lock); in CRYPTO_THREAD_unlock()
521 LeaveCriticalSection(lock); in CRYPTO_THREAD_unlock()
526 void CRYPTO_THREAD_lock_free(CRYPTO_RWLOCK *lock) in CRYPTO_THREAD_lock_free() argument
528 if (lock == NULL) in CRYPTO_THREAD_lock_free()
532 DeleteCriticalSection(lock); in CRYPTO_THREAD_lock_free()
534 OPENSSL_free(lock); in CRYPTO_THREAD_lock_free()
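The CRYPTO_THREAD_* entries above are the public rwlock wrapper: an SRWLOCK on the reader/writer path, a CRITICAL_SECTION (always exclusive) on the fallback path. A short caller-side sketch against the public API; the table names are placeholders:

    #include <openssl/crypto.h>

    static CRYPTO_RWLOCK *table_lock;   /* placeholder globals for the sketch */
    static int table_entries;

    static int table_init(void)
    {
        table_lock = CRYPTO_THREAD_lock_new();
        return table_lock != NULL;
    }

    static int table_count(void)
    {
        int n;

        if (!CRYPTO_THREAD_read_lock(table_lock))
            return -1;
        n = table_entries;
        CRYPTO_THREAD_unlock(table_lock);
        return n;
    }

    static int table_add(void)
    {
        if (!CRYPTO_THREAD_write_lock(table_lock))
            return 0;
        table_entries++;
        return CRYPTO_THREAD_unlock(table_lock);
    }

    static void table_cleanup(void)
    {
        CRYPTO_THREAD_lock_free(table_lock);    /* NULL-safe, see line 528 */
    }
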
549 LONG volatile *lock = (LONG *)once; in CRYPTO_THREAD_run_once() local
552 if (*lock == ONCE_DONE) in CRYPTO_THREAD_run_once()
556 result = InterlockedCompareExchange(lock, ONCE_ININIT, ONCE_UNINITED); in CRYPTO_THREAD_run_once()
559 *lock = ONCE_DONE; in CRYPTO_THREAD_run_once()
564 return (*lock == ONCE_DONE); in CRYPTO_THREAD_run_once()
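The CRYPTO_THREAD_run_once() lines above implement a small ONCE_UNINITED -> ONCE_ININIT -> ONCE_DONE state machine on top of InterlockedCompareExchange. Caller-side usage goes through the public API; the init target below is a placeholder:

    #include <openssl/crypto.h>

    static CRYPTO_ONCE once = CRYPTO_ONCE_STATIC_INIT;
    static CRYPTO_RWLOCK *global_lock;

    static void do_init(void)               /* runs exactly once across all threads */
    {
        global_lock = CRYPTO_THREAD_lock_new();
    }

    static CRYPTO_RWLOCK *get_global_lock(void)
    {
        if (!CRYPTO_THREAD_run_once(&once, do_init))
            return NULL;
        return global_lock;
    }
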
626 int CRYPTO_atomic_add(int *val, int amount, int *ret, CRYPTO_RWLOCK *lock) in CRYPTO_atomic_add() argument
634 CRYPTO_RWLOCK *lock) in CRYPTO_atomic_add64() argument
637 if (lock == NULL || !CRYPTO_THREAD_write_lock(lock)) in CRYPTO_atomic_add64()
642 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_add64()
653 CRYPTO_RWLOCK *lock) in CRYPTO_atomic_and() argument
656 if (lock == NULL || !CRYPTO_THREAD_write_lock(lock)) in CRYPTO_atomic_and()
661 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_and()
672 CRYPTO_RWLOCK *lock) in CRYPTO_atomic_or() argument
675 if (lock == NULL || !CRYPTO_THREAD_write_lock(lock)) in CRYPTO_atomic_or()
680 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_or()
690 int CRYPTO_atomic_load(uint64_t *val, uint64_t *ret, CRYPTO_RWLOCK *lock) in CRYPTO_atomic_load() argument
693 if (lock == NULL || !CRYPTO_THREAD_read_lock(lock)) in CRYPTO_atomic_load()
696 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_load()
706 int CRYPTO_atomic_store(uint64_t *dst, uint64_t val, CRYPTO_RWLOCK *lock) in CRYPTO_atomic_store() argument
709 if (lock == NULL || !CRYPTO_THREAD_read_lock(lock)) in CRYPTO_atomic_store()
712 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_store()
722 int CRYPTO_atomic_load_int(int *val, int *ret, CRYPTO_RWLOCK *lock) in CRYPTO_atomic_load_int() argument
725 if (lock == NULL || !CRYPTO_THREAD_read_lock(lock)) in CRYPTO_atomic_load_int()
728 if (!CRYPTO_THREAD_unlock(lock)) in CRYPTO_atomic_load_int()
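The CRYPTO_atomic_* entries show the lock-based fallback path: when native atomics are not used, the CRYPTO_RWLOCK argument is taken around the plain operation (write lock for add/and/or, read lock for loads), and a NULL lock makes the call fail. Caller-side sketch; the counter and flag variables are placeholders:

    #include <stdint.h>
    #include <openssl/crypto.h>

    static int refcount;
    static uint64_t flags;
    static CRYPTO_RWLOCK *fallback_lock;    /* only taken on the fallback path */

    static int bump_refcount(void)
    {
        int new_value;

        if (!CRYPTO_atomic_add(&refcount, 1, &new_value, fallback_lock))
            return -1;
        return new_value;
    }

    static int read_flags(uint64_t *out)
    {
        return CRYPTO_atomic_load(&flags, out, fallback_lock);
    }
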