
/*
 * Copyright (C) NGINX, Inc.
 */

#ifndef _NXT_NVBCQ_H_INCLUDED_
#define _NXT_NVBCQ_H_INCLUDED_


/* Numeric VBart Circular Queue */

#define NXT_NVBCQ_SIZE  16384

typedef uint32_t nxt_nvbcq_atomic_t;

/*
 * Fixed-capacity lock-free circular queue of small integers.
 *
 * "head" is the consumer index, "tail" the producer index; both are
 * free-running counters mapped onto "entries" modulo NXT_NVBCQ_SIZE.
 * A slot holding NXT_NVBCQ_SIZE is empty; any value below that is a
 * live entry.
 *
 * NOTE(review): placing the entries array between "head" and "tail"
 * presumably keeps the two contended indices on separate cache lines —
 * confirm before reordering the members.
 */
struct nxt_nvbcq_s {
    nxt_nvbcq_atomic_t  head;
    nxt_nvbcq_atomic_t  entries[NXT_NVBCQ_SIZE];
    nxt_nvbcq_atomic_t  tail;
};

typedef struct nxt_nvbcq_s nxt_nvbcq_t;

25 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_head(nxt_nvbcq_t const volatile * q)26 nxt_nvbcq_head(nxt_nvbcq_t const volatile *q)
27 {
28 return q->head;
29 }
30
31
32 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_tail(nxt_nvbcq_t const volatile * q)33 nxt_nvbcq_tail(nxt_nvbcq_t const volatile *q)
34 {
35 return q->tail;
36 }
37
38
39 static inline void
nxt_nvbcq_tail_cmp_inc(nxt_nvbcq_t volatile * q,nxt_nvbcq_atomic_t t)40 nxt_nvbcq_tail_cmp_inc(nxt_nvbcq_t volatile *q, nxt_nvbcq_atomic_t t)
41 {
42 nxt_atomic_cmp_set(&q->tail, t, t + 1);
43 }
44
45
46 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_index(nxt_nvbcq_t const volatile * q,nxt_nvbcq_atomic_t i)47 nxt_nvbcq_index(nxt_nvbcq_t const volatile *q, nxt_nvbcq_atomic_t i)
48 {
49 return i % NXT_NVBCQ_SIZE;
50 }
51
52
53 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_map(nxt_nvbcq_t const volatile * q,nxt_nvbcq_atomic_t i)54 nxt_nvbcq_map(nxt_nvbcq_t const volatile *q, nxt_nvbcq_atomic_t i)
55 {
56 return i % NXT_NVBCQ_SIZE;
57 }
58
59
60 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_empty(nxt_nvbcq_t const volatile * q)61 nxt_nvbcq_empty(nxt_nvbcq_t const volatile *q)
62 {
63 return NXT_NVBCQ_SIZE;
64 }
65
66
67 static inline void
nxt_nvbcq_init(nxt_nvbcq_t volatile * q)68 nxt_nvbcq_init(nxt_nvbcq_t volatile *q)
69 {
70 nxt_nvbcq_atomic_t i;
71
72 q->head = 0;
73
74 for (i = 0; i < NXT_NVBCQ_SIZE; i++) {
75 q->entries[i] = NXT_NVBCQ_SIZE;
76 }
77
78 q->tail = NXT_NVBCQ_SIZE;
79 }
80
81
82 static inline void
nxt_nvbcq_enqueue(nxt_nvbcq_t volatile * q,nxt_nvbcq_atomic_t val)83 nxt_nvbcq_enqueue(nxt_nvbcq_t volatile *q, nxt_nvbcq_atomic_t val)
84 {
85 nxt_nvbcq_atomic_t t, h, i;
86
87 t = nxt_nvbcq_tail(q);
88 h = t - NXT_NVBCQ_SIZE;
89
90 for ( ;; ) {
91 i = nxt_nvbcq_map(q, t);
92
93 if (q->entries[i] == NXT_NVBCQ_SIZE
94 && nxt_atomic_cmp_set(&q->entries[i], NXT_NVBCQ_SIZE, val))
95 {
96 nxt_nvbcq_tail_cmp_inc(q, t);
97 return;
98 }
99
100 if ((t - h) == NXT_NVBCQ_SIZE) {
101 h = nxt_nvbcq_head(q);
102
103 if ((t - h) == NXT_NVBCQ_SIZE) {
104 return;
105 }
106 }
107
108 t++;
109 }
110 }
111
112
113 static inline nxt_nvbcq_atomic_t
nxt_nvbcq_dequeue(nxt_nvbcq_t volatile * q)114 nxt_nvbcq_dequeue(nxt_nvbcq_t volatile *q)
115 {
116 nxt_nvbcq_atomic_t h, t, i, e;
117
118 h = nxt_nvbcq_head(q);
119 t = h + NXT_NVBCQ_SIZE;
120
121 for ( ;; ) {
122 i = nxt_nvbcq_map(q, h);
123 e = q->entries[i];
124
125 if (e < NXT_NVBCQ_SIZE
126 && nxt_atomic_cmp_set(&q->entries[i], e, NXT_NVBCQ_SIZE))
127 {
128 nxt_atomic_cmp_set(&q->head, h, h + 1);
129
130 return e;
131 }
132
133 if ((t - h) == NXT_NVBCQ_SIZE) {
134 t = nxt_nvbcq_tail(q);
135
136 if ((t - h) == NXT_NVBCQ_SIZE) {
137 return NXT_NVBCQ_SIZE;
138 }
139 }
140
141 h++;
142 }
143 }
144
145
#endif /* _NXT_NVBCQ_H_INCLUDED_ */