
/*
 * Copyright (C) NGINX, Inc.
 */

#ifndef _NXT_NNCQ_H_INCLUDED_
#define _NXT_NNCQ_H_INCLUDED_


/* Numeric Naive Circular Queue */

#define NXT_NNCQ_SIZE  16384

typedef uint32_t nxt_nncq_atomic_t;
typedef uint16_t nxt_nncq_cycle_t;

typedef struct {
    nxt_nncq_atomic_t  head;
    nxt_nncq_atomic_t  entries[NXT_NNCQ_SIZE];
    nxt_nncq_atomic_t  tail;
} nxt_nncq_t;
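
/*
 * The head and tail are monotonically increasing counters: a counter's
 * slot is counter % NXT_NNCQ_SIZE and its lap number ("cycle") is
 * counter / NXT_NNCQ_SIZE.  Each entry packs the cycle of the enqueue
 * that wrote it together with the enqueued value, so readers can tell
 * a slot filled on the current lap from one left over from the lap
 * before.
 */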


static inline nxt_nncq_atomic_t
nxt_nncq_head(nxt_nncq_t const volatile *q)
{
    return q->head;
}


static inline nxt_nncq_atomic_t
nxt_nncq_tail(nxt_nncq_t const volatile *q)
{
    return q->tail;
}


/* Advances the tail from t to t + 1 unless another thread already has. */

static inline void
nxt_nncq_tail_cmp_inc(nxt_nncq_t volatile *q, nxt_nncq_atomic_t t)
{
    nxt_atomic_cmp_set(&q->tail, t, t + 1);
}


static inline nxt_nncq_atomic_t
nxt_nncq_index(nxt_nncq_t const volatile *q, nxt_nncq_atomic_t i)
{
    return i % NXT_NNCQ_SIZE;
}


static inline nxt_nncq_atomic_t
nxt_nncq_map(nxt_nncq_t const volatile *q, nxt_nncq_atomic_t i)
{
    return i % NXT_NNCQ_SIZE;
}


static inline nxt_nncq_cycle_t
nxt_nncq_cycle(nxt_nncq_t const volatile *q, nxt_nncq_atomic_t i)
{
    return i / NXT_NNCQ_SIZE;
}


static inline nxt_nncq_cycle_t
nxt_nncq_next_cycle(nxt_nncq_t const volatile *q, nxt_nncq_cycle_t i)
{
    return i + 1;
}


/* Packs a cycle and a value into a single entry. */

static inline nxt_nncq_atomic_t
nxt_nncq_new_entry(nxt_nncq_t const volatile *q, nxt_nncq_cycle_t cycle,
    nxt_nncq_atomic_t i)
{
    return cycle * NXT_NNCQ_SIZE + (i % NXT_NNCQ_SIZE);
}
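
/*
 * For example, with NXT_NNCQ_SIZE of 16384, cycle 2 and value 5 pack
 * into 2 * 16384 + 5 = 32773; nxt_nncq_cycle() recovers the cycle as
 * 32773 / 16384 = 2, and nxt_nncq_index() recovers the value as
 * 32773 % 16384 = 5.
 */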


static inline nxt_nncq_atomic_t
nxt_nncq_empty(nxt_nncq_t const volatile *q)
{
    return NXT_NNCQ_SIZE;
}


/*
 * The counters start at NXT_NNCQ_SIZE (cycle 1, slot 0), while the
 * zeroed entries carry cycle 0, so every slot reads as one lap behind,
 * i.e. empty.
 */

static inline void
nxt_nncq_init(nxt_nncq_t volatile *q)
{
    q->head = NXT_NNCQ_SIZE;
    nxt_memzero((void *) q->entries,
                NXT_NNCQ_SIZE * sizeof(nxt_nncq_atomic_t));
    q->tail = NXT_NNCQ_SIZE;
}


static inline void
nxt_nncq_enqueue(nxt_nncq_t volatile *q, nxt_nncq_atomic_t val)
{
    nxt_nncq_cycle_t   e_cycle, t_cycle;
    nxt_nncq_atomic_t  n, t, e, j;

    for ( ;; ) {
        t = nxt_nncq_tail(q);
        j = nxt_nncq_map(q, t);
        e = q->entries[j];

        e_cycle = nxt_nncq_cycle(q, e);
        t_cycle = nxt_nncq_cycle(q, t);

        if (e_cycle == t_cycle) {
            /*
             * The slot was already filled on this lap by another
             * enqueuer whose tail increment has not landed yet;
             * help it along and retry with a fresh tail.
             */
            nxt_nncq_tail_cmp_inc(q, t);
            continue;
        }

        if (nxt_nncq_next_cycle(q, e_cycle) != t_cycle) {
            /* A stale snapshot of the entry or the tail; retry. */
            continue;
        }

        n = nxt_nncq_new_entry(q, t_cycle, val);

        if (nxt_atomic_cmp_set(&q->entries[j], e, n)) {
            break;
        }
    }

    nxt_nncq_tail_cmp_inc(q, t);
}
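
/*
 * Note that nxt_nncq_new_entry() keeps only val % NXT_NNCQ_SIZE, and
 * nxt_nncq_dequeue() below returns the entry reduced the same way, so
 * the queue carries values in the [0, NXT_NNCQ_SIZE) range;
 * NXT_NNCQ_SIZE itself serves as the "empty" sentinel.
 */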


static inline nxt_nncq_atomic_t
nxt_nncq_dequeue(nxt_nncq_t volatile *q)
{
    nxt_nncq_cycle_t   e_cycle, h_cycle;
    nxt_nncq_atomic_t  h, j, e;

    for ( ;; ) {
        h = nxt_nncq_head(q);
        j = nxt_nncq_map(q, h);
        e = q->entries[j];

        e_cycle = nxt_nncq_cycle(q, e);
        h_cycle = nxt_nncq_cycle(q, h);

        if (e_cycle != h_cycle) {
            if (nxt_nncq_next_cycle(q, e_cycle) == h_cycle) {
                /* The slot has not been filled on this lap: empty. */
                return nxt_nncq_empty(q);
            }

            /* A stale snapshot of the entry or the head; retry. */
            continue;
        }

        /* Claim the entry by advancing the head past it. */
        if (nxt_atomic_cmp_set(&q->head, h, h + 1)) {
            break;
        }
    }

    return nxt_nncq_index(q, e);
}
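
/*
 * A minimal usage sketch (hypothetical; assumes the including
 * translation unit already provides nxt_atomic_cmp_set() and
 * nxt_memzero(), as the rest of this header does):
 *
 *     nxt_nncq_t         q;
 *     nxt_nncq_atomic_t  i;
 *
 *     nxt_nncq_init(&q);
 *     nxt_nncq_enqueue(&q, 3);
 *
 *     i = nxt_nncq_dequeue(&q);
 *
 *     if (i == nxt_nncq_empty(&q)) {
 *         ... the queue was empty ...
 *     }
 */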


#endif /* _NXT_NNCQ_H_INCLUDED_ */