@@ -85,18 +85,13 @@ int main(void) {
85
85
while (p != 0) { \
86
86
int cmp; \
87
87
cmp = __cmp(x, p); \
88
+ if (cmp >= 0) cnt += kavl_size_child(__head, p, 0) + 1; \
88
89
if (cmp < 0) p = p->__head.p[0]; \
89
- else if (cmp > 0) { \
90
- cnt += kavl_size_child(__head, p, 0) + 1; \
91
- p = p->__head.p[1]; \
92
- } else { \
93
- cnt += kavl_size_child(__head, p, 0) + 1; \
94
- if (cnt_) *cnt_ = cnt; \
95
- return (__type*)p; \
96
- } \
90
+ else if (cmp > 0) p = p->__head.p[1]; \
91
+ else break; \
97
92
} \
98
93
if (cnt_) *cnt_ = cnt; \
99
- return 0 ; \
94
+ return (__type*)p ; \
100
95
}
101
96
102
97
#define __KAVL_ROTATE (suf , __type , __head ) \
@@ -144,13 +139,13 @@ int main(void) {
144
139
for (p = bp , q = bq , top = path_len = 0 ; p ; q = p , p = p -> __head .p [which ]) { \
145
140
int cmp ; \
146
141
cmp = __cmp (x , p ); \
142
+ if (cmp >= 0 ) cnt += kavl_size_child (__head , p , 0 ) + 1 ; \
147
143
if (cmp == 0 ) { \
148
- if (cnt_ ) * cnt_ = cnt + 1 ; \
144
+ if (cnt_ ) * cnt_ = cnt ; \
149
145
return p ; \
150
146
} \
151
147
if (p -> __head .balance != 0 ) \
152
148
bq = q , bp = p , top = 0 ; \
153
- if (cmp > 0 ) cnt += kavl_size_child (__head , p , 0 ) + 1 ; \
154
149
stack [top ++ ] = which = (cmp > 0 ); \
155
150
path [path_len ++ ] = p ; \
156
151
} \
@@ -178,24 +173,32 @@ int main(void) {
178
173
}
179
174
180
175
#define __KAVL_ERASE (suf , __scope , __type , __head , __cmp ) \
181
- __scope __type *kavl_erase_##suf(__type **root_, const __type *x) { \
176
+ __scope __type *kavl_erase_##suf(__type **root_, const __type *x, unsigned *cnt_ ) { \
182
177
__type *p, *path[KAVL_MAX_DEPTH], fake; \
183
178
unsigned char dir[KAVL_MAX_DEPTH]; \
184
179
int i, d = 0, cmp; \
180
+ unsigned cnt = 0; \
185
181
fake.__head.p[0] = *root_, fake.__head.p[1] = 0; \
182
+ if (cnt_) *cnt_ = 0; \
186
183
if (x) { \
187
184
for (cmp = -1, p = &fake; cmp; cmp = __cmp(x, p)) { \
188
185
int which = (cmp > 0); \
186
+ if (cmp > 0) cnt += kavl_size_child(__head, p, 0) + 1; \
189
187
dir[d] = which; \
190
188
path[d++] = p; \
191
189
p = p->__head.p[which]; \
192
- if (p == 0) return 0; \
190
+ if (p == 0) { \
191
+ if (cnt_) *cnt_ = 0; \
192
+ return 0; \
193
+ } \
193
194
} \
195
+ cnt += kavl_size_child(__head, p, 0) + 1; /* because p==x is not counted */ \
194
196
} else { \
195
- for (p = &fake; p; p = p->__head.p[0]) \
197
+ for (p = & fake , cnt = 1 ; p ; p = p -> __head .p [0 ]) \
196
198
dir [d ] = 0 , path [d ++ ] = p ; \
197
199
p = path [-- d ]; \
198
200
} \
201
+ if (cnt_ ) * cnt_ = cnt ; \
199
202
for (i = 1 ; i < d ; ++ i ) -- path [i ]-> __head .size ; \
200
203
if (p -> __head .p [1 ] == 0 ) { /* ((1,.)2,3)4 => (1,3)4; p=2 */ \
201
204
path [d - 1 ]-> __head .p [dir [d - 1 ]] = p -> __head .p [0 ]; \
@@ -339,8 +342,8 @@ int main(void) {
339
342
*
340
343
* @return node removed from the tree if present, or NULL if absent.
*         If cnt is non-NULL, *cnt is set to the 1-based in-order rank of
*         the removed node, or to 0 when no node was removed.
341
344
*/
342
/* Delete node x from the tree rooted at *proot; if cnt is non-NULL, *cnt
 * receives the 1-based in-order rank of the removed node (0 if absent).
 * NOTE: no space before the parameter list — a space would make this an
 * object-like macro and break every call site. */
#define kavl_erase(suf, proot, x, cnt) kavl_erase_##suf(proot, x, cnt)

/* Delete the smallest node in the tree rooted at *proot (x = 0 selects the
 * leftmost node; its rank, were it reported, is always 1). */
#define kavl_erase_first(suf, proot) kavl_erase_##suf(proot, 0, 0)
344
347
345
348
#define kavl_itr_t (suf ) struct kavl_itr_##suf
346
349
0 commit comments