@@ -11,7 +11,7 @@ fn main() {
         .write_to_file(GENERATION_PATH);
 
     configure();
-    split_bindings();
+    split_bindings().unwrap();
     rename_enums();
 
     println!("cargo:rerun-if-changed=build.rs");
@@ -22,12 +22,12 @@ fn main() {
 
 fn configure() {
     let content = format!(
-        r#"
-#pragma once
-#define RUST_U128_ALIGNMENT {}
+        r#"#pragma once
+#define TARGET_ARCH {}_
 "#,
-        std::mem::align_of::<u128>()
+        std::env::var("CARGO_CFG_TARGET_ARCH").unwrap()
     );
+
     let mut file = std::fs::File::options()
         .write(true)
         .append(false)
@@ -75,8 +75,7 @@ fn rename_enums() {
     }
 }
 
-fn split_bindings() {
-    let mut errors = Vec::new();
+fn split_bindings() -> Result<(), String> {
     let split_guide = SplitGuide::from_yaml(SPLITGUIDE_PATH);
     let bindings = std::fs::read_to_string(GENERATION_PATH).unwrap();
     let mut files = split_guide
@@ -94,46 +93,37 @@ fn split_bindings() {
             (name.as_str(), BufWriter::new(file))
         })
         .collect::<HashMap<_, _>>();
-    let mut records = group_tokens(Tokenizer { inner: &bindings });
+    let mut records = group_tokens(Tokenizer { inner: &bindings })?;
     for id in split_guide.requested_ids() {
-        if !records.iter().any(|r| r.token.id == id) {
-            errors.push(format!(
+        if !records.iter().any(|r| r.tokens.iter().any(|t| t.id == id)) {
+            return Err(format!(
                 "{} not found (requested explicitly by splitguide.yaml)",
                 id,
-            ))
+            ));
         }
     }
     for record in &mut records {
         let appropriate_files = split_guide.appropriate_files(record);
         for file in appropriate_files {
             let writer = files.get_mut(file).unwrap();
             record.used = true;
-            for comment in &record.comments {
-                writeln!(writer, "{}", comment).unwrap();
+            for token in &record.tokens {
+                writeln!(writer, "{}", &token).unwrap();
             }
-            writeln!(writer, "{}", &record.token).unwrap();
-        }
-    }
-    for record in records {
-        if !record.used && !record.token.id.is_empty() && record.token.tt != TokenType::PrivateToken
-        {
-            errors.push(format!(
-                "Unused {:?} record: {}",
-                record.token.tt, record.token.id
-            ))
         }
     }
-    if !errors.is_empty() {
-        panic!("Errors in splitting: {:?}", errors)
+    for record in &records {
+        record.is_used()?;
     }
     for (_, file) in files {
         file.into_inner().unwrap().unlock().unwrap();
     }
     std::fs::remove_file(GENERATION_PATH).unwrap();
+    Ok(())
 }
 
 enum SplitRule {
-    Brand(TokenType),
+    Brand(RecordType),
     Exclusive(String),
     Shared(String),
 }
@@ -153,11 +143,12 @@ impl SplitGuide {
         rules
             .into_iter()
             .map(|mut s| match s.as_str() {
-                ":functions" => SplitRule::Brand(TokenType::Function),
-                ":typedefs" => SplitRule::Brand(TokenType::Typedef),
-                ":includes" => SplitRule::Brand(TokenType::Include),
-                ":defines" => SplitRule::Brand(TokenType::Define),
-                ":const" => SplitRule::Brand(TokenType::Const),
+                ":functions" => SplitRule::Brand(RecordType::Function),
+                ":typedefs" => SplitRule::Brand(RecordType::Typedef),
+                ":includes" => SplitRule::Brand(RecordType::PreprInclude),
+                ":defines" => SplitRule::Brand(RecordType::PreprDefine),
+                ":const" => SplitRule::Brand(RecordType::Const),
+                ":multiples" => SplitRule::Brand(RecordType::Multiple),
                 _ if s.ends_with('!') => {
                     s.pop();
                     SplitRule::Exclusive(s)
@@ -176,13 +167,11 @@ impl SplitGuide {
         for (file, rules) in &self.rules {
             for rule in rules {
                 match rule {
-                    SplitRule::Brand(brand) if *brand == record.token.tt => {
-                        shared.push(file.as_str())
-                    }
-                    SplitRule::Exclusive(id) if id == record.token.id => {
+                    SplitRule::Brand(brand) if *brand == record.rt => shared.push(file.as_str()),
+                    SplitRule::Exclusive(id) if record.contains_id(id) => {
                         exclusives.push(file.as_str())
                     }
-                    SplitRule::Shared(id) if id == record.token.id => shared.push(file.as_str()),
+                    SplitRule::Shared(id) if record.contains_id(id) => shared.push(file.as_str()),
                     _ => {}
                 }
             }
@@ -203,31 +192,120 @@ impl SplitGuide {
     }
 }
 
-fn group_tokens(stream: Tokenizer) -> Vec<Record> {
-    let mut comments_stack = Vec::new();
+fn group_tokens(stream: Tokenizer) -> Result<Vec<Record>, String> {
     let mut records = Vec::new();
+    let mut record_collect = Record::new();
     for token in stream {
-        match token.tt {
-            TokenType::Comment => comments_stack.push(token),
-            TokenType::Whitespace => {}
-            _ => {
-                let comments = comments_stack;
-                comments_stack = Vec::new();
-                records.push(Record {
-                    token,
-                    used: false,
-                    comments,
-                });
-            }
+        record_collect.add_token(token)?;
+        if record_collect.is_ready() {
+            let mut record = Record::new();
+            std::mem::swap(&mut record_collect, &mut record);
+            records.push(record);
+        }
+    }
+    records.push(record_collect);
+    Ok(records)
+}
+
+#[derive(Copy, Clone, PartialEq, Debug)]
+enum RecordType {
+    Empty,
+    Multiple,
+    PrivateToken,
+    Typedef,
+    Function,
+    Const,
+    PreprDefine,
+    PreprInclude,
+}
+
+impl RecordType {
+    fn update(&mut self, rt: RecordType) {
+        match *self {
+            RecordType::Empty => *self = rt,
+            RecordType::Multiple => return,
+            _ => *self = RecordType::Multiple,
         }
     }
-    records
 }
 
 struct Record<'a> {
-    token: Token<'a>,
     used: bool,
-    comments: Vec<Token<'a>>,
+    rt: RecordType,
+    nesting: i32,
+    ids: Vec<Cow<'a, str>>,
+    tokens: Vec<Token<'a>>,
+}
+
+impl<'a> Record<'a> {
+    fn new() -> Self {
+        Self {
+            used: false,
+            rt: RecordType::Empty,
+            nesting: 0,
+            ids: Vec::new(),
+            tokens: Vec::new(),
+        }
+    }
+
+    fn is_used(&self) -> Result<(), String> {
+        if self.used || self.rt == RecordType::Empty || self.rt == RecordType::PrivateToken {
+            Ok(())
+        } else {
+            let token_ids = self.tokens.iter().map(|t| t.id).collect::<Vec<_>>();
+            Err(format!("Unused {:?} record: {:?}", self.rt, token_ids))
+        }
+    }
+
+    fn is_ready(&self) -> bool {
+        return self.nesting == 0 && self.rt != RecordType::Empty;
+    }
+
+    fn contains_id(&self, id: &str) -> bool {
+        self.ids.iter().any(|v| v == id)
+    }
+
+    fn push_token(&mut self, token: Token<'a>) {
+        self.tokens.push(token);
+    }
+
+    fn push_record_type_token(&mut self, token: Token<'a>, rt: RecordType) {
+        self.rt.update(rt);
+        if !token.id.is_empty() {
+            self.ids.push(token.id.into());
+        }
+        self.push_token(token)
+    }
+
+    fn push_prepr_if(&mut self, token: Token<'a>) {
+        self.nesting += 1;
+        self.push_token(token)
+    }
+
+    fn push_prepr_endif(&mut self, token: Token<'a>) -> Result<(), String> {
+        self.nesting -= 1;
+        if self.nesting < 0 {
+            return Err(format!("unmatched #endif"));
+        }
+        self.push_token(token);
+        Ok(())
+    }
+
+    fn add_token(&mut self, token: Token<'a>) -> Result<(), String> {
+        match token.tt {
+            TokenType::Comment => self.push_token(token),
+            TokenType::Typedef => self.push_record_type_token(token, RecordType::Typedef),
+            TokenType::Function => self.push_record_type_token(token, RecordType::Function),
+            TokenType::Const => self.push_record_type_token(token, RecordType::Const),
+            TokenType::PrivateToken => self.push_record_type_token(token, RecordType::PrivateToken),
+            TokenType::PreprDefine => self.push_record_type_token(token, RecordType::PreprDefine),
+            TokenType::PreprInclude => self.push_record_type_token(token, RecordType::PreprInclude),
+            TokenType::PreprIf => self.push_prepr_if(token),
+            TokenType::PreprEndif => self.push_prepr_endif(token)?,
+            TokenType::Whitespace => self.push_token(token),
+        }
+        Ok(())
+    }
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -236,11 +314,11 @@ enum TokenType {
     Typedef,
     Function,
     Const,
-    Define,
     PrivateToken,
-    Include,
-    Ifndef,
-    Endif,
+    PreprDefine,
+    PreprInclude,
+    PreprIf,
+    PreprEndif,
     Whitespace,
 }
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -266,10 +344,10 @@ impl<'a> Token<'a> {
     fn next(s: &'a str) -> Option<Self> {
         Self::whitespace(s)
             .or_else(|| Self::comment(s))
-            .or_else(|| Self::endif(s))
-            .or_else(|| Self::include(s))
-            .or_else(|| Self::ifndef(s))
-            .or_else(|| Self::define(s))
+            .or_else(|| Self::prepr_endif(s))
+            .or_else(|| Self::prepr_include(s))
+            .or_else(|| Self::prepr_define(s))
+            .or_else(|| Self::prepr_if(s))
             .or_else(|| Self::typedef(s))
             .or_else(|| Self::r#const(s))
             .or_else(|| Self::function(s))
@@ -370,51 +448,52 @@ impl<'a> Token<'a> {
             Some(Token::new(
                 TokenType::Comment,
                 "",
-                s.until_incl("\n").unwrap_or(s),
+                s.until("\n").unwrap_or(s),
             ))
         } else {
             None
         }
     }
 
-    fn ifndef(s: &'a str) -> Option<Self> {
-        let start = "#ifndef ";
-        s.starts_with(start).then(|| {
+    fn prepr_if(s: &'a str) -> Option<Self> {
+        if s.starts_with("#if ") || s.starts_with("#ifdef ") || s.starts_with("#ifndef ") {
             let span = s.until("\n").unwrap_or(s);
-            Token::new(TokenType::Ifndef, &span[start.len()..], span)
-        })
+            Some(Token::new(TokenType::PreprIf, span, span))
+        } else {
+            None
+        }
     }
 
-    fn define(s: &'a str) -> Option<Self> {
+    fn prepr_define(s: &'a str) -> Option<Self> {
         let start = "#define ";
         s.strip_prefix(start).map(|defined| {
             let span = s.until("\n").unwrap_or(s);
             Token::new(
                 if defined.starts_with('_') {
                     TokenType::PrivateToken
                 } else {
-                    TokenType::Define
+                    TokenType::PreprDefine
                 },
                 span[start.len()..].split_whitespace().next().unwrap(),
                 span,
             )
         })
     }
 
-    fn endif(s: &'a str) -> Option<Self> {
+    fn prepr_endif(s: &'a str) -> Option<Self> {
         s.starts_with("#endif")
-            .then(|| Token::new(TokenType::Endif, "", "#endif"))
+            .then(|| Token::new(TokenType::PreprEndif, "", "#endif"))
     }
 
-    fn include(s: &'a str) -> Option<Self> {
+    fn prepr_include(s: &'a str) -> Option<Self> {
         Self::_include(s, "#include \"", "\"").or_else(|| Self::_include(s, "#include <", ">"))
     }
 
     fn _include(s: &'a str, start: &str, end: &str) -> Option<Self> {
         if s.starts_with(start) {
             let span = s.until_incl(end).expect("detected unterminated #include");
             Some(Token::new(
-                TokenType::Include,
+                TokenType::PreprInclude,
                 &span[start.len()..(span.len() - end.len())],
                 span,
             ))
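The heart of the new grouping is the `nesting` counter in `Record`: `push_prepr_if` increments it, `push_prepr_endif` decrements it (rejecting an unmatched `#endif` with an error), and `is_ready` only releases a record once the counter is back to zero, so a whole `#ifndef ... #endif` block is written to the split headers as one unit. A standalone sketch of that rule, not part of the commit, reduced to plain lines instead of the commit's `Token` type (the `group_lines` helper and its sample input are hypothetical, for illustration only):

// Sketch only: groups lines so that anything between `#if*` and its matching
// `#endif` stays in a single group, mirroring push_prepr_if / push_prepr_endif / is_ready.
fn group_lines(lines: &[&str]) -> Result<Vec<Vec<String>>, String> {
    let mut groups: Vec<Vec<String>> = Vec::new();
    let mut current: Vec<String> = Vec::new();
    let mut nesting: i32 = 0;
    for line in lines {
        let trimmed = line.trim_start();
        if trimmed.starts_with("#if") {
            // entering a conditional block: keep the following lines in this group
            nesting += 1;
        } else if trimmed.starts_with("#endif") {
            nesting -= 1;
            if nesting < 0 {
                return Err("unmatched #endif".to_string());
            }
        }
        current.push((*line).to_string());
        // the group is "ready" once every opened `#if*` has been closed
        if nesting == 0 && !trimmed.is_empty() {
            groups.push(std::mem::take(&mut current));
        }
    }
    if !current.is_empty() {
        groups.push(current);
    }
    Ok(groups)
}

fn main() {
    let input = ["#ifndef FOO_H", "#define FOO_H", "#endif", "typedef int foo_t;"];
    for group in group_lines(&input).unwrap() {
        println!("{:?}", group);
    }
}

This is also why `group_tokens` now returns `Result<Vec<Record>, String>` and `main` calls `split_bindings().unwrap()`: malformed preprocessor nesting in the generated bindings surfaces as a single error message instead of a panic deep inside the writer loop.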