Skip to content

Commit 4414e77

Browse files
committed
build.rs: #if parsing support — unfinished
1 parent 8756195 commit 4414e77

File tree

5 files changed

+178
-81
lines changed

5 files changed

+178
-81
lines changed

build.rs

Lines changed: 152 additions & 73 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ fn main() {
1111
.write_to_file(GENERATION_PATH);
1212

1313
configure();
14-
split_bindings();
14+
split_bindings().unwrap();
1515
rename_enums();
1616

1717
println!("cargo:rerun-if-changed=build.rs");
@@ -22,12 +22,12 @@ fn main() {
2222

2323
fn configure() {
2424
let content = format!(
25-
r#"
26-
#pragma once
27-
#define RUST_U128_ALIGNMENT {}
25+
r#"#pragma once
26+
#define TARGET_ARCH {}_
2827
"#,
29-
std::mem::align_of::<u128>()
28+
std::env::var("CARGO_CFG_TARGET_ARCH").unwrap()
3029
);
30+
3131
let mut file = std::fs::File::options()
3232
.write(true)
3333
.append(false)
@@ -75,8 +75,7 @@ fn rename_enums() {
7575
}
7676
}
7777

78-
fn split_bindings() {
79-
let mut errors = Vec::new();
78+
fn split_bindings() -> Result<(), String> {
8079
let split_guide = SplitGuide::from_yaml(SPLITGUIDE_PATH);
8180
let bindings = std::fs::read_to_string(GENERATION_PATH).unwrap();
8281
let mut files = split_guide
@@ -94,46 +93,37 @@ fn split_bindings() {
9493
(name.as_str(), BufWriter::new(file))
9594
})
9695
.collect::<HashMap<_, _>>();
97-
let mut records = group_tokens(Tokenizer { inner: &bindings });
96+
let mut records = group_tokens(Tokenizer { inner: &bindings })?;
9897
for id in split_guide.requested_ids() {
99-
if !records.iter().any(|r| r.token.id == id) {
100-
errors.push(format!(
98+
if records.iter().any(|r| r.tokens.iter().any(|t| t.id == id)) {
99+
return Err(format!(
101100
"{} not found (requested explicitly by splitguide.yaml)",
102101
id,
103-
))
102+
));
104103
}
105104
}
106105
for record in &mut records {
107106
let appropriate_files = split_guide.appropriate_files(record);
108107
for file in appropriate_files {
109108
let writer = files.get_mut(file).unwrap();
110109
record.used = true;
111-
for comment in &record.comments {
112-
writeln!(writer, "{}", comment).unwrap();
110+
for token in &record.tokens {
111+
writeln!(writer, "{}", &token).unwrap();
113112
}
114-
writeln!(writer, "{}", &record.token).unwrap();
115-
}
116-
}
117-
for record in records {
118-
if !record.used && !record.token.id.is_empty() && record.token.tt != TokenType::PrivateToken
119-
{
120-
errors.push(format!(
121-
"Unused {:?} record: {}",
122-
record.token.tt, record.token.id
123-
))
124113
}
125114
}
126-
if !errors.is_empty() {
127-
panic!("Errors in splitting: {:?}", errors)
115+
for record in &records {
116+
record.is_used()?;
128117
}
129118
for (_, file) in files {
130119
file.into_inner().unwrap().unlock().unwrap();
131120
}
132121
std::fs::remove_file(GENERATION_PATH).unwrap();
122+
Ok(())
133123
}
134124

135125
enum SplitRule {
136-
Brand(TokenType),
126+
Brand(RecordType),
137127
Exclusive(String),
138128
Shared(String),
139129
}
@@ -153,11 +143,12 @@ impl SplitGuide {
153143
rules
154144
.into_iter()
155145
.map(|mut s| match s.as_str() {
156-
":functions" => SplitRule::Brand(TokenType::Function),
157-
":typedefs" => SplitRule::Brand(TokenType::Typedef),
158-
":includes" => SplitRule::Brand(TokenType::Include),
159-
":defines" => SplitRule::Brand(TokenType::Define),
160-
":const" => SplitRule::Brand(TokenType::Const),
146+
":functions" => SplitRule::Brand(RecordType::Function),
147+
":typedefs" => SplitRule::Brand(RecordType::Typedef),
148+
":includes" => SplitRule::Brand(RecordType::PreprInclude),
149+
":defines" => SplitRule::Brand(RecordType::PreprDefine),
150+
":const" => SplitRule::Brand(RecordType::Const),
151+
":multiples" => SplitRule::Brand(RecordType::Multiple),
161152
_ if s.ends_with('!') => {
162153
s.pop();
163154
SplitRule::Exclusive(s)
@@ -176,13 +167,11 @@ impl SplitGuide {
176167
for (file, rules) in &self.rules {
177168
for rule in rules {
178169
match rule {
179-
SplitRule::Brand(brand) if *brand == record.token.tt => {
180-
shared.push(file.as_str())
181-
}
182-
SplitRule::Exclusive(id) if id == record.token.id => {
170+
SplitRule::Brand(brand) if *brand == record.rt => shared.push(file.as_str()),
171+
SplitRule::Exclusive(id) if record.contains_id(id) => {
183172
exclusives.push(file.as_str())
184173
}
185-
SplitRule::Shared(id) if id == record.token.id => shared.push(file.as_str()),
174+
SplitRule::Shared(id) if record.contains_id(id) => shared.push(file.as_str()),
186175
_ => {}
187176
}
188177
}
@@ -203,31 +192,120 @@ impl SplitGuide {
203192
}
204193
}
205194

206-
fn group_tokens(stream: Tokenizer) -> Vec<Record> {
207-
let mut comments_stack = Vec::new();
195+
fn group_tokens(stream: Tokenizer) -> Result<Vec<Record>, String> {
208196
let mut records = Vec::new();
197+
let mut record_collect = Record::new();
209198
for token in stream {
210-
match token.tt {
211-
TokenType::Comment => comments_stack.push(token),
212-
TokenType::Whitespace => {}
213-
_ => {
214-
let comments = comments_stack;
215-
comments_stack = Vec::new();
216-
records.push(Record {
217-
token,
218-
used: false,
219-
comments,
220-
});
221-
}
199+
record_collect.add_token(token)?;
200+
if record_collect.is_ready() {
201+
let mut record = Record::new();
202+
std::mem::swap(&mut record_collect, &mut record);
203+
records.push(record);
204+
}
205+
}
206+
records.push(record_collect);
207+
Ok(records)
208+
}
209+
210+
#[derive(Copy, Clone, PartialEq, Debug)]
211+
enum RecordType {
212+
Empty,
213+
Multiple,
214+
PrivateToken,
215+
Typedef,
216+
Function,
217+
Const,
218+
PreprDefine,
219+
PreprInclude,
220+
}
221+
222+
impl RecordType {
223+
fn update(&mut self, rt: RecordType) {
224+
match *self {
225+
RecordType::Empty => *self = rt,
226+
RecordType::Multiple => return,
227+
_ => *self = RecordType::Multiple,
222228
}
223229
}
224-
records
225230
}
226231

227232
struct Record<'a> {
228-
token: Token<'a>,
229233
used: bool,
230-
comments: Vec<Token<'a>>,
234+
rt: RecordType,
235+
nesting: i32,
236+
ids: Vec<Cow<'a, str>>,
237+
tokens: Vec<Token<'a>>,
238+
}
239+
240+
impl<'a> Record<'a> {
241+
fn new() -> Self {
242+
Self {
243+
used: false,
244+
rt: RecordType::Empty,
245+
nesting: 0,
246+
ids: Vec::new(),
247+
tokens: Vec::new(),
248+
}
249+
}
250+
251+
fn is_used(&self) -> Result<(), String> {
252+
if self.used || self.rt == RecordType::Empty || self.rt == RecordType::PrivateToken {
253+
Ok(())
254+
} else {
255+
let token_ids = self.tokens.iter().map(|t| t.id).collect::<Vec<_>>();
256+
Err(format!("Unused {:?} record: {:?}", self.rt, token_ids))
257+
}
258+
}
259+
260+
fn is_ready(&self) -> bool {
261+
return self.nesting == 0 && self.rt != RecordType::Empty;
262+
}
263+
264+
fn contains_id(&self, id: &str) -> bool {
265+
self.ids.iter().any(|v| v == id)
266+
}
267+
268+
fn push_token(&mut self, token: Token<'a>) {
269+
self.tokens.push(token);
270+
}
271+
272+
fn push_record_type_token(&mut self, token: Token<'a>, rt: RecordType) {
273+
self.rt.update(rt);
274+
if !token.id.is_empty() {
275+
self.ids.push(token.id.into());
276+
}
277+
self.push_token(token)
278+
}
279+
280+
fn push_prepr_if(&mut self, token: Token<'a>) {
281+
self.nesting += 1;
282+
self.push_token(token)
283+
}
284+
285+
fn push_prepr_endif(&mut self, token: Token<'a>) -> Result<(),String> {
286+
self.nesting -= 1;
287+
if self.nesting < 0 {
288+
return Err(format!("unmatched #endif"));
289+
}
290+
self.push_token(token);
291+
Ok(())
292+
}
293+
294+
fn add_token(&mut self, token: Token<'a>) -> Result<(), String> {
295+
match token.tt {
296+
TokenType::Comment => self.push_token(token),
297+
TokenType::Typedef => self.push_record_type_token(token, RecordType::Typedef),
298+
TokenType::Function => self.push_record_type_token(token, RecordType::Function),
299+
TokenType::Const => self.push_record_type_token(token, RecordType::Const),
300+
TokenType::PrivateToken => self.push_record_type_token(token, RecordType::PrivateToken),
301+
TokenType::PreprDefine => self.push_record_type_token(token, RecordType::PreprDefine),
302+
TokenType::PreprInclude => self.push_record_type_token(token, RecordType::PreprInclude),
303+
TokenType::PreprIf => self.push_prepr_if(token),
304+
TokenType::PreprEndif => self.push_prepr_endif(token)?,
305+
TokenType::Whitespace => self.push_token(token),
306+
}
307+
Ok(())
308+
}
231309
}
232310

233311
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -236,11 +314,11 @@ enum TokenType {
236314
Typedef,
237315
Function,
238316
Const,
239-
Define,
240317
PrivateToken,
241-
Include,
242-
Ifndef,
243-
Endif,
318+
PreprDefine,
319+
PreprInclude,
320+
PreprIf,
321+
PreprEndif,
244322
Whitespace,
245323
}
246324
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -266,10 +344,10 @@ impl<'a> Token<'a> {
266344
fn next(s: &'a str) -> Option<Self> {
267345
Self::whitespace(s)
268346
.or_else(|| Self::comment(s))
269-
.or_else(|| Self::endif(s))
270-
.or_else(|| Self::include(s))
271-
.or_else(|| Self::ifndef(s))
272-
.or_else(|| Self::define(s))
347+
.or_else(|| Self::prepr_endif(s))
348+
.or_else(|| Self::prepr_include(s))
349+
.or_else(|| Self::prepr_define(s))
350+
.or_else(|| Self::prepr_if(s))
273351
.or_else(|| Self::typedef(s))
274352
.or_else(|| Self::r#const(s))
275353
.or_else(|| Self::function(s))
@@ -370,51 +448,52 @@ impl<'a> Token<'a> {
370448
Some(Token::new(
371449
TokenType::Comment,
372450
"",
373-
s.until_incl("\n").unwrap_or(s),
451+
s.until("\n").unwrap_or(s),
374452
))
375453
} else {
376454
None
377455
}
378456
}
379457

380-
fn ifndef(s: &'a str) -> Option<Self> {
381-
let start = "#ifndef ";
382-
s.starts_with(start).then(|| {
458+
fn prepr_if(s: &'a str) -> Option<Self> {
459+
if s.starts_with("#if ") || s.starts_with("#ifdef ") || s.starts_with("#ifndef ") {
383460
let span = s.until("\n").unwrap_or(s);
384-
Token::new(TokenType::Ifndef, &span[start.len()..], span)
385-
})
461+
Some(Token::new(TokenType::PreprIf, span, span))
462+
} else {
463+
None
464+
}
386465
}
387466

388-
fn define(s: &'a str) -> Option<Self> {
467+
fn prepr_define(s: &'a str) -> Option<Self> {
389468
let start = "#define ";
390469
s.strip_prefix(start).map(|defined| {
391470
let span = s.until("\n").unwrap_or(s);
392471
Token::new(
393472
if defined.starts_with('_') {
394473
TokenType::PrivateToken
395474
} else {
396-
TokenType::Define
475+
TokenType::PreprDefine
397476
},
398477
span[start.len()..].split_whitespace().next().unwrap(),
399478
span,
400479
)
401480
})
402481
}
403482

404-
fn endif(s: &'a str) -> Option<Self> {
483+
fn prepr_endif(s: &'a str) -> Option<Self> {
405484
s.starts_with("#endif")
406-
.then(|| Token::new(TokenType::Endif, "", "#endif"))
485+
.then(|| Token::new(TokenType::PreprEndif, "", "#endif"))
407486
}
408487

409-
fn include(s: &'a str) -> Option<Self> {
488+
fn prepr_include(s: &'a str) -> Option<Self> {
410489
Self::_include(s, "#include \"", "\"").or_else(|| Self::_include(s, "#include <", ">"))
411490
}
412491

413492
fn _include(s: &'a str, start: &str, end: &str) -> Option<Self> {
414493
if s.starts_with(start) {
415494
let span = s.until_incl(end).expect("detected unterminated #include");
416495
Some(Token::new(
417-
TokenType::Include,
496+
TokenType::PreprInclude,
418497
&span[start.len()..(span.len() - end.len())],
419498
span,
420499
))

cbindgen.toml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -67,9 +67,8 @@ style = "both"
6767

6868

6969
[defines]
70-
# "target_os = freebsd" = "DEFINE_FREEBSD"
71-
# "feature = complete_n" = "DEFINE_COMPLETE_N"
72-
70+
"target_arch = aarch64" = "TARGET_ARCH_AARCH64"
71+
"target_arch = x86_64" = "TARGET_ARCH_X86_64"
7372

7473
[export]
7574
include = []

0 commit comments

Comments
 (0)