blob: dc6453bcfd83dba832a857695193bc1e3b51c063 [file] [log] [blame]
akmhoquefa8ee9b2014-03-14 09:06:24 -05001#! /usr/bin/env python
2# encoding: utf-8
3# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
4
5import re,string,traceback
6from waflib import Logs,Utils,Errors
7from waflib.Logs import debug,error
8class PreprocError(Errors.WafError):
9 pass
10POPFILE='-'
11recursion_limit=150
12go_absolute=False
13standard_includes=['/usr/include']
14if Utils.is_win32:
15 standard_includes=[]
16use_trigraphs=0
17strict_quotes=0
18g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',}
19re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
20re_mac=re.compile("^[a-zA-Z_]\w*")
21re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
22re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
23re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
24re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE)
25trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
26chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
27NUM='i'
28OP='O'
29IDENT='T'
30STR='s'
31CHAR='c'
32tok_types=[NUM,STR,IDENT,OP]
33exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
34re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
35accepted='a'
36ignored='i'
37undefined='u'
38skipped='s'
39def repl(m):
40 s=m.group(0)
41 if s.startswith('/'):
42 return' '
43 return s
44def filter_comments(filename):
45 code=Utils.readf(filename)
46 if use_trigraphs:
47 for(a,b)in trig_def:code=code.split(a).join(b)
48 code=re_nl.sub('',code)
49 code=re_cpp.sub(repl,code)
50 return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)]
51prec={}
52ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
53for x in range(len(ops)):
54 syms=ops[x]
55 for u in syms.split():
56 prec[u]=x
57def trimquotes(s):
58 if not s:return''
59 s=s.rstrip()
60 if s[0]=="'"and s[-1]=="'":return s[1:-1]
61 return s
62def reduce_nums(val_1,val_2,val_op):
63 try:a=0+val_1
64 except TypeError:a=int(val_1)
65 try:b=0+val_2
66 except TypeError:b=int(val_2)
67 d=val_op
68 if d=='%':c=a%b
69 elif d=='+':c=a+b
70 elif d=='-':c=a-b
71 elif d=='*':c=a*b
72 elif d=='/':c=a/b
73 elif d=='^':c=a^b
74 elif d=='|':c=a|b
75 elif d=='||':c=int(a or b)
76 elif d=='&':c=a&b
77 elif d=='&&':c=int(a and b)
78 elif d=='==':c=int(a==b)
79 elif d=='!=':c=int(a!=b)
80 elif d=='<=':c=int(a<=b)
81 elif d=='<':c=int(a<b)
82 elif d=='>':c=int(a>b)
83 elif d=='>=':c=int(a>=b)
84 elif d=='^':c=int(a^b)
85 elif d=='<<':c=a<<b
86 elif d=='>>':c=a>>b
87 else:c=0
88 return c
89def get_num(lst):
90 if not lst:raise PreprocError("empty list for get_num")
91 (p,v)=lst[0]
92 if p==OP:
93 if v=='(':
94 count_par=1
95 i=1
96 while i<len(lst):
97 (p,v)=lst[i]
98 if p==OP:
99 if v==')':
100 count_par-=1
101 if count_par==0:
102 break
103 elif v=='(':
104 count_par+=1
105 i+=1
106 else:
107 raise PreprocError("rparen expected %r"%lst)
108 (num,_)=get_term(lst[1:i])
109 return(num,lst[i+1:])
110 elif v=='+':
111 return get_num(lst[1:])
112 elif v=='-':
113 num,lst=get_num(lst[1:])
114 return(reduce_nums('-1',num,'*'),lst)
115 elif v=='!':
116 num,lst=get_num(lst[1:])
117 return(int(not int(num)),lst)
118 elif v=='~':
119 num,lst=get_num(lst[1:])
120 return(~int(num),lst)
121 else:
122 raise PreprocError("Invalid op token %r for get_num"%lst)
123 elif p==NUM:
124 return v,lst[1:]
125 elif p==IDENT:
126 return 0,lst[1:]
127 else:
128 raise PreprocError("Invalid token %r for get_num"%lst)
129def get_term(lst):
130 if not lst:raise PreprocError("empty list for get_term")
131 num,lst=get_num(lst)
132 if not lst:
133 return(num,[])
134 (p,v)=lst[0]
135 if p==OP:
136 if v==',':
137 return get_term(lst[1:])
138 elif v=='?':
139 count_par=0
140 i=1
141 while i<len(lst):
142 (p,v)=lst[i]
143 if p==OP:
144 if v==')':
145 count_par-=1
146 elif v=='(':
147 count_par+=1
148 elif v==':':
149 if count_par==0:
150 break
151 i+=1
152 else:
153 raise PreprocError("rparen expected %r"%lst)
154 if int(num):
155 return get_term(lst[1:i])
156 else:
157 return get_term(lst[i+1:])
158 else:
159 num2,lst=get_num(lst[1:])
160 if not lst:
161 num2=reduce_nums(num,num2,v)
162 return get_term([(NUM,num2)]+lst)
163 p2,v2=lst[0]
164 if p2!=OP:
165 raise PreprocError("op expected %r"%lst)
166 if prec[v2]>=prec[v]:
167 num2=reduce_nums(num,num2,v)
168 return get_term([(NUM,num2)]+lst)
169 else:
170 num3,lst=get_num(lst[1:])
171 num3=reduce_nums(num2,num3,v2)
172 return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
173 raise PreprocError("cannot reduce %r"%lst)
174def reduce_eval(lst):
175 num,lst=get_term(lst)
176 return(NUM,num)
177def stringize(lst):
178 lst=[str(v2)for(p2,v2)in lst]
179 return"".join(lst)
180def paste_tokens(t1,t2):
181 p1=None
182 if t1[0]==OP and t2[0]==OP:
183 p1=OP
184 elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
185 p1=IDENT
186 elif t1[0]==NUM and t2[0]==NUM:
187 p1=NUM
188 if not p1:
189 raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
190 return(p1,t1[1]+t2[1])
191def reduce_tokens(lst,defs,ban=[]):
192 i=0
193 while i<len(lst):
194 (p,v)=lst[i]
195 if p==IDENT and v=="defined":
196 del lst[i]
197 if i<len(lst):
198 (p2,v2)=lst[i]
199 if p2==IDENT:
200 if v2 in defs:
201 lst[i]=(NUM,1)
202 else:
203 lst[i]=(NUM,0)
204 elif p2==OP and v2=='(':
205 del lst[i]
206 (p2,v2)=lst[i]
207 del lst[i]
208 if v2 in defs:
209 lst[i]=(NUM,1)
210 else:
211 lst[i]=(NUM,0)
212 else:
213 raise PreprocError("Invalid define expression %r"%lst)
214 elif p==IDENT and v in defs:
215 if isinstance(defs[v],str):
216 a,b=extract_macro(defs[v])
217 defs[v]=b
218 macro_def=defs[v]
219 to_add=macro_def[1]
220 if isinstance(macro_def[0],list):
221 del lst[i]
222 accu=to_add[:]
223 reduce_tokens(accu,defs,ban+[v])
224 for x in range(len(accu)):
225 lst.insert(i,accu[x])
226 i+=1
227 else:
228 args=[]
229 del lst[i]
230 if i>=len(lst):
231 raise PreprocError("expected '(' after %r (got nothing)"%v)
232 (p2,v2)=lst[i]
233 if p2!=OP or v2!='(':
234 raise PreprocError("expected '(' after %r"%v)
235 del lst[i]
236 one_param=[]
237 count_paren=0
238 while i<len(lst):
239 p2,v2=lst[i]
240 del lst[i]
241 if p2==OP and count_paren==0:
242 if v2=='(':
243 one_param.append((p2,v2))
244 count_paren+=1
245 elif v2==')':
246 if one_param:args.append(one_param)
247 break
248 elif v2==',':
249 if not one_param:raise PreprocError("empty param in funcall %s"%p)
250 args.append(one_param)
251 one_param=[]
252 else:
253 one_param.append((p2,v2))
254 else:
255 one_param.append((p2,v2))
256 if v2=='(':count_paren+=1
257 elif v2==')':count_paren-=1
258 else:
259 raise PreprocError('malformed macro')
260 accu=[]
261 arg_table=macro_def[0]
262 j=0
263 while j<len(to_add):
264 (p2,v2)=to_add[j]
265 if p2==OP and v2=='#':
266 if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
267 toks=args[arg_table[to_add[j+1][1]]]
268 accu.append((STR,stringize(toks)))
269 j+=1
270 else:
271 accu.append((p2,v2))
272 elif p2==OP and v2=='##':
273 if accu and j+1<len(to_add):
274 t1=accu[-1]
275 if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
276 toks=args[arg_table[to_add[j+1][1]]]
277 if toks:
278 accu[-1]=paste_tokens(t1,toks[0])
279 accu.extend(toks[1:])
280 else:
281 accu.append((p2,v2))
282 accu.extend(toks)
283 elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
284 va_toks=[]
285 st=len(macro_def[0])
286 pt=len(args)
287 for x in args[pt-st+1:]:
288 va_toks.extend(x)
289 va_toks.append((OP,','))
290 if va_toks:va_toks.pop()
291 if len(accu)>1:
292 (p3,v3)=accu[-1]
293 (p4,v4)=accu[-2]
294 if v3=='##':
295 accu.pop()
296 if v4==','and pt<st:
297 accu.pop()
298 accu+=va_toks
299 else:
300 accu[-1]=paste_tokens(t1,to_add[j+1])
301 j+=1
302 else:
303 accu.append((p2,v2))
304 elif p2==IDENT and v2 in arg_table:
305 toks=args[arg_table[v2]]
306 reduce_tokens(toks,defs,ban+[v])
307 accu.extend(toks)
308 else:
309 accu.append((p2,v2))
310 j+=1
311 reduce_tokens(accu,defs,ban+[v])
312 for x in range(len(accu)-1,-1,-1):
313 lst.insert(i,accu[x])
314 i+=1
315def eval_macro(lst,defs):
316 reduce_tokens(lst,defs,[])
317 if not lst:raise PreprocError("missing tokens to evaluate")
318 (p,v)=reduce_eval(lst)
319 return int(v)!=0
320def extract_macro(txt):
321 t=tokenize(txt)
322 if re_fun.search(txt):
323 p,name=t[0]
324 p,v=t[1]
325 if p!=OP:raise PreprocError("expected open parenthesis")
326 i=1
327 pindex=0
328 params={}
329 prev='('
330 while 1:
331 i+=1
332 p,v=t[i]
333 if prev=='(':
334 if p==IDENT:
335 params[v]=pindex
336 pindex+=1
337 prev=p
338 elif p==OP and v==')':
339 break
340 else:
341 raise PreprocError("unexpected token (3)")
342 elif prev==IDENT:
343 if p==OP and v==',':
344 prev=v
345 elif p==OP and v==')':
346 break
347 else:
348 raise PreprocError("comma or ... expected")
349 elif prev==',':
350 if p==IDENT:
351 params[v]=pindex
352 pindex+=1
353 prev=p
354 elif p==OP and v=='...':
355 raise PreprocError("not implemented (1)")
356 else:
357 raise PreprocError("comma or ... expected (2)")
358 elif prev=='...':
359 raise PreprocError("not implemented (2)")
360 else:
361 raise PreprocError("unexpected else")
362 return(name,[params,t[i+1:]])
363 else:
364 (p,v)=t[0]
365 return(v,[[],t[1:]])
366re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
367def extract_include(txt,defs):
368 m=re_include.search(txt)
369 if m:
370 if m.group('a'):return'<',m.group('a')
371 if m.group('b'):return'"',m.group('b')
372 toks=tokenize(txt)
373 reduce_tokens(toks,defs,['waf_include'])
374 if not toks:
375 raise PreprocError("could not parse include %s"%txt)
376 if len(toks)==1:
377 if toks[0][0]==STR:
378 return'"',toks[0][1]
379 else:
380 if toks[0][1]=='<'and toks[-1][1]=='>':
381 return stringize(toks).lstrip('<').rstrip('>')
382 raise PreprocError("could not parse include %s."%txt)
383def parse_char(txt):
384 if not txt:raise PreprocError("attempted to parse a null char")
385 if txt[0]!='\\':
386 return ord(txt)
387 c=txt[1]
388 if c=='x':
389 if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16)
390 return int(txt[2:],16)
391 elif c.isdigit():
392 if c=='0'and len(txt)==2:return 0
393 for i in 3,2,1:
394 if len(txt)>i and txt[1:1+i].isdigit():
395 return(1+i,int(txt[1:1+i],8))
396 else:
397 try:return chr_esc[c]
398 except KeyError:raise PreprocError("could not parse char literal '%s'"%txt)
399def tokenize(s):
400 return tokenize_private(s)[:]
401@Utils.run_once
402def tokenize_private(s):
403 ret=[]
404 for match in re_clexer.finditer(s):
405 m=match.group
406 for name in tok_types:
407 v=m(name)
408 if v:
409 if name==IDENT:
410 try:v=g_optrans[v];name=OP
411 except KeyError:
412 if v.lower()=="true":
413 v=1
414 name=NUM
415 elif v.lower()=="false":
416 v=0
417 name=NUM
418 elif name==NUM:
419 if m('oct'):v=int(v,8)
420 elif m('hex'):v=int(m('hex'),16)
421 elif m('n0'):v=m('n0')
422 else:
423 v=m('char')
424 if v:v=parse_char(v)
425 else:v=m('n2')or m('n4')
426 elif name==OP:
427 if v=='%:':v='#'
428 elif v=='%:%:':v='##'
429 elif name==STR:
430 v=v[1:-1]
431 ret.append((name,v))
432 break
433 return ret
434@Utils.run_once
435def define_name(line):
436 return re_mac.match(line).group(0)
437class c_parser(object):
438 def __init__(self,nodepaths=None,defines=None):
439 self.lines=[]
440 if defines is None:
441 self.defs={}
442 else:
443 self.defs=dict(defines)
444 self.state=[]
445 self.count_files=0
446 self.currentnode_stack=[]
447 self.nodepaths=nodepaths or[]
448 self.nodes=[]
449 self.names=[]
450 self.curfile=''
451 self.ban_includes=set([])
452 def cached_find_resource(self,node,filename):
453 try:
454 nd=node.ctx.cache_nd
455 except AttributeError:
456 nd=node.ctx.cache_nd={}
457 tup=(node,filename)
458 try:
459 return nd[tup]
460 except KeyError:
461 ret=node.find_resource(filename)
462 if ret:
463 if getattr(ret,'children',None):
464 ret=None
465 elif ret.is_child_of(node.ctx.bldnode):
466 tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
467 if tmp and getattr(tmp,'children',None):
468 ret=None
469 nd[tup]=ret
470 return ret
471 def tryfind(self,filename):
472 self.curfile=filename
473 found=self.cached_find_resource(self.currentnode_stack[-1],filename)
474 for n in self.nodepaths:
475 if found:
476 break
477 found=self.cached_find_resource(n,filename)
478 if found and not found in self.ban_includes:
479 self.nodes.append(found)
480 if filename[-4:]!='.moc':
481 self.addlines(found)
482 else:
483 if not filename in self.names:
484 self.names.append(filename)
485 return found
486 def addlines(self,node):
487 self.currentnode_stack.append(node.parent)
488 filepath=node.abspath()
489 self.count_files+=1
490 if self.count_files>recursion_limit:
491 raise PreprocError("recursion limit exceeded")
492 pc=self.parse_cache
493 debug('preproc: reading file %r',filepath)
494 try:
495 lns=pc[filepath]
496 except KeyError:
497 pass
498 else:
499 self.lines.extend(lns)
500 return
501 try:
502 lines=filter_comments(filepath)
503 lines.append((POPFILE,''))
504 lines.reverse()
505 pc[filepath]=lines
506 self.lines.extend(lines)
507 except IOError:
508 raise PreprocError("could not read the file %s"%filepath)
509 except Exception:
510 if Logs.verbose>0:
511 error("parsing %s failed"%filepath)
512 traceback.print_exc()
513 def start(self,node,env):
514 debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
515 bld=node.ctx
516 try:
517 self.parse_cache=bld.parse_cache
518 except AttributeError:
519 bld.parse_cache={}
520 self.parse_cache=bld.parse_cache
521 self.current_file=node
522 self.addlines(node)
523 if env['DEFINES']:
524 try:
525 lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]]
526 lst.reverse()
527 self.lines.extend([('define',x)for x in lst])
528 except AttributeError:
529 pass
530 while self.lines:
531 (token,line)=self.lines.pop()
532 if token==POPFILE:
533 self.count_files-=1
534 self.currentnode_stack.pop()
535 continue
536 try:
537 ve=Logs.verbose
538 if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state)
539 state=self.state
540 if token[:2]=='if':
541 state.append(undefined)
542 elif token=='endif':
543 state.pop()
544 if token[0]!='e':
545 if skipped in self.state or ignored in self.state:
546 continue
547 if token=='if':
548 ret=eval_macro(tokenize(line),self.defs)
549 if ret:state[-1]=accepted
550 else:state[-1]=ignored
551 elif token=='ifdef':
552 m=re_mac.match(line)
553 if m and m.group(0)in self.defs:state[-1]=accepted
554 else:state[-1]=ignored
555 elif token=='ifndef':
556 m=re_mac.match(line)
557 if m and m.group(0)in self.defs:state[-1]=ignored
558 else:state[-1]=accepted
559 elif token=='include'or token=='import':
560 (kind,inc)=extract_include(line,self.defs)
561 if ve:debug('preproc: include found %s (%s) ',inc,kind)
562 if kind=='"'or not strict_quotes:
563 self.current_file=self.tryfind(inc)
564 if token=='import':
565 self.ban_includes.add(self.current_file)
566 elif token=='elif':
567 if state[-1]==accepted:
568 state[-1]=skipped
569 elif state[-1]==ignored:
570 if eval_macro(tokenize(line),self.defs):
571 state[-1]=accepted
572 elif token=='else':
573 if state[-1]==accepted:state[-1]=skipped
574 elif state[-1]==ignored:state[-1]=accepted
575 elif token=='define':
576 try:
577 self.defs[define_name(line)]=line
578 except Exception:
579 raise PreprocError("Invalid define line %s"%line)
580 elif token=='undef':
581 m=re_mac.match(line)
582 if m and m.group(0)in self.defs:
583 self.defs.__delitem__(m.group(0))
584 elif token=='pragma':
585 if re_pragma_once.match(line.lower()):
586 self.ban_includes.add(self.current_file)
587 except Exception ,e:
588 if Logs.verbose:
589 debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack())
590def scan(task):
591 global go_absolute
592 try:
593 incn=task.generator.includes_nodes
594 except AttributeError:
595 raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
596 if go_absolute:
597 nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
598 else:
599 nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
600 tmp=c_parser(nodepaths)
601 tmp.start(task.inputs[0],task.env)
602 if Logs.verbose:
603 debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names))
604 return(tmp.nodes,tmp.names)