
Source Code for Module PyFoam.RunDictionary.ParsedParameterFile

#  ICE Revision: $Id: /local/openfoam/Python/PyFoam/PyFoam/RunDictionary/ParsedParameterFile.py 7522 2011-07-14T22:29:37.344800Z bgschaid  $
"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream

from PyFoam.Error import error,warning,FatalErrorPyFoamException

from os import path
from copy import deepcopy
class ParsedParameterFile(FileBasisBackup):
    """Parameter file whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 binaryMode=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 createZipped=True):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param binaryMode: Parse long lists in binary mode (to be overridden by
        the settings in the header)
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: Do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        self.binaryMode=binaryMode

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              binaryMode=self.binaryMode,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory.
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=generator.makeString(firstLevel=True)

        return string

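# Usage sketch for ParsedParameterFile: values are read and changed through
# the dict-style interface defined above, and str() regenerates the file
# text. writeFile() is assumed to be inherited from FileBasisBackup (it is
# not shown in this listing); the path and key are only illustrative.
#
#   ctrl=ParsedParameterFile("system/controlDict")
#   if "writeInterval" in ctrl:
#       ctrl["writeInterval"]=100
#   print str(ctrl)          # the regenerated dictionary text
#   ctrl.writeFile()         # assumed base-class method that writes to disk
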
class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}

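# Usage sketch for WriteParameterFile: build a dictionary from scratch and
# write it out. The writeFile() call is assumed to come from the FileBasis
# machinery this class ultimately inherits from (not shown here); names and
# values are only illustrative.
#
#   out=WriteParameterFile("constant/myDict",className="dictionary")
#   out["nu"]=1e-05
#   out["solverInfo"]={"solver":"PCG","tolerance":1e-06}
#   out.writeFile()          # assumed inherited writer
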
class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])

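# The Enumerate helper simply numbers its names in order, so for the
# inputModes instance above:
#
#   inputModes.merge    == 0
#   inputModes.default  == 5
#
# p_inputMode further down stores one of these values whenever an
# "#inputMode ..." directive is parsed.
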
class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 binaryMode=False,
                 duplicateCheck=False,
                 duplicateFail=True):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param duplicateCheck: Check for duplicates in dictionaries
        @param duplicateFail: Fail if a duplicate is discovered"""

        self.binaryMode=binaryMode
        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines
        self.duplicateCheck=duplicateCheck
        self.duplicateFail=duplicateFail

        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        #sys.setrecursionlimit(50000)
        #print sys.getrecursionlimit()

        self.emptyCnt=0

        self.header,self.data=self.parse(content)

    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __delitem__(self,key):
        del self.data[key]

    def __iter__(self):
        for key in self.data:
            yield key

##    def __len__(self):
##        if self.data==None:
##            return 0
##        else:
##            return len(self.data)

    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def addCommentToDecorations(self,text):
        if self.preserveComments:
            self.addToDecoration(text)

    def addNewlinesToDecorations(self,text):
        if self.preserveNewLines:
            self.addToDecoration(text)

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """ Get the data structure"""
        return self.data

    def getHeader(self):
        """ Get the OpenFOAM-header"""
        return self.header

288 """Prints the context of the current index""" 289 print "------" 290 print c[max(0,ind-100):max(0,ind-1)] 291 print "------" 292 print ">",c[ind-1],"<" 293 print "------" 294 print c[min(len(c),ind):min(len(c),ind+100)] 295 print "------"
296
297 - def parserError(self,text,c,ind):
298 """Prints the error message of the parser and exit""" 299 print "PARSER ERROR:",text 300 print "On index",ind 301 self.printContext(c,ind) 302 raise PyFoamParserError("Unspecified")
303
    def condenseAllPreFixLists(self,orig):
        """Checks whether this list consists only of prefix-lists (a length
        followed by a list of that length) and if so strips the lengths"""
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig

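    # Worked example for condenseAllPreFixLists: a list of prefix-lists such
    # as "( 3 ( 1 2 3 ) 2 ( 4 5 ) )" arrives here as [3,[1,2,3],2,[4,5]] and
    # is condensed to [[1,2,3],[4,5]]; anything that does not match the
    # length/list pattern is returned unchanged.
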
    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'CODESTREAMCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
        'CODESTART',
        'CODEEND',
        )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
        }

    states = (
        ('unparsed', 'exclusive'),
        ('codestream', 'exclusive'),
        )

    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1
        # print "left",t.lexer.level,

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1
        # print "right",t.lexer.level,
        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # print t.value
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_codestream_end(self,t):
        r"\#\}"
        t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
        t.lexer.lexpos-=2
        t.type = "CODESTREAMCHUNK"
        t.lexer.lineno += t.value.count('\n')
        t.lexer.begin('INITIAL')
        return t

    t_codestream_ignore = ''

    def t_codestream_throwaway(self,t):
        r'[^#]'
        pass

    def t_codestream_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_NAME(self,t):
        r'[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'NAME')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_CODESTART = r'\#\{'

    t_CODEEND = r'\#\}'

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    # Define a rule so we can track line numbers
    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t
        # self.addNewlinesToDecorations(t.value)

    # C or C++ comment (ignore)
    def t_ccode_comment(self,t):
        r'(/\*(.|\n)*?\*/)|(//.*)'
        t.lexer.lineno += t.value.count('\n')
        self.addCommentToDecorations(t.value)
        pass

    # Error handling rule
    def t_error(self,t):
        raise PyFoamParserError("Illegal character '%s'" % t.value[0])
        # t.lexer.skip(1) # the old days when illegal characters were accepted

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        # p[0] = ( p[1] , dict(zip(p[2][::2],p[2][1::2])) )
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]
        if p[0]["format"]=="binary":
            self.binaryMode=True
            raise FatalErrorPyFoamException("Can not parse binary files. It is not implemented")
        elif p[0]["format"]=="ascii":
            self.binaryMode=False
        else:
            raise FatalErrorPyFoamException("Don't know how to parse file format",p[0]["format"])

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,noHeader=True)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_inputMode(self,p):
        '''inputMode : INPUTMODE ERROR
                     | INPUTMODE WARN
                     | INPUTMODE PROTECT
                     | INPUTMODE DEFAULT
                     | INPUTMODE MERGE
                     | INPUTMODE OVERWRITE'''
        p[0] = p[1] + " " + p[2]
        self.inputMode=getattr(inputModes,p[2])

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

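    # Sketch of the macro handling above when doMacroExpansion is active,
    # e.g. for a file containing
    #
    #   #include "initialConditions"
    #   pressure $initialPressure;
    #
    # p_include merges the (headerless) included file into the current
    # dictionary, and p_substitution below replaces $initialPressure with a
    # deep copy of the value found in the top-level dictionary (or with a
    # "<Symbol '...' not found>" marker if it is missing). The file name and
    # key here are only illustrative.
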
    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            if self.duplicateCheck:
                if p[2][0] in p[0]:
                    if self.duplicateFail:
                        error("Key",p[2][0],"already defined")
                    else:
                        warning("Key",p[2][0],"already defined")
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = self.condenseAllPreFixLists(p[2])
        if len(p[2])==3 or len(p[2])==9 or len(p[2])==6:
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=apply(Vector,p[2])
                elif len(p[2])==9:
                    p[0]=apply(Tensor,p[2])
                else:
                    p[0]=apply(SymmTensor,p[2])

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_codestream(self,p):
        '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
        p[0] = Codestream(p[3])

    def p_codeSeen(self,p):
        '''codeSeen : '''
        p.lexer.begin('codestream')
        p.lexer.level=0
        p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

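    # Example of the prelist/unparsed interplay above (sketch): with
    # listLengthUnparsed=100 a field entry such as "20000 ( 0.1 0.2 ... )"
    # is not tokenized element by element; prelist_seen switches the lexer
    # into the 'unparsed' state and the raw text between the parentheses is
    # wrapped in an UnparsedList(20000,...) instead of a Python list.
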
    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if nm in self.dictStack[0]:
                p[0]=deepcopy(self.dictStack[0][nm])
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | substitution ';'
                    | dictkey codestream ';'
                    | dictkey dictionary'''
        if len(p)==4 and type(p[2])==list:
            # remove the prefix from long lists (if present)
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)==4:
            p[0] = ( p[1] , p[2] )
        elif len(p)==3:
            if p[2]==';':
                p[0]= (p[1],'')
            else:
                p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

    def p_fieldvalue_uniform(self,p):
        '''fieldvalue : UNIFORM number
                      | UNIFORM vector
                      | UNIFORM tensor
                      | UNIFORM symmtensor'''
        p[0] = Field(p[2])

    def p_fieldvalue_nonuniform(self,p):
        '''fieldvalue : NONUNIFORM NAME list
                      | NONUNIFORM NAME prelist'''
        p[0] = Field(p[3],name=p[2])

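    # Sketch of the field rules above: "internalField uniform (1 0 0);"
    # parses (roughly) to Field(Vector(1,0,0)), while
    # "value nonuniform List<scalar> 3(1.0 2.0 3.0);" parses to a Field
    # carrying the list and the name "List<scalar>"; the entry names and
    # values here are only illustrative.
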
    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            if len(p[1])==2 and p[1][0]=="uniform":
                p[0]=Field(p[1][1])
            elif len(p[1])==3 and p[1][0]=="nonuniform":
                p[0]=Field(p[1][2],name=p[1][1])
            else:
                p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            ## print type(p[1][-1])
            ## if type(p[1][-1])==int and type(p[2])==tuple:
            ##     print "Hepp",p[2]
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

    def p_empty(self,p):
        'empty :'
        pass

    def p_error(self,p):
        raise PyFoamParserError("Syntax error at token", p) # .type, p.lineno
        # Just discard the token and tell the parser it's okay.
        # self.yacc.errok()

class PyFoamParserError(FatalErrorPyFoamException):
    def __init__(self,descr,data=None):
        FatalErrorPyFoamException.__init__(self,"Parser Error:",descr)
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

    def __repr__(self):
        return str(self)

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,
                 content,
                 debug=False,
                 duplicateCheck=False,
                 duplicateFail=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True,
                                boundaryDict=False,
                                duplicateCheck=duplicateCheck,
                                duplicateFail=duplicateFail)

    def __str__(self):
        return str(FoamFileGenerator(self.data))

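# Usage sketch for FoamStringParser: parse a dictionary snippet that has no
# FoamFile header and access the result through the inherited dict-style
# interface (keys and values are only illustrative).
#
#   p=FoamStringParser("nu 1e-05;\nturbulence {model kEpsilon;}\n")
#   p["turbulence"]["model"]      # -> 'kEpsilon'
#   print str(p)                  # regenerated via FoamFileGenerator
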
class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file"""

        ParsedParameterFile.__init__(self,name,backup=backup,debug=debug,boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string

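# Usage sketch for ParsedBoundaryDict: the boundary file is exposed as a
# dictionary keyed by patch name (path and patch name are illustrative).
#
#   bnd=ParsedBoundaryDict("constant/polyMesh/boundary")
#   bnd["inlet"]["type"]="patch"
#   print str(bnd)                # patches reordered by startFace
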
class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        ParsedParameterFile.__init__(self,name,backup=False,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)

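# Usage sketch for ParsedFileHeader: read only the FoamFile header, for
# instance to check the class of a field file (the path is illustrative).
#
#   hdr=ParsedFileHeader("0/U")
#   hdr["class"]                  # e.g. 'volVectorField' for a velocity field
#   "object" in hdr               # -> True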