1 | import string, os, re, stat |
---|
2 | |
---|
## Command used to inspect NetCDF files.  Resolved on the PATH; an earlier
## hard-coded location (/usr/local/5/bin/ncdump) was dead code, immediately
## overwritten, and has been removed.
ncdumpCmd = 'ncdump'
---|
5 | ## |
---|
6 | ## this class carries a logging method, and is used to carry information about datasets being parsed. |
---|
7 | ## |
---|
class qcHandler:
    """Carries logging and result state for datasets being parsed.

    Holds per-dataset file records, the catalogue of QC check messages
    (keyed by CQC code) with per-code counters, and routes pass/fail
    reports to the supplied logger via _log()."""

    def __init__( self, qcc, log, baseDir, logPasses=True ):
        ## qcc: parsed QC configuration (provides .datasets and .groups);
        ## log: logger with info()/error(); baseDir: prefix stripped from
        ## paths in failure messages; logPasses: also log successful checks.
        self.datasets = {}
        self.groups = {}
        self.baseDir = baseDir
        self.logPasses = logPasses
        self.log = log
        self.nofail = True
        ## fixed: noerror was previously only ever assigned inside _log(), so
        ## reading it before the first error-key failure raised AttributeError.
        self.noerror = True
        self.hasTimeRange = False
        for k in qcc.datasets.keys():
            self.datasets[k] = {}
        for g in qcc.groups:
            self.groups[g[0]] = { 'pat':g[1]}
        ## msg: human-readable text per CQC code; msgk: failure counter per code.
        self.msg = {}
        self.msgk = {}
        self.msg['CQC.101.001.001'] = 'File size above 10 bytes'
        self.msg['CQC.101.001.002'] = 'File name matches DRS syntax'
        self.msg['CQC.101.001.003'] = 'File name time component matches DRS syntax'
        self.msg['CQC.101.001.004'] = 'File name component not in vocabulary'
        self.msg['CQC.101.001.005'] = 'File name component does not match regex'
        self.msg['CQC.101.001.006'] = 'File name component does not match regex list'
        self.msg['CQC.101.001.007'] = 'File name component does not match regex list with constraints'
        self.msg['CQC.102.002.001'] = 'File name time components in ADS have same length'
        self.msg['CQC.102.002.002'] = 'File name time components in ADS do not overlap'
        self.msg['CQC.102.002.003'] = 'File name time components in ADS have no gaps'
        self.msg['CQC.102.002.004'] = 'File name time components in ADS have correct gap for monthly data'
        self.msg['CQC.102.002.005'] = 'File name time components present for multi-file dataset'
        self.msg['CQC.102.002.006'] = 'Consistency checks'
        self.msg['CQC.102.002.007'] = 'Required variables'
        self.msg['CQC.102.002.008'] = 'Required data variables'
        self.msg['CQC.102.002.009'] = 'File is a recognised NetCDF format'
        self.msg['CQC.200.003.001'] = 'NetCDF files occur at one directory level'
        self.msg['CQC.103.003.002'] = 'Conformant version directory'
        self.msg['CQC.103.003.003'] = 'Latest link points to most recent version'
        self.msg['CQC.200.003.004'] = 'ads occurs in a single directory'
        self.msg['CQC.104.004.001'] = 'Consistent global attributes across experiment'
        self.msg['CQC.105.004.002'] = 'Valid calendar attribute'
        self.msg['CQC.101.004.003'] = 'Regular time step in file'
        self.msg['CQC.102.004.004'] = 'Regular time step between files'
        self.msg['CQC.102.004.005'] = 'Consistent global attributes across ADS'
        self.msg['CQC.105.004.006'] = 'Consistent global attributes across ensemble'
        self.msg['CQC.101.004.007'] = 'Required global attributes'
        ## fixed typo: was 'Identifiedmost recent version'
        self.msg['CQC.103.900.001'] = 'Identified most recent version'
        self.msg['CQC.103.003.005'] = 'Version directories identified in directory containing "latest"'
        ## error keys: when these occur, further processing of that file is blocked.
        self.errorKeys = ['CQC.101.001.001', 'CQC.101.001.002']
        ## keys in this list will not be recorded as failed tests.
        self.ignoreKeys = []
        for k in self.msg.keys():
            self.msgk[k] = 0

    def _log( self, key, item, msg, ok=False ):
        """Record the outcome of one check.

        key: a CQC code from self.msg; item: the file/path checked;
        msg: extra detail; ok: True for a pass (logged only when logPasses),
        False for a failure (sets nofail, and noerror for error keys,
        and increments the per-code counter)."""
        if ok:
            if self.logPasses:
                thisMsg = '%s OK: %s: %s: %s' % (key,item,self.msg[key], msg)
                self.log.info( thisMsg )
            return

        if key not in self.ignoreKeys:
            self.nofail = False
        ## strip the base directory to keep messages compact
        ## (str.replace, identical to the old py2 string.replace call).
        item = item.replace( self.baseDir, '' )
        if key in self.errorKeys:
            self.log.error( '%s [ERROR] FAIL !(%s): %s: %s' % (key,self.msg[key], item,msg))
            self.noerror = False
        else:
            thisMsg = '%s FAIL !(%s): %s: %s' % (key,self.msg[key],item, msg)
            self.log.info( thisMsg )

        self.msgk[key] += 1
---|
78 | |
---|
79 | class dirParser: |
---|
80 | |
---|
81 | def __init__(self, qcc, linksOnly=True): |
---|
82 | self.nclevs = [] |
---|
83 | self.qcc = qcc |
---|
84 | self.dirNames = {} |
---|
85 | self.count_nc = 0 |
---|
86 | self.linksOnly=linksOnly |
---|
87 | |
---|
88 | def parse( self, handler,dir, files ): |
---|
89 | handler.log.info( 'Directory: %s [%s]' % (dir, len(files)) ) |
---|
90 | bits = string.split(dir,'/') |
---|
91 | thisLev = len(bits) |
---|
92 | files.sort() |
---|
93 | skipf = [] |
---|
94 | |
---|
95 | for f in files: |
---|
96 | if os.path.isdir( '%s/%s' % (dir,f) ) and f in self.qcc.omitDirectories: |
---|
97 | skipf.append(f) |
---|
98 | for f in skipf: |
---|
99 | handler.log.info( 'skipping %s' % f ) |
---|
100 | files.pop( files.index(f) ) |
---|
101 | |
---|
102 | # record diretory names at each level |
---|
103 | if thisLev not in self.dirNames.keys(): |
---|
104 | self.dirNames[thisLev] = [] |
---|
105 | if bits[-1] not in self.dirNames[thisLev]: |
---|
106 | self.dirNames[thisLev].append( bits[-1] ) |
---|
107 | |
---|
108 | ncFiles = [] |
---|
109 | for f in files: |
---|
110 | if f[-3:] == ".nc" and (not self.linksOnly or os.path.islink('%s/%s'%(dir,f))): |
---|
111 | ncFiles.append(f) |
---|
112 | |
---|
113 | # record which directory levels contain netcdf files |
---|
114 | if len(ncFiles) and thisLev not in self.nclevs: |
---|
115 | self.nclevs.append( thisLev ) |
---|
116 | |
---|
117 | tbits = [] |
---|
118 | ncFiles.sort() |
---|
119 | self.count_nc += len( ncFiles ) |
---|
120 | dbits = string.split( string.strip(dir,'/'), '/' ) |
---|
121 | for f in ncFiles: |
---|
122 | fpath = '%s/%s' % (dir,f) |
---|
123 | handler.noerror = True |
---|
124 | handler.nofail = True |
---|
125 | |
---|
126 | if not os.path.islink( fpath ): |
---|
127 | fsb = os.stat( fpath )[stat.ST_SIZE] |
---|
128 | else: |
---|
129 | fsb = os.stat( fpath )[stat.ST_SIZE] |
---|
130 | if fsb < 10: |
---|
131 | handler._log( 'CQC.101.001.001', fpath, '' ) |
---|
132 | |
---|
133 | fbits = string.split( string.split(f,'.')[0], self.qcc.fileNameSegments.sep ) |
---|
134 | if not len( fbits ) in self.qcc.fileNameSegments.nn: |
---|
135 | handler._log( 'CQC.101.001.002', fpath, str(fbits) ) |
---|
136 | ###### |
---|
137 | ###### |
---|
138 | qfns = self.qcc.fileNameSegments |
---|
139 | ns = {} |
---|
140 | for k in range(len(fbits)): |
---|
141 | ns['fn_%s' % qfns.segments[k][1]] = fbits[k] |
---|
142 | if qfns.segments[k][0] == 'vocabulary': |
---|
143 | assert qfns.segments[k][1] in self.qcc.vocab.keys(), '%s not a valid vocabulary name' % qfns.segments[k][1] |
---|
144 | if not fbits[k] in self.qcc.vocab[qfns.segments[k][1]]: |
---|
145 | handler._log( 'CQC.101.001.004', fpath, 'Not in vocab %s' % qfns.segments[k][1] ) |
---|
146 | elif qfns.segments[k][0] == 'abstractVocab': |
---|
147 | assert qfns.segments[k][1] in self.qcc.vocab.keys(), '%s not a valid abstract vocabulary name' % qfns.segments[k][1] |
---|
148 | this = self.qcc.vocab[qfns.segments[k][1]] |
---|
149 | assert this[0] == 'regex', 'Unexpected start of abstractVocab, %s' % str( this ) |
---|
150 | match = False |
---|
151 | for s,t,tt in this[1]: |
---|
152 | if s.match( fbits[k] ): |
---|
153 | match = True |
---|
154 | ## print 'Match [%s] found for %s {%s}' % (t,fbits[k],tt) |
---|
155 | for k in y.groupdict().keys(): |
---|
156 | ns['fnre_%s' % k] = y.groupdict()[k] |
---|
157 | if tt != None: |
---|
158 | ##print 'trying further test' |
---|
159 | tt1 = string.replace(tt,'$','_arg_') |
---|
160 | y = s.match( fbits[k] ) |
---|
161 | for k in y.groupdict().keys(): |
---|
162 | eval( '_arg_%s = int( %s )' % (k,y.groupdict()[k] ) ) |
---|
163 | eval( 'res = tt1' ) |
---|
164 | ##print res |
---|
165 | else: |
---|
166 | pass |
---|
167 | ## print 'no match [%s] for %s ' % (t,fbits[k]) |
---|
168 | |
---|
169 | if not match: |
---|
170 | handler._log( 'CQC.101.001.006', fpath, 'Failed abstractVocab regex tests %s' % fbits[k] ) |
---|
171 | elif qfns.segments[k][0] == 'condAbstractVocab': |
---|
172 | assert qfns.segments[k][1] in self.qcc.vocab.keys(), '%s not a valid abstract vocabulary name' % qfns.segments[k][1] |
---|
173 | this = self.qcc.vocab[qfns.segments[k][1]] |
---|
174 | assert this[0] == 'regex', 'Unexpected start of abstractVocab, %s' % str( this ) |
---|
175 | match = False |
---|
176 | olc = 0 |
---|
177 | for sss in this[1]: |
---|
178 | ol = False |
---|
179 | if sss[0] == '*': |
---|
180 | ol = True |
---|
181 | else: |
---|
182 | for b in string.split(sss[0],','): |
---|
183 | if b in fbits: |
---|
184 | ol = True |
---|
185 | if ol: |
---|
186 | ##print 'Trying conditional matches' |
---|
187 | olc += 1 |
---|
188 | for s,t,tt in sss[1]: |
---|
189 | |
---|
190 | if not match: |
---|
191 | y = s.match( fbits[k] ) |
---|
192 | if y: |
---|
193 | ## print 'Match [%s] found for %s {%s}' % (t,fbits[k],tt) |
---|
194 | for key in y.groupdict().keys(): |
---|
195 | ns['fnre_%s' % key] = y.groupdict()[key] |
---|
196 | ##print '--- Match [%s] found for %s {%s}' % (t,fbits[k],tt) |
---|
197 | if tt != None: |
---|
198 | ## create string with test condition.` |
---|
199 | tt1 = string.replace(tt,'$','_arg_') |
---|
200 | y = s.match( fbits[k] ) |
---|
201 | for key in y.groupdict().keys(): |
---|
202 | locals()['_arg_%s' % key ] = int( y.groupdict()[key] ) |
---|
203 | ##print '_arg_%s' % key , locals()['_arg_%s' % key ] |
---|
204 | res = eval( tt1 ) |
---|
205 | ##print res,tt1 |
---|
206 | if res: |
---|
207 | match = True |
---|
208 | else: |
---|
209 | match = True |
---|
210 | else: |
---|
211 | ##print 'no match [%s] for %s ' % (t,fbits[k]) |
---|
212 | pass |
---|
213 | ##else: |
---|
214 | ##print 'No overlap for %s, %s' % (sss[0],str(fbits)) |
---|
215 | if olc == 0: |
---|
216 | ##print 'No matches fround for %s' % str(fbits) |
---|
217 | pass |
---|
218 | |
---|
219 | if not match: |
---|
220 | handler._log( 'CQC.101.001.007', fpath, 'Failed constrained regex tests %s' % fbits[k] ) |
---|
221 | elif qfns.segments[k][0] == 'regex-match': |
---|
222 | res = qfns.segments[k][2].match( fbits[k] ) |
---|
223 | if res == None: |
---|
224 | handler._log( 'CQC.101.001.005', fpath, 'Failed regex-match test: %s [%s]' % (fbits[k],qfns.segments[k][1] ) ) |
---|
225 | elif qfns.segments[k][0] == 'vocabulary*': |
---|
226 | pass |
---|
227 | else: |
---|
228 | print 'segment test id %s not recognised' % qfns.segments[k][0] |
---|
229 | raise 'segment test id %s not recognised' % qfns.segments[k][0] |
---|
230 | ################################## |
---|
231 | versionned = False |
---|
232 | if not versionned: |
---|
233 | for k in self.qcc.datasets.keys(): |
---|
234 | if self.qcc.datasets[k].datasetIdArg == 'fileNameBits': |
---|
235 | dsId = self.qcc.datasets[k].getDatasetId( fbits ) |
---|
236 | elif self.qcc.datasets[k].datasetIdArg == 'filePathBits': |
---|
237 | try: |
---|
238 | dsId = self.qcc.datasets[k].getDatasetId( fbits, dbits ) |
---|
239 | except: |
---|
240 | print 'Failed to get dsID:',fbits,dbits |
---|
241 | raise |
---|
242 | else: |
---|
243 | assert False, 'datasetIdMethod %s not supported yet' % self.qcc.datasets[k].datasetIdMethod |
---|
244 | |
---|
245 | if os.path.islink( fpath ): |
---|
246 | dsId += '_lnk' |
---|
247 | if not handler.datasets[k].has_key( dsId ): |
---|
248 | handler.datasets[k][dsId] = [] |
---|
249 | handler.datasets[k][dsId].append( (dir,f, handler.nofail, ns) ) |
---|
250 | |
---|
class dataSetParser:
    """Runs dataset-level checks over the file list collected by dirParser.

    Performs time-range consistency checks across the files of one atomic
    dataset, group constraints, optional in-file checks via ncdump, and the
    required-variable / data-variable tests."""

    def __init__(self,qcc, log, handler):
        ## qcc: parsed configuration; log: logger; handler: qcHandler
        ## carrying pass/fail state and message routing.
        self.qcc = qcc
        self.log = log
        self.h = handler
        self.re_istr = re.compile( '^[0-9]*$' )

    def parse(self,dsclass, dsid, files, inFileChecks=False, forceInFileChecks=True):
        """Check one dataset: dsclass/dsid identify it; files is a list of
        (dir, fileName, fileOk, nameSpace) tuples produced by dirParser."""
        self.h.nofail = True
        fns = map( lambda x: x[1], self.qcc.fileNameSegments.segments )
        ## the dataset is only fully ok if every member file passed
        dsok = True
        for dir,f, fok, ns in files:
            dsok &= fok

        self.h.nofail = dsok
        ##
        ## this test should have a switch -- only to be applied to one category of file group
        ## need dsclass constraints
        ##
        ## constraint: setOnce:
        ##
        if dsok:
            if self.qcc.hasTimeRange:
                allOk = True
                tbl = []
                for dir,f, fok, ns in files:
                    thisOk = True
                    fbits = string.split( string.split(f,'.')[0], self.qcc.fileNameSegments.sep )
                    thisOk, tb = self.qcc.timeRange.get( fbits )

                    allOk &= thisOk
                    tbl.append( tb )

                if allOk:
                    ## kkl[i] counts how many of (start,end) are present for file i
                    kkl = []
                    for tb in tbl:
                        kk = 0
                        for i in range(2):
                            if tb[i] != None:
                                kk+=1
                        kkl.append(kk)

                    ## all files must carry the same number of time components
                    thisOk = True
                    cc = ''
                    for k in range( len(tbl)-1 ):
                        if kkl[k] != kkl[0]:
                            thisOk = False
                            cc += str(files[k])
                    self.h._log( 'CQC.102.002.001', cc, '', ok=thisOk )

                    ## multi-file datasets must have time components at all
                    self.h._log( 'CQC.102.002.005', '%s@%s' % (dsid,dsclass), '', ok=not(thisOk and kkl[0] == 0 and len(files) > 1) )

                    if thisOk and kkl[0] == 2:
                        ## with full (start,end) ranges, consecutive files must not overlap
                        cc = ''
                        for k in range( len(tbl) -1 ):
                            if tbl[k+1][0] <= tbl[k][1]:
                                thisOk = False
                                cc += '%s, %s [%s,%s];' % (str(files[k]), str(files[k+1]),tbl[k][1],tbl[k+1][0])
                        self.h._log( 'CQC.102.002.002', cc, '', ok=thisOk )

            ###
            ### run group constraints
            ###
            if self.qcc.groupConstraints.has_key( dsclass ):
                for ct in self.qcc.groupConstraints[dsclass]:
                    ct.__reset__()
                    for dir,f, fok, ns in files:
                        if fok:
                            rv,res = ct.check( ns )
                            if rv != 'PASS':
                                self.h._log( 'CQC.102.002.006', f, ct.msg, res )

        ##
        ## should only do the in-file checks once
        ## intention is to be able to have multiple definitions of groups with different tests
        ##
        files2 = []
        if (self.h.nofail and inFileChecks) or forceInFileChecks:
            import ncd_parse
            for dir,f, fok, ns in files:
                if fok or forceInFileChecks:
                    tmpDumpFile = '/tmp/qc_ncdump_tmp.txt'
                    if os.path.isfile( tmpDumpFile ):
                        os.unlink( tmpDumpFile )
                    targf = '%s/%s' % (dir,f)
                    fsb = os.stat( targf )[stat.ST_SIZE]
                    ## fixed: assert message referenced undefined name 'targ'
                    assert fsb > 10, 'Small file slipped through: %s, %s' % (targf,fok)
                    ## 'ncdump -k' reports the file's NetCDF format kind
                    cmd = '%s -k %s/%s 2>&1 > %s' % (ncdumpCmd,dir,f,tmpDumpFile)
                    res = os.popen( cmd ).readlines()
                    ii = open( tmpDumpFile ).readlines()
                    if len(ii) == 0:
                        this_ok = False
                    else:
                        this_ok = 'Unknown' not in ii[0]
                    self.h._log( 'CQC.102.002.009', '%s/%s' % (dir,f), '', ok=this_ok )
                    files2.append( (dir,f, this_ok, ns) )
                    if this_ok:
                        ## 'ncdump -h' dumps the header (dims, vars, attributes)
                        cmd = '%s -h %s/%s > %s' % (ncdumpCmd,dir,f,tmpDumpFile)
                        ii = os.popen( cmd ).readlines()
                        fsb = os.stat( tmpDumpFile )[stat.ST_SIZE]
                        assert fsb > 100, 'ncdump output too small, %s/%s' % (dir,f)

                        rd = ncd_parse.read_ncdump( tmpDumpFile )
                        rd.parse()

                        ##
                        ## messy hack -- copying globals attributes into a new dictionary
                        ##
                        for k in rd.gats.keys():
                            ns['g_%s' % k] = rd.gats[k]
                        ## rd.vars[k] is a tuple: (dims,atts), where atts is a dictionary of attributes.
                        for k in rd.vars.keys():
                            ns['v_%s' % k] = rd.vars[k]
                        for k in rd.dims.keys():
                            ns['d_%s' % k] = rd.dims[k]

                        if self.qcc.attributeTests:
                            for a in self.qcc.requiredGlobalAttributes:
                                self.h._log( 'CQC.101.004.007', '%s/%s' % (dir,f), 'Attribute: %s' % a, ok=a in rd.gats.keys() )

        if self.qcc.variableTests:
            for dir,f, fok, ns in files2:
                if fok:
                    for rv in self.qcc.requiredVariables:
                        if rv[0][0] != '$':
                            self.h._log( 'CQC.102.002.007', f, 'Required variable %s'% (rv[0]), 'v_%s' % rv[0] in ns.keys())

        if self.qcc.groups:
            ## assign each passing file to its group instance(s)
            for dir,f, fok, ns in files2:
                if fok:
                    for g in self.qcc.groups:
                        gid = g[1] % ns
                        if not self.qcc.groupDict[g[0]].has_key( gid ):
                            self.qcc.groupDict[g[0]][ gid ] = []
                        self.qcc.groupDict[g[0]][ gid ].append( ( dir,f,fok) )

        if self.qcc.constraintTests:
            for dir,f, fok, ns in files2:
                if fok:
                    for ct in self.qcc.constraints:
                        rv,res = ct.check( ns )
                        if rv != 'PASS':
                            self.h._log( 'CQC.102.002.006', f, ct.msg, res )

        if self.qcc.variableTests:
            for dir,f, fok, ns in files2:
                if fok:
                    for v in self.qcc.dataVariables:
                        var = ns[v[1]]
                        if v[0] == 'ns':
                            isPresent = 'v_%s' % var in ns.keys()
                            if v[3]:
                                self.h._log( 'CQC.102.002.008', f, '%s [%s::%s]'% (var,v[1],v[2]), isPresent )
---|
411 | |
---|
412 | |
---|
class dataset:
    """Simple container identifying a dataset definition by name."""

    def __init__(self, name):
        """Record the dataset's identifying name."""
        self.name = name
---|
416 | |
---|
class qcConfigParse:
    """Reads a QC configuration file and drives its two-level parse.

    Level 0 (parse_l0) splits the file into START/END delimited sections;
    level 1 (parse_l1) interprets each section via section_parser_l1."""

    def __init__( self, file ):
        ## file: path to the configuration file; must exist.
        assert os.path.isfile( file ), '%s not found' % file
        self.firstFile = True
        self.fh = open( file )
        self.file = file
        self.sections = {}
        self.omitDirectories = []

    def close(self):
        """Close the current file handle and forget the path."""
        self.fh.close()
        self.file = None

    def open(self,file):
        """Switch to another configuration file (e.g. an include)."""
        assert os.path.isfile( file ), '%s not found' % file
        self.fh = open( file )
        self.file = file

    def parse_l0(self):
        """Level-0 parse: collect lines between 'START <name>' and
        'END <name>' markers into section_parser_l0 objects."""
        f = False
        sname = None
        for l in self.fh.readlines():
            if f:
                if l[0:4] == 'END ' and string.index( l,sname) == 4:
                    f = False
                    self._parse_l0_section.close()
                else:
                    self._parse_l0_section.add( l )
            elif l[0:6] == 'START ':
                sname = string.strip( string.split(l)[1] )
                self._parse_l0_section = section_parser_l0( self, sname )
                f = True

    def parse_l1(self):
        """Level-1 parse: check required sections are present (first file
        only) and interpret each known section."""
        if self.firstFile:
            requiredSections = ['FILENAME', 'VOCABULARIES','PATH']
            ## fixed: this flag was previously only cleared in the else
            ## branch, so it could never become False and every subsequent
            ## file was forced to carry the required sections.
            self.firstFile = False
        else:
            requiredSections = []

        for s in requiredSections:
            ## fixed: message referenced nonexistent self.section
            assert s in self.sections.keys(), 'Required section %s not found in %s [parsing %s]' % (s, self.sections.keys(),self.file)
        self._parse_l1 = section_parser_l1( self )
        self._parse_l1.parse( 'GENERAL' )
        self._parse_l1.parse( 'VOCABULARIES' )
        self._parse_l1.parse( 'FILENAME' )
        self._parse_l1.parse( 'PATH' )
        self._parse_l1.parse( 'ATTRIBUTES' )
        self._parse_l1.parse( 'VARIABLES' )
        self._parse_l1.parse( 'CONSTRAINTS' )
        self._parse_l1.parse( 'GROUPS' )
---|
470 | |
---|
## Regular expressions shared by the level-1 section parsers for picking
## apart section header lines.
regv = re.compile( r'version=([0-9.]+)' )        # section version number
refs = re.compile( r'separator=(.+)' )           # file-name segment separator
revsc = re.compile( r'validSectionCount,(.+)' )  # list of valid segment counts
---|
474 | |
---|
class section_parser_l1:
    """Interprets the sections collected by the level-0 parse, populating
    the parent qcConfigParse object with vocabularies, file-name segment
    definitions, path/dataset rules, attribute/variable/constraint tests
    and group definitions."""

    def __init__(self,parent):
        ## parent: the qcConfigParse instance holding the raw sections.
        self.parent = parent
        self.currentSection = None
        self.gc = {}            # general configuration $name substitutions

    def _getVersion( self ):
        """Extract the version number from the first line of the section."""
        assert self.currentSection != None, '_getVersion called with no section set'
        x = regv.findall( self.currentSection[0] )
        assert len(x) == 1, 'valid version not identified at start of section: %s\n%s' % (self.currentSectionName,self.currentSection[0])
        self.version = x[0]

    def parse( self, sname ):
        """Dispatch to the section-specific parser for section sname."""
        if self.parent.sections.has_key( sname ):
            self.currentSectionName = sname
            self.currentSection = self.parent.sections[sname]
            self._getVersion()
        else:
            self.currentSection = None

        ## NOTE(review): this is reset on every parse() call, so the value
        ## set by parse_constraints is cleared again when later sections
        ## (e.g. GROUPS) are parsed -- confirm this is intended.
        self.parent.constraintTests = False
        if sname == 'VOCABULARIES':
            self.parent.vocab = {}
            self.parse_vocabularies()
        elif sname == 'FILENAME':
            self.parse_filename()
        elif sname == 'PATH':
            self.parse_path()
        elif sname == 'ATTRIBUTES':
            self.parse_attributes()
        elif sname == 'VARIABLES':
            self.parse_variables()
        elif sname == 'CONSTRAINTS':
            self.parse_constraints()
        elif sname == 'GENERAL':
            self.parse_general()
        elif sname == 'GROUPS':
            self.parse_groups()

    def __get_match( self, regex, line, id ):
        """Return the single match of regex in line; id labels the failure."""
        x = regex.findall( line )
        ## fixed: the message formatted two placeholders with the literal
        ## string 'id,line', which raised TypeError when the assert fired.
        assert len(x) == 1, 'No match found, id=%s, line=%s' % (id, line)
        return x[0]

    def parse_vocabularies(self):
        """Parse vocabulary definitions: literal word lists, file-based
        lists, and (conditional) regex vocabularies with optional extra
        numeric constraints."""
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split( l, ', ' ) )
            id = bits[0]
            isAbstract = False
            if id[0] == '*':
                ## a leading '*' marks an abstract vocabulary
                id = id[1:]
                isAbstract = True

            sl = string.split( bits[1], '|' )
            fromFile = 'file' in sl
            isRegex = 'regex' in sl
            withSub = 'sub' in sl
            isCond = 'cond' in sl

            if not fromFile:
                vlist = string.split( bits[2] )
            else:
                fn = bits[2]
                assert os.path.isfile( fn), 'File %s (specified as vocabulary %s) not found' % (fn,bits[0] )
                ii = open( fn ).readlines()
                if '1perline' in sl:
                    vlist = map( string.strip, ii )
                else:
                    assert False, 'file syntax option (%s) not recognised' % bits[1]

            if isRegex:
                cr = []
                if withSub:
                    if isCond:
                        ## 'cond' entries: '<tokens>: <regexes>' where each
                        ## regex may carry an optional {extra-test} prefix.
                        for ccc in vlist:
                            i0 = ccc.index(':')
                            cc = ccc[:i0]
                            cr0 = []
                            for v in string.split( ccc[i0+1:] ):
                                v = string.strip(v)
                                if v[0] == '{':
                                    i1 = v.index('}')
                                    tt = v[1:i1]
                                    v = v[i1+1:]
                                else:
                                    tt = None
                                v = string.strip( v, "'" )
                                cr0.append( (re.compile( v % self.gc ),v % self.gc,tt) )
                            cr.append( (cc,cr0) )
                    else:
                        for v in vlist:
                            v = string.strip( v, "'" )
                            cr.append( (re.compile( v % self.gc ),v % self.gc) )
                else:
                    for v in vlist:
                        v = string.strip( v, "'" )
                        cr.append( (re.compile( v ),v) )
                self.parent.vocab[id] = ('regex', cr )
            else:
                self.parent.vocab[id] = vlist[:]

    def parse_filename(self):
        """Parse the FILENAME section: separator, valid segment counts,
        then one line per file-name segment definition."""
        sep = self.__get_match( refs, self.currentSection[1], 'File separator' )
        nn = map( int, string.split( self.__get_match( revsc, self.currentSection[2], 'File separator' ),',') )
        self.parent.fileNameSegments = fileNameSegments( self.parent, sep, nn )
        for l in self.currentSection[3:]:
            self.parent.fileNameSegments.add(l)
        self.parent.fileNameSegments.finish()

    def parse_attributes(self):
        """Parse the ATTRIBUTES section into the required-attribute list."""
        if self.currentSection == None:
            self.parent.attributeTests = False
            return
        self.parent.requiredGlobalAttributes = []
        self.parent.attributeTests = True
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split(l,','))
            if bits[0] == 'global':
                if bits[2] == 'required':
                    self.parent.requiredGlobalAttributes.append( bits[1] )

    def parse_general(self):
        """Parse the GENERAL section: $name substitutions and data-variable
        declarations."""
        if self.currentSection == None:
            return
        self.parent.requiredGlobalAttributes = []
        self.parent.dataVariables = []
        for l in self.currentSection[1:]:
            if l[0] == '$':
                ## '$name=value' lines populate the substitution dictionary
                bits = map( string.strip, string.split(l[1:],'='))
                self.gc[bits[0]] = bits[1]
            else:
                bits = map( string.strip, string.split(l,','))
                if bits[0] == 'DataVariable':
                    if bits[1] == 'byName':
                        isRequired = bits[3] == 'required'
                        key, msg = ref_to_key( bits[2] )
                        self.parent.dataVariables.append( ('ns',key,msg,isRequired) )

    def parse_groups(self):
        """Parse the GROUPS section into (name, pattern) pairs; the pattern
        is a %-format string over the file's extracted name space."""
        self.parent.groups = []
        self.parent.groupDict = {}
        if self.currentSection == None:
            return
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split(l,','))
            if bits[1] not in self.parent.groupDict.keys():
                self.parent.groupDict[bits[1]] = {}
            if bits[0] == 'group':
                cc = []
                for r in string.split( bits[2], '.' ):
                    cc.append( '%' + ('(%s)s' % ref_to_key( r )[0] ) )
                self.parent.groups.append( (bits[1], string.join( cc, '.' ) ) )

    def parse_constraints(self):
        """Parse the CONSTRAINTS section into plain and per-group
        constraint checker objects."""
        if self.currentSection == None:
            self.parent.constraintTests = False
            return
        self.parent.constraintTests = True
        self.parent.constraints = []
        self.parent.groupConstraints = {}
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split(l,','))
            bb = string.split( bits[0], ':' )
            if len(bb) == 2:
                ## 'group:constraint' scopes the constraint to a group
                gid = bb[0]
                cid = bb[1]
                if gid not in self.parent.groupConstraints.keys():
                    self.parent.groupConstraints[gid] = []
            else:
                gid = None
                cid = bits[0]
            assert cid in ['identity','onlyOnce'], 'constraint id %s not recognised' % cid

            if cid == 'identity':
                cstr = Constraint__IdentityChecker( bits[1], bits[2] )
            elif cid == 'onlyOnce':
                cstr = Constraint__OnlyOnce( bits[1] )

            if gid == None:
                self.parent.constraints.append( cstr )
            else:
                self.parent.groupConstraints[gid].append( cstr )

    def parse_variables(self):
        """Parse the VARIABLES section into the required-variable list."""
        if self.currentSection == None:
            self.parent.variableTests = False
            return
        self.parent.variableTests = True
        self.parent.requiredVariables = []
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split(l,','))
            isDimension = bits[0] == 'dimension'
            if bits[2] == 'required':
                if bits[1][0] != '$':
                    self.parent.requiredVariables.append( (bits[1],isDimension) )
                else:
                    key,info = ref_to_key( bits[1][1:] )
                    if key == 'VALUE':
                        self.parent.requiredVariables.append( (info,isDimension) )
                    else:
                        self.parent.requiredVariables.append( ('$%s' % key, isDimension) )

    def parse_path(self):
        """Parse the PATH section: dataset id construction rules, version
        handling, dataset hierarchy and omitted directories.

        NOTE(review): unlike the other parse_* methods this stores several
        results (pathTests, datasetIdMethod, datasetVersionMode, ...) on
        the section parser itself rather than on self.parent -- confirm
        downstream readers access them via _parse_l1."""
        if self.currentSection == None:
            self.pathTests = False
            return
        self.pathTests = True
        self.datasetIdMethod = None
        self.datasetVersionMode = [None,]
        self.parent.datasets = {}
        datasetHierarchy = None
        for l in self.currentSection[1:]:
            bits = map( string.strip, string.split(l,','))
            if bits[0] == 'datasetVersion':
                vdsName = bits[1]
                if bits[2] == 'pathElement':
                    self.datasetVersionMode = ['pathElement',]
                    self.versionPathElement = int( bits[3] )
                if bits[4] == 'regex':
                    self.datasetVersionMode.append( 'regex' )
                    self.datasetVersionRe = re.compile( string.strip( bits[5], '"' ) )
                else:
                    self.datasetVersionMode.append( None )
            elif bits[0] == 'datasetId':
                thisDs = dataset(bits[1])
                thisDs.datasetIdMethod = bits[2]
                if bits[2] == 'prints':
                    thisDs.getDatasetId = lambda x: bits[3] % x
                    thisDs.datasetIdTuple = tuple( bits[4:] )
                elif bits[2] == 'joinFileNameSegSlice':
                    thisSlice = slice( int(bits[4]), int(bits[5]) )
                    thisDs.getDatasetId = dsid1( thisSlice, bits[3] ).get
                    thisDs.datasetIdArg = 'fileNameBits'
                elif bits[2] == 'cmip5':
                    thisSlice = slice( int(bits[4]), int(bits[5]) )
                    thisDs.getDatasetId = cmip5_dsid( thisSlice, bits[3] ).get
                    thisDs.datasetIdArg = 'filePathBits'
                self.parent.datasets[bits[1]] = thisDs
            elif bits[0] == 'datasetHierarchy':
                datasetHierarchy = bits[1:]
            elif bits[0] == 'omitDirectories':
                self.parent.omitDirectories = string.split( string.strip( bits[1] ) )

        if self.datasetVersionMode[0] != None:
            assert vdsName in self.parent.datasets.keys(), 'Invalid dataset specified for version: %s [%s]' % (vdsName, str( self.parent.datasets.keys() ) )
            self.versionnedDataset = self.parent.datasets[ vdsName ]

        if datasetHierarchy == None:
            self.datasetHierarchy = False
        else:
            self.datasetHierarchy = True
            ## link the datasets named in the hierarchy into a parent/child chain
            bb = string.split( string.strip( datasetHierarchy[0]), '/' )
            for b in bb:
                assert b in self.parent.datasets.keys(), 'Invalid dataset hierarchy, %s not among defined datasets' % b
            for k in self.parent.datasets.keys():
                self.parent.datasets[k].inHierarchy = k in bb

            for k in range( len(bb) ):
                if k == 0:
                    self.parent.datasets[bb[k]].parent = None
                else:
                    self.parent.datasets[bb[k]].parent = self.parent.datasets[bb[k-1]]
                if k == len(bb)-1:
                    self.parent.datasets[bb[k]].child = None
                else:
                    self.parent.datasets[bb[k]].child = self.parent.datasets[bb[k+1]]
---|
750 | |
---|
class dsid1:
    """Builds a dataset id by joining a slice of file-name segments."""

    def __init__(self,slice,sep):
        ## slice: a slice object selecting the segments; sep: join separator.
        self.slice = slice
        self.sep = sep

    def get(self,x):
        """Return the id for segment list x (selected segments joined by sep)."""
        ## sep.join is the direct equivalent of the old py2-only string.join
        return self.sep.join( x[self.slice] )
---|
759 | |
---|
class cmip5_dsid:
    """Build a CMIP5-style dataset identifier from file-name segments plus
    the last two components of the file path."""

    def __init__(self, slice, sep):
        # "slice" shadows the builtin; name kept for caller compatibility.
        self.slice = slice   # slice selecting the file-name segments used
        self.sep = sep       # separator between the selected segments

    def get(self, x, y):
        """Return the id for file-name segments x and path segments y.

        Format: <joined x[self.slice]>_<y[-2]>.<y[-1]>.
        """
        # str.join replaces string.join, which was removed in Python 3.
        return '%s_%s.%s' % (self.sep.join(x[self.slice]), y[-2], y[-1])
---|
768 | |
---|
769 | |
---|
class get_trange:
    """Extract and validate the time-range component of a tokenised file name.

    pat is either a regex source string or a pre-compiled pattern; it is
    expected to define named groups 'start' and 'end' (and optionally
    'isClim').  kseg is the index of the file-name segment that holds the
    time range.
    """

    def __init__(self, pat, kseg):
        self.kseg = kseg
        # Used to verify that captured start/end fields are pure digits.
        self.re_istr = re.compile('^[0-9]*$')
        if isinstance(pat, str):
            self.pat = pat
            self.re = re.compile(pat)
        else:
            # Assume a pre-compiled pattern object was passed.
            self.re = pat

    def _test(self, s):
        """Return True when segment s matches the time-range pattern."""
        return self.re.match(s) is not None

    def _get(self, s, handler=None):
        """Parse segment s into (ok, [start, end]).

        Returns (False, (None, None)) when the pattern does not match.
        On a match, digit fields are converted to int; a non-digit field is
        reported through handler (if given) and flags the result as not ok.
        """
        x = self.re.match(s)
        tb = [None, None]
        if x is None:
            return False, tuple(tb)

        thisOk = True
        tb[0] = x.groupdict().get('start', None)
        tb[1] = x.groupdict().get('end', None)
        if 'isClim' in x.groupdict():
            tb.append(x.groupdict()['isClim'])
        for i in range(2):
            b = tb[i]
            if b is not None:
                if self.re_istr.match(b) is None:
                    if handler is not None:
                        # Bug fix: the original passed "dir + f", which
                        # references the builtin dir and an undefined name f;
                        # log the segment actually being parsed instead.
                        handler._log('CQC.101.001.003', s, 'part of string not an integer')
                    thisOk = False
                else:
                    tb[i] = int(tb[i])

        return thisOk, tb

    def test(self, l):
        """Check the time-range segment of token list l (absent segment passes)."""
        if len(l) < self.kseg + 1:
            return True
        return self._test(l[self.kseg])

    def get(self, l):
        """Parse the time-range segment of token list l (absent: ok, no bounds)."""
        if len(l) < self.kseg + 1:
            return True, (None, None)
        return self._get(l[self.kseg])
---|
817 | |
---|
class fileNameSegments:
    """Parse the file-name section of the config into per-segment specs.

    Each config line describes one separator-delimited segment of the file
    name: a vocabulary lookup, a regex (optionally a TimeRange), or a plain
    condition tuple.  finish() freezes the specs into self.segments.
    """

    def __init__(self, parent, sep, nn):
        self.sep = sep          # segment separator used in file names
        self.nn = nn            # declared segment indices (sorted in place)
        self.nn.sort()
        self.__segments = {}    # segment index -> spec tuple
        self.parent = parent

    def add(self, line):
        """Register one config line of the form 'k, type, name[, regex]'.

        Raises after reporting when a supplied regex fails to compile.
        """
        bits = [b.strip() for b in line.split(', ')]
        k = int(bits[0])
        if bits[1] == 'vocabulary':
            assert bits[2] in self.parent.vocab.keys(), 'Vocabulary specified in file name section not defined in vocab sections, %s' % bits[2]

            self.__segments[k] = ('vocabulary', bits[2])
        elif bits[1][0:5] == 'regex' or bits[2] == 'TimeRange':
            try:
                regex = re.compile(bits[3].strip("'"))
            except:
                # print() form is valid in both Python 2 and Python 3
                # (the original used the Python-2-only print statement).
                print('Failed to compile (in re): %s' % bits[3])
                raise
            self.__segments[k] = (bits[1], bits[2], regex)
        else:
            self.__segments[k] = tuple(bits[1:])

        if bits[2] == 'TimeRange':
            # Activate time-range handling on the parent handler.
            self.parent.hasTimeRange = True
            self.parent.timeRange = get_trange(regex, k)

    def finish(self):
        """Freeze accumulated specs into self.segments (None where undeclared)."""
        sl = []
        for k in range(self.nn[-1]):
            sl.append(self.__segments.get(k, None))
        self.segments = tuple(sl)
---|
852 | |
---|
class Constraint__IdentityChecker:
    """Constraint asserting that two referenced values are equal.

    Mode 's' compares one reference against a literal VALUE; mode 'd'
    compares two references against each other.  check() returns
    ('ANS', bool) when evaluable and ('PASS', None) when a referenced key
    is absent.
    """

    def __init__(self, ref1, ref2):
        self.mode = 'd'
        self.Ref1 = self.__parse_ref(ref1)
        self.Ref2 = self.__parse_ref(ref2)
        # Normalise so that a literal, if present, always sits in Ref2.
        if self.Ref1 == 'VALUE':
            self.Ref1 = self.Ref2
            self.Ref2 = 'VALUE'
        if self.Ref2 == 'VALUE':
            self.mode = 's'

        if self.mode == 's':
            self.msg = '%s equals %s' % (self.Ref1[1], self.value)
        else:
            self.msg = '%s equals %s' % (self.Ref1[1], self.Ref2[1])

    def __parse_ref(self, ref):
        """Map a 'CLASS/...' reference to a (key, description) pair, or the
        sentinel 'VALUE' (stashing the literal in self.value)."""
        # str.split replaces string.split, which was removed in Python 3.
        bits = ref.split('/')
        assert bits[0] in ['VALUE', 'PATH', 'FILENAME', 'ATTRIBUTES', 'CONFIG', 'ARGS'], 'Bad line in CONSTRAINT section of config file'
        if bits[0] == 'ATTRIBUTES':
            if bits[1] == 'Global':
                return ('g_%s' % bits[2], 'Global attribute %s' % bits[2])
        elif bits[0] == 'FILENAME':
            return ('fn_%s' % bits[1], 'File name component %s' % bits[1])
        elif bits[0] == 'VALUE':
            self.value = bits[1]
            return 'VALUE'

    def __reset__(self):
        # Stateless check: nothing to reset between datasets.
        pass

    def check(self, fns):
        """Evaluate the constraint against the key->value mapping fns."""
        if self.mode == 's':
            # 'in' replaces dict.has_key, which was removed in Python 3.
            if self.Ref1[0] in fns:
                return ('ANS', fns[self.Ref1[0]] == self.value)
            else:
                return ('PASS', None)
        else:
            if self.Ref1[0] in fns and self.Ref2[0] in fns:
                return ('ANS', fns[self.Ref1[0]] == fns[self.Ref2[0]])
            else:
                return ('PASS', None)
---|
896 | |
---|
def parse_ref(ref):
    """Translate a 'CLASS/...' constraint reference into a (key, description)
    pair used to look up parsed file-name / attribute values.

    Returns ('VALUE', literal) for VALUE references, and None for reference
    classes with no key mapping (PATH, CONFIG, ARGS) — preserving the
    historical fall-through behaviour.
    """
    # str.split replaces string.split, which was removed in Python 3.
    bits = ref.split('/')
    assert bits[0] in ['VALUE', 'PATH', 'FILENAME', 'FILENAMEregex', 'ATTRIBUTES', 'CONFIG', 'ARGS'], 'Bad line in CONSTRAINT section of config file'
    if bits[0] == 'ATTRIBUTES':
        if bits[1] == 'Global':
            return ('g_%s' % bits[2], 'Global attribute %s' % bits[2])
    elif bits[0] == 'FILENAME':
        return ('fn_%s' % bits[1], 'File name component %s' % bits[1])
    elif bits[0] == 'FILENAMEregex':
        return ('fnre_%s' % bits[1], 'File name component %s' % bits[1])
    elif bits[0] == 'VALUE':
        return ('VALUE', bits[1])
---|
909 | |
---|
class Constraint__OnlyOnce:
    """Constraint asserting that a referenced value occurs at most once
    across the checked files (stateful: counts occurrences)."""

    def __init__(self, ref1):
        self.nn = 0                     # occurrences seen so far
        self.Ref1 = parse_ref(ref1)     # (key, description) pair
        self.msg = '%s occurs only once' % self.Ref1[1]

    def __reset__(self):
        """Restart the occurrence count for a new dataset."""
        self.nn = 0

    def check(self, fns):
        """Count the referenced key in fns; fail from the second sighting.

        Returns ('ANS', bool) when the key is present, ('PASS', None)
        otherwise.
        """
        # 'in' replaces dict.has_key, which was removed in Python 3.
        if self.Ref1[0] in fns:
            self.nn += 1
            return ('ANS', self.nn <= 1)
        else:
            # Removed dead code that sorted fns.keys() to no effect.
            return ('PASS', None)
---|
928 | |
---|
def ref_to_key(ref):
    """Translate a 'CLASS/...' reference into a (key, description) pair.

    Same mapping as parse_ref but without FILENAMEregex support; kept as a
    separate function for backward compatibility.  Returns None for
    reference classes with no key mapping (PATH, CONFIG, ARGS).
    """
    # str.split replaces string.split, which was removed in Python 3.
    bits = ref.split('/')
    assert bits[0] in ['VALUE', 'PATH', 'FILENAME', 'ATTRIBUTES', 'CONFIG', 'ARGS'], 'Bad line in CONSTRAINT section of config file'
    if bits[0] == 'ATTRIBUTES':
        if bits[1] == 'Global':
            return ('g_%s' % bits[2], 'Global attribute %s' % bits[2])
    elif bits[0] == 'FILENAME':
        return ('fn_%s' % bits[1], 'File name component %s' % bits[1])
    elif bits[0] == 'VALUE':
        return ('VALUE', bits[1])
---|
939 | |
---|
class section_parser_l0:
    """Minimal config-section parser: collects stripped lines and, on
    close(), stores them on the parent under the section name."""

    def __init__(self, parent, sectionName):
        self.sname = sectionName    # section name used as the storage key
        self.parent = parent        # object carrying the .sections dict
        self.lines = []             # lines accumulated so far

    def add(self, l):
        """Append line l (whitespace-stripped) to the section buffer."""
        # str.strip replaces string.strip, which was removed in Python 3.
        self.lines.append(l.strip())

    def close(self):
        """Store the buffered lines in parent.sections and reset the buffer."""
        assert isinstance(self.parent.sections, dict), 'parent.sections has wrong type (%s), should be a dictionary' % (str(type(self.parent.sections)))

        # Store a copy so clearing self.lines cannot mutate the stored list.
        self.parent.sections[self.sname] = self.lines[:]
        self.lines = []
---|