source: CCCC/trunk/ceda_cc/c4.py @ 242

Subversion URL: http://proj.badc.rl.ac.uk/svn/exarch/CCCC/trunk/ceda_cc/c4.py@242
Revision 242, 22.2 KB, checked in by astephen, 5 years ago

bugs in CCMI configuration fixed

import sys

## callout to summary.py: if this option is selected, imports of libraries are not needed.
if __name__ == '__main__' and sys.argv[1] == '--sum':
      import summary
      summary.main()
      raise SystemExit(0)

# Standard library imports
import os, string, time, logging, sys, glob, pkgutil
import shutil
import shelve  ## used by recorder.open() when the record type is not 'map'
## pkgutil is used in file_utils
# Third party imports

## Local imports with 3rd party dependencies
#### netcdf --- currently only support for cdms2 -- re-arranged to facilitate support for alternative modules

import file_utils

from file_utils import fileMetadata, ncLib

# Local imports
import utils_c4 as utils
import config_c4 as config

reload( utils )

from xceptions import baseException

from fcc_utils2 import tupsort


#driving_model_ensemble_member = <CMIP5Ensemble_member>
#rcm_version_id = <RCMVersionID>

class dummy(object):
   pass

pathTmplDict = { 'CORDEX':'%(project)s/%(product)s/%(domain)s/%(institute)s/%(driving_model)s/%(experiment)s/%(ensemble)s/%(model)s/%(model_version)s/%(frequency)s/%(variable)s/files/%%(version)s/',   \
                 'SPECS':'%(project)s/%(product)s/%(institute)s/%(model)s/%(experiment)s/%(start_date)s/%(frequency)s/%(realm)s/%(table)s/%(variable)s/%(ensemble)s/files/%%(version)s/', \
                 'CMIP5':'%(project)s/%(product)s/%(institute)s/%(model)s/%(experiment)s/%(frequency)s/%(realm)s/%(table)s/%(ensemble)s/files/%%(version)s/%(variable)s/', \
                 'CCMI':'%(project)s/%(product)s/%(institute)s/%(model)s/%(experiment)s/%(frequency)s/%(realm)s/%(table)s/%(ensemble)s/files/%%(version)s/%(variable)s/', \
                 '__def__':'%(project)s/%(product)s/%(institute)s/%(model)s/%(experiment)s/%(frequency)s/%(realm)s/%(variable)s/%(ensemble)s/files/%%(version)s/', \
               }

## Core DRS: list of vocab names
## Path template: -- current version puts upper case in "project"
## Dataset template: 

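## Illustrative expansion of a path template (values are hypothetical, not from this source):
##   pathTmplDict['CMIP5'] % {'project':'CMIP5', 'product':'output1', 'institute':'MOHC',
##                            'model':'HadGEM2-ES', 'experiment':'rcp45', 'frequency':'mon',
##                            'realm':'atmos', 'table':'Amon', 'ensemble':'r1i1p1', 'variable':'tas'}
##   --> 'CMIP5/output1/MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/files/%(version)s/tas/'
## The doubled %% leaves %(version)s in place for substitution in a later pass.

## The recorder class below accumulates one status record per checked file (the "map" type)
## and, via checktids(), flags files that share a tracking_id.
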
class recorder(object):

  def __init__(self,project,fileName,type='map',dummy=False):
    self.dummy = dummy
    self.file = fileName
    self.type = type
    self.pathTmpl = '%(project)s/%(product)s/%(domain)s/%(institute)s/%(driving_model)s/%(experiment)s/%(ensemble)s/%(model)s/%(model_version)s/%(frequency)s/%(variable)s/files/%%(version)s/'
    self.pathTmpl = pathTmplDict.get(project,pathTmplDict['__def__'])
    self.records = {}
    self.tidtupl = []

  def open(self):
    if self.type == 'map':
      self.fh = open( self.file, 'a' )
    else:
      self.sh = shelve.open( self.file )

  def close(self):
    if self.type == 'map':
      self.fh.close()
    else:
      self.sh.close()

  def add(self,fpath,drs,safe=True):
    assert self.type == 'map','Can only do map files at present'
    assert type(drs) == type( {} ), '2nd user argument to method add should be a dictionary [%s]' % type(drs)
    print drs.keys()
    tpath = self.pathTmpl % drs
    if not self.dummy:
      assert os.path.isfile( fpath ), 'File %s not found' % fpath
      fdate = time.ctime(os.path.getmtime(fpath))
      sz = os.stat(fpath).st_size
    else:
      fdate = "na"
      sz = 0
    record = '%s | OK | %s | modTime = %s | target = %s ' % (fpath,sz,fdate,tpath)
    fn = string.split( fpath, '/' )[-1]
    for k in ['creation_date','tracking_id']:
      if k in drs.keys():
        record += ' | %s = %s' % (k,drs[k])
        if k == 'tracking_id':
          self.tidtupl.append( (fn,drs[k]) )

    self.records[fn] = record

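  ## Illustrative map-file record written by add() (values are hypothetical):
  ##   /data/.../tas_...nc | OK | 1234567 | modTime = Mon Jan  6 10:00:00 2014 | target = CMIP5/.../files/%(version)s/tas/ | tracking_id = ...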
  def modify(self,fn,msg):
    assert fn in self.records.keys(),'Attempt to modify non-existent record %s, %s' % (fn,str(self.records.keys()[0:10]))
    if string.find( self.records[fn], '| OK |') == -1:
      ##print 'File %s already flagged with errors' % fn
      return
    s = string.replace( self.records[fn], '| OK |', '| %s |' % msg )
    ##print '--> ',s
    self.records[fn] = s

  def checktids(self):
## sort by tracking id
    if len( self.tidtupl ) == 1:
      return
    self.tidtupl.sort( cmp=tupsort(k=1).cmp )
    nd = 0
    fnl = []
    for k in range(len(self.tidtupl)-1):
      if self.tidtupl[k][1] == self.tidtupl[k+1][1]:
        print 'Duplicate tracking_id: %s, %s:: %s' % (self.tidtupl[k][0],self.tidtupl[k+1][0],self.tidtupl[k][1])
        nd += 1
        if len(fnl) == 0 or fnl[-1] != self.tidtupl[k][0]:
          fnl.append( self.tidtupl[k][0])
        fnl.append( self.tidtupl[k+1][0])
    if nd == 0:
      print 'No duplicate tracking ids found in %s files' % len(self.tidtupl)
    else:
      print '%s duplicate tracking ids' % nd
      for f in fnl:
        self.modify( f, 'ERROR: duplicate tid' )

  def dumpAll(self,safe=True):
    keys = self.records.keys()
    keys.sort()
    for k in keys:
      self.dump( self.records[k], safe=safe )

  def dump( self, record, safe=True ):
    if safe:
      self.open()
    self.fh.write( record + '\n' )
    if safe:
      self.close()

  def addErr(self,fpath,reason,safe=True):
    record = '%s | %s' % (fpath, reason)
    fn = string.split( fpath, '/' )[-1]
    self.records[fn] = record

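## The checker class below runs the file-level checks in sequence: file name (checkFileName),
## global attributes (checkGlobalAttributes), standard dimensions (checkStandardDims) and,
## where the project configuration requests it, grids (checkGrids). It stops at the first
## stage that does not complete; when all stages complete, errorCount sums the counts from
## the individual stages.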
class checker(object):
  def __init__(self, pcfg, cls,reader,abortMessageCount=-1):
    self.info = dummy()
    self.info.pcfg = pcfg
    self.info.abortMessageCount = abortMessageCount
    self.calendar = 'None'
    self.ncReader = reader
    self.cfn = utils.checkFileName( parent=self.info,cls=cls)
    self.cga = utils.checkGlobalAttributes( parent=self.info,cls=cls)
    self.cgd = utils.checkStandardDims( parent=self.info,cls=cls)
    self.cgg = utils.checkGrids( parent=self.info,cls=cls)
    self.cls = cls

    # Define vocabs based on project
    ##self.vocabs = getVocabs(pcgf)
    self.vocabs = pcfg.vocabs

  def checkFile(self,fpath,log=None,attributeMappings=[]):
    self.calendar = 'None'
    self.info.log = log

    fn = string.split( fpath, '/' )[-1]

    if attributeMappings != []:
      self.ncReader.loadNc( fpath )
      self.ncReader.applyMap( attributeMappings, self.cfn.globalAttributesInFn, log=log )
      ncRed = True
      thisFn = self.ncReader.fn
    else:
      ncRed = False
      thisFn = fn

    self.cfn.check( thisFn )
    if not self.cfn.completed:
      self.completed = False
      return
    if not self.info.pcfg.project[:2] == '__':
      if not os.path.isfile( fpath ):
        print 'File %s not found [2]' % fpath
        self.completed = False
        return

    if not ncRed:
      ##print fpath
      self.ncReader.loadNc( fpath )
    self.ga = self.ncReader.ga
    self.va = self.ncReader.va
    self.da = self.ncReader.da

    if self.cfn.freq != None:
      vGroup = self.cfn.freq
    else:
      vGroup = self.info.pcfg.mipVocabVgmap.get(self.cfn.group,self.cfn.group)
    self.cga.check( self.ga, self.va, self.cfn.var, vGroup, self.vocabs, self.cfn.fnParts )
    if not self.cga.completed:
      self.completed = False
      return

    ##self.cgd.plevRequired = config.plevRequired
    ##self.cgd.plevValues = config.plevValues
    ##self.cgd.heightRequired = config.heightRequired
    ##self.cgd.heightValues = config.heightValues
    ##self.cgd.heightRange = config.heightRange
    self.cgd.check( self.cfn.var, self.cfn.freq, self.da, self.va, self.cga.isInstantaneous )
    self.calendar = self.cgd.calendar
    if not self.cgd.completed:
      self.completed = False
      return

    if self.info.pcfg.doCheckGrids:
      ##self.cgg.rotatedPoleGrids = config.rotatedPoleGrids
      ##self.cgg.interpolatedGrids = config.interpolatedGrids
      self.cgg.check( self.cfn.var, self.cfn.domain, self.da, self.va )

      if not self.cgg.completed:
        self.completed = False
        return
    self.completed = True
    self.drs = self.cga.getDrs()
    self.drs['project'] = self.info.pcfg.project
    self.errorCount = self.cfn.errorCount + self.cga.errorCount + self.cgd.errorCount + self.cgg.errorCount

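## c4_init parses the command line and sets up batch and per-file logging. Options handled
## below: -p <project>, -f <file>, -d <directory>, -D <directory tree>, -R <record file>,
## --ld <log directory>, --log single|multi, --aMap <attribute mapping file>,
## --flfmode / --blfmode <logfile mode>, --force-ncq / --force-cdms2 / --force-pync4 /
## --force-scientific, --cae (--catchAllExceptions), --copy-config, --sum and -h.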
class c4_init(object):

  def __init__(self,args=None):
    self.logByFile = True
    self.policyFileLogfileMode = 'w'
    self.policyBatchLogfileMode = 'np'
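    ## Logfile mode policies (as interpreted below and in getFileLog/closeFileLog):
    ##   'a' -- append to an existing log; 'w' -- overwrite; 'n'/'np' -- require a new file
    ##   (an assertion fails if the file already exists); 'wo'/'np' -- make the log
    ##   read-only (chmod 444) when it is closed.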
    if args==None:
       args = sys.argv[1:]
    nn = 0

    self.attributeMappingFile = None
    self.recordFile = 'Rec.txt'
    self.logDir = 'logs_02'
    self.errs = []

    # Set default project to "CORDEX"
    self.project = "CORDEX"
    self.holdExceptions = False
    forceLogOrg = None
    argsIn = args[:]

    # The --copy-config option must be the first argument if it is present.
    if args[0] == '--copy-config':
       if len(args) < 2:
         self.commandHints( argsIn )
       args.pop(0)
       dest_dir = args.pop(0)
       config.copy_config(dest_dir)
       print 'Configuration directory copied to %s.  Set CC_CONFIG_DIR to use this configuration.' % dest_dir
       print
       raise SystemExit(0)
    elif args[0] == '-h':
       print 'Help command not implemented yet'
       raise SystemExit(0)

    self.summarymode = args[0] == '--sum'
    if self.summarymode:
      return

    self.forceNetcdfLib = None
    fltype = None
    argu = []
    while len(args) > 0:
      next = args.pop(0)
      if next == '-f':
        flist = [args.pop(0),]
        self.logByFile = False
        fltype = '-f'
        self.source = flist[0]
      elif next == '--log':
        x = args.pop(0)
        assert x in ['single','multi','s','m'], 'unrecognised logging option (--log): %s' % (x)
        if x in ['multi','m']:
           forceLogOrg = 'multi'
        elif x in ['single','s']:
           forceLogOrg = 'single'
      elif next == '--force-ncq':
        self.forceNetcdfLib = 'ncq3'
      elif next == '--force-cdms2':
        self.forceNetcdfLib = 'cdms2'
      elif next == '--force-pync4':
        self.forceNetcdfLib = 'netCDF4'
      elif next == '--force-scientific':
        self.forceNetcdfLib = 'Scientific'
      elif next == '--flfmode':
        lfmk = args.pop(0)
        assert lfmk in ['a','n','np','w','wo'], 'Unrecognised file logfile mode (--flfmode): %s' % lfmk
        self.policyFileLogfileMode = lfmk
      elif next == '--blfmode':
        lfmk = args.pop(0)
        assert lfmk in ['a','n','np','w','wo'], 'Unrecognised batch logfile mode (--blfmode): %s' % lfmk
        self.policyBatchLogfileMode = lfmk
      elif next == '-d':
        fdir = args.pop(0)
        flist = glob.glob( '%s/*.nc' % fdir  )
        self.source = '%s/*.nc' % fdir
      elif next == '-D':
        flist  = []
        fdir = args.pop(0)
        for root, dirs, files in os.walk( fdir, followlinks=True ):
          for f in files:
            fpath = '%s/%s' % (root,f)
            if (os.path.isfile( fpath ) or os.path.islink( fpath )) and f[-3:] == '.nc':
              flist.append( fpath )
        self.source = '%s/.....' % fdir
      elif next == '-R':
        self.recordFile = args.pop(0)
      elif next == '--ld':
        self.logDir = args.pop(0)
      elif next in ['--catchAllExceptions','--cae']:
        self.holdExceptions = True
      elif next == '--aMap':
        self.attributeMappingFile = args.pop(0)
        assert os.path.isfile( self.attributeMappingFile ), 'The token "--aMap" should be followed by the path or name of a file'
      elif next == "-p":
        self.project = args.pop(0)
      else:
       print 'Unused argument: %s' % next
       argu.append( next )
       nn+=1
    if nn != 0:
      print 'Unused arguments: ', argu
      self.commandHints( argsIn )

    if self.project == 'CMIP5' and fltype != '-f':
      fl0 = []
      for f in flist:
        if string.find( f, '/latest/' ) != -1:
          fl0.append(f)
      flist = fl0

    if forceLogOrg != None:
      if forceLogOrg == 'single':
        self.logByFile = False
      else:
        self.logByFile = True

    if self.project[:2] == '__':
       self.source = 'dummy'
       flist = []
       ss = 'abcdefgijk'
       ss = 'abcdefgijklmnopqrstuvwxyz'
       ss = 'abc'
       for i in range(10):
         v = 'v%s' % i
         for a in ss:
           for b in ss:
             flist.append( '%s_day_%s_%s_1900-1909.nc' % (v,a,b) )
    flist.sort()
    fnl = []
    for f in flist:
      fn = string.split(f, '/')[-1]
      fnl.append(fn)
    nd = 0
    dupl = []
    ## adjacent entries of the sorted name list are compared to detect duplicate file names
    for k in range(1,len(fnl)):
      if fnl[k] == fnl[k-1]:
        nd += 1
        dupl.append( fnl[k] )
    self.dupDict = {}
    for f in dupl:
      self.dupDict[f] = 0
    if nd != 0:
      self.errs.append( 'Duplicate file names encountered: %s' % nd )
      self.errs.append( dupl )
    self.flist = flist
    self.fnl = fnl
    if not os.path.isdir(   self.logDir ):
       os.mkdir(   self.logDir )

    tstring1 = '%4.4i%2.2i%2.2i_%2.2i%2.2i%2.2i' % time.gmtime()[0:6]
    self.batchLogfile = '%s/qcBatchLog_%s.txt' % (  self.logDir,tstring1)
## default appending to myapp.log; mode='w' forces a new file (deleting old contents).
    self.logger = logging.getLogger('c4logger')
    if self.policyBatchLogfileMode in ['n','np']:
        assert not os.path.isfile( self.batchLogfile ), '%s exists and policy set to new file' % self.batchLogfile
    m = self.policyBatchLogfileMode[0]
    if m == 'n':
      m = 'w'
    if m == 'a':
      self.hdlr = logging.FileHandler(self.batchLogfile)
    else:
      self.hdlr = logging.FileHandler(self.batchLogfile,mode=m)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    self.hdlr.setFormatter(formatter)
    self.logger.setLevel(logging.INFO)
    self.logger.addHandler(self.hdlr)

    self.attributeMappings = []
    self.attributeMappingsLog = None
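    ## Each non-comment line of the attribute mapping file is expected to have the form
    ##   <attr>=<value>[;<attr>=<value>...]|<attr>=<new value>
    ## i.e. one or more conditions to the left of the '|' and a single target assignment on
    ## the right; lines beginning with '#' are ignored. Illustrative line (not from the source):
    ##   institute=XXX;model=YYY|institute=ZZZ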
    if self.attributeMappingFile != None:
      for l in open( self.attributeMappingFile ).readlines():
        if l[0] != '#':
          bb = string.split( string.strip(l), '|' )
          assert len(bb) ==2, "Error in experimental module attributeMapping -- configuration line not scanned [%s]" % str(l)
          bits = string.split( bb[0], ';' )
          cl = []
          for b in bits:
            cl.append( string.split(b, '=' ) )
          self.attributeMappings.append( ('am001',cl, string.split(bb[1],'=') ) )
      self.attributeMappingsLog = open( 'attributeMappingsLog.txt', 'w' )

  def commandHints(self, args):
    if args[0] in ['-h','--sum']:
      print 'Arguments look OK'
    elif args[0] == '--copy-config':
      print 'Usage [configuration copy]: ceda_cc --copy-config <target directory path>'
    else:
      if not( '-f' in args or '-d' in args or '-D' in args):
        print 'No file or target directory specified'
        print """USAGE:
ceda_cc -p <project> [-f <NetCDF file>|-d <directory containing files>|-D <root of directory tree>] [other options]

With the "-D" option, all files in the directory tree beneath the given directory will be checked. With the "-d" option, only files in the given directory will be checked.
"""
    raise SystemExit(0)


  def getFileLog( self, fn, flf=None ):
    if flf == None:
      tstring2 = '%4.4i%2.2i%2.2i' % time.gmtime()[0:3]
      if fn in self.dupDict.keys():
        tag = '__%2.2i' % self.dupDict[fn]
        self.dupDict[fn] += 1
      else:
        tag = ''
      self.fileLogfile = '%s/%s%s__qclog_%s.txt' % (self.logDir,fn[:-3],tag,tstring2)
      if self.policyFileLogfileMode in ['n','np']:
        assert not os.path.isfile( self.fileLogfile ), '%s exists and policy set to new file' % self.fileLogfile
      m = self.policyFileLogfileMode[0]
      if m == 'n':
        m = 'w'
    else:
      m = 'a'
      self.fileLogfile = flf

    self.fLogger = logging.getLogger('fileLog_%s_%s' % (fn,m))
    if m == 'a':
      self.fHdlr = logging.FileHandler(self.fileLogfile)
    else:
      self.fHdlr = logging.FileHandler(self.fileLogfile,mode=m)
    fileFormatter = logging.Formatter('%(message)s')
    self.fHdlr.setFormatter(fileFormatter)
    self.fLogger.addHandler(self.fHdlr)
    self.fLogger.setLevel(logging.INFO)
    return self.fLogger

  def closeFileLog(self):
    self.fLogger.removeHandler(self.fHdlr)
    self.fHdlr.close()
    if self.policyFileLogfileMode in ['wo','np']:
      os.popen( 'chmod %s %s;' % (444, self.fileLogfile) )

  def closeBatchLog(self):
    self.logger.removeHandler(self.hdlr)
    self.hdlr.close()
    if self.policyBatchLogfileMode in ['wo','np']:
      os.popen( 'chmod %s %s;' % (444, self.batchLogfile) )


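## The main class below drives a batch run: it builds the project configuration, the netCDF
## reader and a checker, loops over the file list assembled by c4_init, writes one record per
## file to the recorder, and finally dumps the collected DRS dictionaries to a JSON file
## alongside the map file.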
class main(object):

  def __init__(self,args=None,abortMessageCount=-1,printInfo=False,monitorFileHandles = False):
    logDict = {}
    ecount = 0
    c4i = c4_init(args=args)

    isDummy  = c4i.project[:2] == '__'
    if (ncLib == None) and (not isDummy):
       raise baseException( 'Cannot proceed with non-dummy [%s] project without a netcdf API' % (c4i.project) )
    pcfg = config.projectConfig( c4i.project )
    ncReader = fileMetadata(dummy=isDummy, attributeMappingsLog=c4i.attributeMappingsLog,forceLib=c4i.forceNetcdfLib)
    self.cc = checker(pcfg, c4i.project, ncReader,abortMessageCount=abortMessageCount)
    rec = recorder( c4i.project, c4i.recordFile, dummy=isDummy )

    # This list will record the drs dictionaries of all checked files for export to JSON
    drs_list = []

    if monitorFileHandles:
      self.monitor = utils.sysMonitor()
    else:
      self.monitor = None

    cal = None
    c4i.logger.info( 'Starting batch -- number of files: %s' % (len(c4i.flist)) )
    c4i.logger.info( 'Source: %s' % c4i.source )
    if len( c4i.errs ) > 0:
      for i in range(0,len( c4i.errs ), 2 ):
        c4i.logger.info( c4i.errs[i] )

    self.cc.info.amapListDraft = []
    cbv = utils.checkByVar( parent=self.cc.info,cls=c4i.project,monitor=self.monitor)
    cbv.impt( c4i.flist )
    if printInfo:
      print cbv.info

    fileLogOpen = False
    self.resList =  []
    stdoutsum = 2000
    npass = 0
    kf = 0
    for f in c4i.flist:
      kf += 1
      rv = False
      ec = None
      if monitorFileHandles:
        nofhStart = self.monitor.get_open_fds()
      fn = string.split(f,'/')[-1]
      c4i.logger.info( 'Starting: %s' % fn )
      try:
  ### need to have a unique name, otherwise get mixing of logs despite close statement below.
  ### if duplicate file names are present, this will be recorded in the main log, tag appended to file level log name (not yet tested).
        if c4i.logByFile:
          fLogger = c4i.getFileLog( fn )
          logDict[fn] = c4i.fileLogfile
          c4i.logger.info( 'Log file: %s' % c4i.fileLogfile )
          fileLogOpen = True
        else:
          fLogger = c4i.logger

        fLogger.info( 'Starting file %s' % fn )
## default appending to myapp.log; mode='w' forces a new file (deleting old contents).
        self.cc.checkFile( f, log=fLogger,attributeMappings=c4i.attributeMappings )

        if self.cc.completed:
          if cal not in (None, 'None') and self.cc.cgd.varGroup != "fx":
            if cal != self.cc.calendar:
              cal_change_err_msg = 'Error: change in calendar attribute %s --> %s' % (cal, self.cc.calendar)
              c4i.logger.info(cal_change_err_msg)
              fLogger.info(cal_change_err_msg)
              self.cc.errorCount += 1

          cal = self.cc.calendar
          ec = self.cc.errorCount
        rv =  ec == 0
        if rv:
          npass += 1
        self.resList.append( (rv,ec) )

        if c4i.logByFile:
          if self.cc.completed:
            fLogger.info( 'Done -- error count %s' % self.cc.errorCount )
          else:
            fLogger.info( 'Done -- checks not completed' )
          c4i.closeFileLog( )
          fileLogOpen = False

        if self.cc.completed:
          c4i.logger.info( 'Done -- error count %s' % self.cc.errorCount )
          ecount += self.cc.errorCount
          if self.cc.errorCount == 0:
            rec.add( f, self.cc.drs )
            drs_list.append({'path': f, 'drs': self.cc.drs})
          else:
            rec.addErr( f, 'ERRORS FOUND | errorCount = %s' % self.cc.errorCount )
        else:
          ecount += 20
          c4i.logger.info( 'Done -- testing aborted because of severity of errors' )
          rec.addErr( f, 'ERRORS FOUND AND CHECKS ABORTED' )
      except:
        c4i.logger.error("Exception has occurred", exc_info=1)
        if fileLogOpen:
          fLogger.error("C4.100.001: [exception]: FAILED:: Exception has occurred", exc_info=1)
          c4i.closeFileLog( )
          fileLogOpen = False
        rec.addErr( f, 'ERROR: Exception' )
        if not c4i.holdExceptions:
          raise
      if stdoutsum > 0 and kf%stdoutsum == 0:
         print '%s files checked; %s passed this round' % (kf,npass)
      if monitorFileHandles:
        nofhEnd = self.monitor.get_open_fds()
        if nofhEnd > nofhStart:
           print 'Open file handles: %s --- %s' % (nofhStart, nofhEnd)

    self.cc.info.log = c4i.logger

    if c4i.project not in ['SPECS','CCMI','CMIP5']:
       cbv.c4i = c4i
       cbv.setLogDict( logDict )
       cbv.check( recorder=rec, calendar=self.cc.calendar)
       try:
         ecount += cbv.errorCount
       except:
         ecount = None
    ncReader.close()
    if type( self.cc.info.amapListDraft ) == type( [] ) and len(  self.cc.info.amapListDraft ) > 0:
      ll =  self.cc.info.amapListDraft
      ll.sort()
      oo = open( 'amapDraft.txt', 'w' )
      oo.write( ll[0] + '\n' )
      for i in range( 1,len(ll) ):
        if ll[i] != ll[i-1]:
          oo.write( ll[i] + '\n' )
      oo.close()
    if c4i.project in ['SPECS','CCMI','CMIP5']:
      rec.checktids()
    rec.dumpAll()

    #!TODO: the recorder class could export JSON if it recorded the full drs dictionaries.
    #       This lightweight solution re-uses the filename from the rec class and dumps
    #       JSON in a separate function.
    json_file = os.path.splitext(rec.file)[0] + '.json'
    dump_drs_list(drs_list, json_file)

    if printInfo:
      print 'Error count %s' % ecount
    ##c4i.hdlr.close()
    c4i.closeBatchLog()
    self.ok = all( map( lambda x: x[0], self.resList ) )


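## dump_drs_list appends one JSON object per line (one per successfully checked file) to the
## given file. Illustrative output line (values are hypothetical):
##   {"path": "/data/.../tas_...nc", "drs": {"project": "CMIP5", "variable": "tas", ...}}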
def dump_drs_list(drs_list, filename):
    import json
    with open(filename, 'a+') as fh:
          for drs in drs_list:
                fh.write(json.dumps(drs))
                fh.write('\n')


def main_entry():
   """
   Wrapper around main() for use with setuptools.

   """
   main(printInfo=True)

if __name__ == '__main__':
  if sys.argv[1] == '--sum':
      import summary
      summary.main()
      raise SystemExit(0)
  main_entry()


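## Illustrative invocations (option names as parsed in c4_init above; paths are hypothetical):
##   ceda_cc -p CORDEX -D /path/to/data --ld logs
##   ceda_cc -p CMIP5 -f /path/to/file.nc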
##else:
  ##f1 = '/data/u10/cordex/AFR-44/SMHI/ECMWF-ERAINT/evaluation/SMHI-RCA4/v1/day/clh/clh_AFR-44_ECMWF-ERAINT_evaluation_r1i1p1_SMHI-RCA4_v1_day_19810101-19851231.nc'
  ##f2 = '/data/u10/cordex/AFR-44/SMHI/ECMWF-ERAINT/evaluation/SMHI-RCA4/v1/sem/tas/tas_AFR-44_ECMWF-ERAINT_evaluation_r1i1p1_SMHI-RCA4_v1_sem_200012-201011.nc'
  ##f3 = '/data/u10/cordex/AFR-44i/SMHI/ECMWF-ERAINT/evaluation/SMHI-RCA4/v1/mon/tas/tas_AFR-44i_ECMWF-ERAINT_evaluation_r1i1p1_SMHI-RCA4_v1_mon_199101-200012.nc'
  ##cc.checkFile( f3 )