source: mauRepo/MolesManager/trunk/src/libs/migration/processor/deployment.py @ 8409

Subversion URL: http://proj.badc.rl.ac.uk/svn/ndg/mauRepo/MolesManager/trunk/src/libs/migration/processor/deployment.py@8409
Revision 8409, 35.9 KB, checked in by mnagni, 9 years ago

Incomplete - # 22490: CEDA Observation Collection - Description
 http://team.ceda.ac.uk/trac/ceda/ticket/22490

'''
BSD Licence
Copyright (c) 2012, Science & Technology Facilities Council (STFC)
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

    * Redistributions of source code must retain the above copyright notice,
        this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright notice,
        this list of conditions and the following disclaimer in the documentation
        and/or other materials provided with the distribution.
    * Neither the name of the Science & Technology Facilities Council (STFC)
        nor the names of its contributors may be used to endorse or promote
        products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Created on 15 Nov 2011

@author: Maurizio Nagni
'''
from libs.migration.processor.commons import (findMolesLineage,
    createMO_ResponsiblePartyInfo, DO_BADC, DO_NEODC, findAuthorsInResource,
    createMD_Identifier, extractSummary, extractQuality,
    hasMOSameHash, getAtomDocumentHashByMO, extractTitle,
    createCEDA_Result,
    createEX_GeographicBoundingBox, extractGeographicExtentInMigrationDocument, findDownloadLinksInMigrationDocument,
    extractContent, createCI_Citation, createCI_Date, createDate,
    createTM_Position, createTM_Instant, extractMolesCreationDate,
    createDateTime, isoDateTimeStringToTimeDate, extractMolesProviderID,
    DO_UKSSDC, createMO_Organization,
    createCI_Contact, createCI_Address, createCI_OnlineResource,
    createCI_Telephone, extractMolesTemporalRange, isoDateStringToTimeDate,
    createTM_Period, findAccessLinksInMigrationDocument,
    findLinksInDeployment, createMD_LegalConstraints,
    createDQ_Element, createDQ_ConformanceResult, findUpdatedDate,
    createMD_Metadata, createMO_OnlineResource, createCEDA_Review, calculateHash,
    createCI_ResponsibleParty, extractUpdateFrequency,
    findLinksInMigrationDocument, findSubTypeInDPT, extractMolesPublishedDate,
    createMD_Keywords, hasMOBeenProcessed, createMO_Individual)
from libs.epb import EPB
from libs.migration.processor.deployment_data import DeploymentDataProcessor
from libs.migration.exception.exceptions import NoDataLineage, NoAssociatedAuthor
from libs.migration.authors import authors
from logging import StreamHandler
import logging
from datetime import date
from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_onlinefunctioncode import CI_OnLineFunctionCode
from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_datetypecode import CI_DateTypeCode
from ea_model.iso_19115_2006_metadata_corrigendum.constraint_information.md_restrictioncode import MD_RestrictionCode
from copy import deepcopy
import datetime
from MolesManager.ceda_guid import CedaGUID
from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_rolecode import CI_RoleCode
from ea_model.iso_19115_2006_metadata_corrigendum.maintenance_information.md_maintenancefrequencycode import MD_MaintenanceFrequencyCode
from ea_model.moles3_4.utilities.ceda_rolevalue import CEDA_RoleValue
from ea_model.moles3_4.observation.mo_observationpublicationstatevalue import MO_ObservationPublicationStateValue
from MolesManager.codelist import MM_RoleValue, MM_ObservationPublicationStateValue, getCLValue
from ea_model.ceda_metadatamodel.ceda_observation.ceda_observation import CEDA_Observation
from ea_model.ceda_metadatamodel.ceda_result.ceda_curationvalue import CEDA_CurationValue
from ea_model.ceda_metadatamodel.ceda_utilities.ceda_reviewstatusvalue import CEDA_ReviewStatusValue
from ea_model.ceda_metadatamodel.ceda_utilities.ceda_reviewfrequencyvalue import CEDA_ReviewFrequencyValue

MET_GEO_FEATURE = 'Meteorological geographical features'
ORTHOIMAGERY = 'Orthoimagery'


class DeploymentProcessor(object):
    '''
        Migrates a deployment element into a CEDA_Observation entity.
    '''
    publisherName = 'CEDA'

    log = logging.getLogger('DeploymentProcessor')
    log.addHandler(StreamHandler())
    log.setLevel(logging.INFO)

    def __init__(self, dataEntityMigration, deploymentMigration, epbRepo):
        '''
            Initializes the class
            @param dataEntityMigration: a DataEntityMigration instance
            @param deploymentMigration: the DeploymentMigration instance
            @param epbRepo: an instance of EPBRepo
        '''
        self._dataEntityMigration = dataEntityMigration
        self._deploymentMigration = deploymentMigration
        self.epbRepo = epbRepo
        self._dataEntityHasSameHash = hasMOSameHash(self._dataEntityMigration)
        self._deploymentHasSameHash = hasMOSameHash(self._deploymentMigration)
        self._deploymentHasBeenProcessed = hasMOBeenProcessed(self._deploymentMigration)

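    # Illustrative usage sketch (assumes the DataEntityMigration, DeploymentMigration and
    # EPBRepo instances have already been looked up by the calling migration code):
    #
    #     processor = DeploymentProcessor(dataEntityMigration, deploymentMigration, epbRepo)
    #     ceda_observation = processor.process()
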
    def _existsCEDAasPublisher(self):
        for rp in self._ceda_observation.relatedParty:
            if rp.role == getCLValue(MM_RoleValue.cl_publisher):
                for party in rp.parties:
                    if party.name == DeploymentProcessor.publisherName:
                        return True
        return False

112    """                   
113    def _extractResponsiblePartyInfo(self, authors = None, parsedParties = []):
114        '''
115            @param authors: the deployment's authors
116            @param parsedParties: a list to fill with the created MO_ResponsiblePartyInfos
117        '''
118        #First is assumend to be the author
119        i_party = createMO_Organization(name = authors[:1])
120        parsedParties.append(createMO_ResponsiblePartyInfo(MO_RoleValue.cl_author, [i_party]))
121        i_party = []
122        if len(authors) < 2:
123            return
124       
125        for i_name in authors[:1]:
126            i_party.append(createMO_Organization(name = i_name))           
127        parsedParties.append(createMO_ResponsiblePartyInfo(MO_RoleValue.cl_coinvestigator, i_party))           
128    """
129
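    # Merges a new MO_ResponsiblePartyInfo into the existing list: if an entry with the same
    # role is already present its parties are extended, otherwise the new entry is appended.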
    def _addResponsiblePartyInfo(self, oldResponsiblePartyInfos, newResponsiblePartyInfo):
        opi = None
        for oldPartyInfo in oldResponsiblePartyInfos:
            if oldPartyInfo.role == newResponsiblePartyInfo.role:
                opi = oldPartyInfo
                break

        if len(oldResponsiblePartyInfos) == 0 or opi is None:
            oldResponsiblePartyInfos.append(newResponsiblePartyInfo)

        if opi is not None:
            for np in newResponsiblePartyInfo.party:
                opi.party.append(np)

    def _extractIndividualsAndOrganizations(self, tmp_auth, whereAreAuthors):
        if whereAreAuthors is None:
            raise Exception("migrationObject is None")
        #Initialised before the try block so that a failure still returns an empty result
        ret = {'ind': [], 'org': [], 'co_ind': [], 'co_org': []}
        try:
            if tmp_auth['author'] == 'unknown':
                doc_authors = findAuthorsInResource(self._dataEntityMigration)
                tmp_auth = authors[doc_authors['authors']]

            if tmp_auth['type'] == 'ind':
                ret['ind'].append(tmp_auth['author'])
            elif tmp_auth['type'] == 'org':
                ret['org'].append(tmp_auth['author'])

            if tmp_auth['author'] == 'unknown':
                DeploymentProcessor.log.debug("%s %s %s has unknown author" \
                                              % (whereAreAuthors.doc_status, whereAreAuthors.doc_owner, whereAreAuthors.doc_name))

            for item in tmp_auth['co_author_type']:
                if (tmp_auth['co_author'][tmp_auth['co_author_type'].index(item)] == 'unknown'):
                    doc_authors = findAuthorsInResource(self._dataEntityMigration)
                    tmp_auth = authors[doc_authors['contributors']]
                    break

            for index in range(len(tmp_auth['co_author_type'])):
                ptype = tmp_auth['co_author_type'][index]
                if ptype == 'ind':
                    ret['co_ind'].append(tmp_auth['co_author'][index])
                elif ptype == 'org':
                    ret['co_org'].append(tmp_auth['co_author'][index])

                if (tmp_auth['co_author'][index] == 'unknown'):
                    DeploymentProcessor.log.info("%s %s %s has unknown author" \
                                                 % (whereAreAuthors.doc_status, whereAreAuthors.doc_owner, whereAreAuthors.doc_name))
        except Exception as e:
            DeploymentProcessor.log.error(e)
        return ret

    def updateObservation(self):
        return EPB.searchOrCreate(CEDA_Observation, self._deploymentMigration.ceda_observation_id)

    def assignDOI(self, observation, doi):
        if doi and doi.has_key('href'):
            doi = doi['href'][22:]

            #Check if a DOI has already been assigned
            observation = self.epbRepo.moles3EPB.loadAttributes(observation, 'identifier')
            obs_identifier = observation.identifier
            if obs_identifier:
                for ident in obs_identifier:
                    if ident.code == doi:
                        return

            py_date = None
            cited_responsible = createCI_ResponsibleParty(role=getCLValue(CI_RoleCode.cl_publisher), \
                                                          organizationName='NERC - British Atmospheric Data Centre')
            if doi.upper() == '10.5285/E8F43A51-0198-4323-A926-FE69225D57DD':
                py_date = date(2011, 4, 1)
            elif doi.upper() == '10.5285/78114093-E2BD-4601-8AE5-3551E62AEF2B':
                py_date = date(2011, 11, 29)
            elif doi.upper() == '10.5285/DB8D8981-1A51-4D6E-81C0-CCED9B921390':
                py_date = date(2012, 4, 16)
            elif doi.upper() == '10.5285/639A3714-BC74-46A6-9026-64931F355E07':
                py_date = date(2012, 4, 16)

            if py_date:
                dt = createDate(py_date)
                ci_date = createCI_Date(getCLValue(CI_DateTypeCode.cl_publication), date = dt)
                i_authority = createCI_Citation("DOI", date = ci_date)
                identifier = createMD_Identifier(code = doi, authority=i_authority)
                self.epbRepo.moles3EPB.updateCedaObject(observation, {'identifier': identifier})
                DeploymentProcessor.log.info("DOI: %s" % (doi))

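    # Adds provider-specific keywords: 'Meteorological geographical features' for BADC
    # deployments, 'Orthoimagery' for NEODC deployments.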
    def _assignKeywords(self, ceda_observation):
        if self._deploymentHasSameHash:
            return

        provider_id = extractMolesProviderID(self._deploymentMigration)
        i_keywords = []
        if provider_id == DO_BADC:
            i_keywords.append(MET_GEO_FEATURE)
        if provider_id == DO_NEODC:
            i_keywords.append(ORTHOIMAGERY)
        if len(i_keywords) > 0:
            #Is this a first-time process?
            if not self._deploymentHasBeenProcessed:
                ceda_observation.keywords.append(createMD_Keywords(i_keywords))
            else:
                ceda_observation.keywords.keyword = i_keywords

    def _assignLineage(self, observation):
        if self._deploymentHasSameHash:
            return

        data_lineage = findMolesLineage(self._dataEntityMigration)
        if data_lineage is None:
            raise NoDataLineage(self._dataEntityMigration)

        if data_lineage != observation.dataLineage:
            observation.dataLineage = data_lineage

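    # Builds the observation's CEDA_Result from the download links (deployment first, then
    # data entity), the logical path looked up in infodb and the curation category.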
    def _assignResult(self, observation):
        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
            return

        i_sources = []
        download = findDownloadLinksInMigrationDocument(self._deploymentMigration)
        content = None
        if len(download) == 0:
            download = findDownloadLinksInMigrationDocument(self._dataEntityMigration)
            content = extractContent(self._dataEntityMigration)
        else:
            content = extractContent(self._deploymentMigration)
        for dwn in download:
            int_description = None
            int_applicationProfile = None
            if content.has_key('formats'):
                #int_applicationProfile = content['formats']
                pass
            if dwn['href'].startswith('http://badc.nerc.ac.uk/browse') or dwn['href'].startswith('http://neodc.nerc.ac.uk/browse'):
                int_description = "download directly from archive"
            i_sources.append(createMO_OnlineResource(linkage = dwn['href'], name = dwn['title'], \
                                                     function = getCLValue(CI_OnLineFunctionCode.cl_download), \
                                                     description = int_description, applicationProfile = int_applicationProfile))

        dataentity_id = '%s__ATOM__%s' % (self._dataEntityMigration.doc_owner, self._dataEntityMigration.doc_name)
        dataentity_id = dataentity_id.replace('.atom', '')
        infodb_de = self.epbRepo.infodbEPB.getCedaInfoApp_dataentityByDE_ID(dataentity_id)
        i_logical_path = '/dummy'
        if infodb_de is None and download:
            #Falls back on the last download link when the data entity is not in infodb
            i_logical_path = dwn['href'][dwn['href'].index('/browse/') + 7:]

        if infodb_de and infodb_de.has_key('logical_path'):
            i_logical_path = infodb_de['logical_path']

        i_category = getCLValue(CEDA_CurationValue.cl_a)
        if infodb_de and infodb_de.has_key('category') and infodb_de['category']:
            i_category = CEDA_CurationValue.from_string(infodb_de['category'].lower())

        if not self._deploymentHasBeenProcessed:
            observation.result = createCEDA_Result(i_category, i_logical_path, source = i_sources)
            return

        if observation.result.internalPath != i_logical_path:
            observation.result.internalPath = i_logical_path

        if observation.result.curationCategory != i_category:
            observation.result.curationCategory = i_category

        #Still have to update observation.result.source

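    # Attaches the data provider (BADC, NEODC or UKSSDC) to the observation as publisher,
    # custodian, distributor, point of contact, metadata owner and curator.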
    def _assignPublisherCurator(self, observation):
        if self._deploymentHasSameHash:
            return

        provider_id = extractMolesProviderID(self._deploymentMigration)
        party = None
        if provider_id == DO_BADC:
            i_linkage = 'http://badc.rl.ac.uk'
            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website')
            i_address = createCI_Address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \
                                         electronicMailAddress=['badc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')
            i_phone = createCI_Telephone(voice=['+44(0)1235 446432'])
            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)
            party = createMO_Organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact])
        elif provider_id == DO_NEODC:
            i_linkage = 'http://www.neodc.rl.ac.uk'
            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website')
            i_address = createCI_Address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \
                                         electronicMailAddress=['neodc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')
            i_phone = createCI_Telephone(voice=['+44(0)1235 446432'])
            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)
            party = createMO_Organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact])
        elif provider_id == DO_UKSSDC:
            i_linkage = 'http://www.ukssdc.rl.ac.uk'
            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'UK Solar System Data Centre website')
            i_address = createCI_Address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \
                                         electronicMailAddress=['support@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')
            i_phone = createCI_Telephone(voice=['+44(0)1235 445173'])
            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)
            party = createMO_Organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact])

        if party and not self._deploymentHasBeenProcessed:
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_publisher), [party]))
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_custodian), deepcopy([party])))
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_distributor), deepcopy([party])))
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_pointofcontact), deepcopy([party])))
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_metadataowner), deepcopy([party])))
            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_curator), deepcopy([party])))
            return
        #Still have to update observation.relatedParty

    def _assignQuality(self, observation):
        if self._dataEntityHasSameHash:
            return

        doc_quality = extractQuality(self._dataEntityMigration)
        doc_date = findUpdatedDate(self._dataEntityMigration)
        ci_dates = []
        if doc_date:
            i_date = createDate(isoDateTimeStringToTimeDate(doc_date))
            ci_dates.append(createCI_Date(getCLValue(CI_DateTypeCode.cl_revision), date = i_date))
        else:
            i_date = createDate(isoDateTimeStringToTimeDate(datetime.datetime.now()))
            ci_dates.append(createCI_Date(getCLValue(CI_DateTypeCode.cl_creation), date = i_date))

        i_specification = createCI_Citation(title = "CEDA Data Quality Statement", date=ci_dates)
        i_dq_result = createDQ_ConformanceResult(doc_quality, True, i_specification)
        i_quality_element = createDQ_Element(i_dq_result)

        if not self._deploymentHasBeenProcessed:
            observation.resultQuality.append(i_quality_element)
            return
        #Still have to update observation.resultQuality

    def _assignDescription(self, observation):
        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
            return

        description = extractSummary(self._deploymentMigration)
        if description is None:
            description = extractSummary(self._dataEntityMigration)

        if description:
            observation.description = description

    def _assignTitle(self, observation):
        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
            return

        doc_title = extractTitle(self._deploymentMigration)
        if doc_title is None:
            doc_title = extractTitle(self._dataEntityMigration)

        if doc_title.startswith('deployment_') or doc_title.startswith('Deployment_'):
            links = findLinksInMigrationDocument(self._deploymentMigration)
            if links.has_key('DPT'):
                dptList = links['DPT']
                doc_title = 'Data from ' + dptList[0]['title']
                if len(dptList) > 2:
                    for dpt in dptList[1:-1]:
                        doc_title += ', ' + dpt['title']
                if len(dptList) > 1:
                    doc_title += ' and ' + dptList[-1]['title']

            links = findLinksInDeployment(self._deploymentMigration)
            if links.has_key('OBS'):
                obsList = []
                for obs in links['OBS']:
                    observationStation = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deploymentMigration, obs + '.atom')
                    obsList.append((extractTitle(observationStation), findSubTypeInDPT(observationStation)))

                if obsList[0][1] in ['stationary platform', 'moving platform', 'ship', 'aircraft', 'satellite', 'computer']:
                    doc_title += ' on '
                else:
                    doc_title += ' at '
                doc_title += obsList[0][0]
                if len(obsList) > 2:
                    for obs in obsList[1:-1]:
                        doc_title += ', ' + obs[0]
                if len(obsList) > 1:
                    doc_title += ' and ' + obsList[-1][0]

            if links.has_key('ACTIVITY'):
                for link in links['ACTIVITY']:
                    activity = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deploymentMigration, link + '.atom')

                    projSubType = findSubTypeInDPT(activity)
                    doc_title += ' for the ' + extractTitle(activity)
                    if projSubType[0:14] == 'dgActivityData':
                        doc_title += ' ' + projSubType[14:]
                    else:
                        doc_title += ' ' + projSubType
        else:
            if not doc_title.startswith('Data from'):
                doc_title = "Data from " + doc_title
        auth = createCI_Citation(title = 'ceda_title')
        identifier = createMD_Identifier(code = doc_title, authority = auth)

        if not self._deploymentHasBeenProcessed:
            observation.identifier.append(identifier)
            return
        #Still have to update observation.identifier

    def _assignGeographicExtent(self, observation):
        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
            return

        ge = extractGeographicExtentInMigrationDocument(self._deploymentMigration)
        if not ge:
            ge = extractGeographicExtentInMigrationDocument(self._dataEntityMigration)
        if ge:
            geographicExtent = createEX_GeographicBoundingBox(ge['east'], ge['north'], ge['west'], ge['south'])
            if not self._deploymentHasBeenProcessed:
                observation.geographicExtent.append(geographicExtent)
        #Still have to update observation.geographicExtent

    def _assignCreationDate(self, observation):
        if self._deploymentHasSameHash:
            return

        creation_date = extractMolesPublishedDate(self._deploymentMigration)
        if creation_date is None:
            creation_date = extractMolesCreationDate(self._deploymentMigration)
        py_datetime = isoDateTimeStringToTimeDate(creation_date)
        date_time = createDateTime(py_datetime)
        tm_position = createTM_Position(dateTime8601 = date_time)

        if not self._deploymentHasBeenProcessed:
            observation.resultTime = createTM_Instant(tm_position)
            return
        #Still have to update observation.resultTime

    def _assignPhenomenonTime(self, observation):
        if self._deploymentHasSameHash:
            return

        doc_phenomenon_time = extractMolesTemporalRange(self._deploymentMigration)
        if doc_phenomenon_time:
            pt = None
            if '/' in doc_phenomenon_time:
                period = doc_phenomenon_time.split('/')
                begin_date = createDate(isoDateStringToTimeDate(period[0]))
                begin_position = createTM_Position(date8601 = begin_date)
                begin_tm_instant = createTM_Instant(begin_position)

                end_date = createDate(isoDateStringToTimeDate(period[1]))
                end_position = createTM_Position(date8601 = end_date)
                end_tm_instant = createTM_Instant(end_position)

                pt = createTM_Period(begin_tm_instant, end_tm_instant)
            else:
                pt = createTM_Position(date8601 = createDate(isoDateStringToTimeDate(doc_phenomenon_time)))

            if not self._deploymentHasBeenProcessed:
                observation.phenomenonTime = pt
        #Still have to update observation.phenomenonTime

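    # Derives the observation's permission: a restricted-access constraint when a single access
    # link is found, or an open-access use limitation when only a single download link exists.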
    def _assignPermission(self, observation):
        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
            return

        access_link = findAccessLinksInMigrationDocument(self._deploymentMigration)
        dwn_link = findDownloadLinksInMigrationDocument(self._deploymentMigration)
        if len(access_link) == 0:
            access_link = findAccessLinksInMigrationDocument(self._dataEntityMigration)

        i_accessConstraints = []
        i_use_limitation = []

        permission = None
        if len(access_link) == 0:
            if len(dwn_link) == 0:
                dwn_link = findDownloadLinksInMigrationDocument(self._dataEntityMigration)
                if dwn_link and len(dwn_link) == 1:
                    i_use_limitation.append("These data are open access and available through %s." % (dwn_link[0]['href']))
                    #i_accessConstraints.append(MD_RestrictionCode.cl_)
                    permission = createMD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)
        else:
            if access_link and len(access_link) == 1:
                i_use_limitation.append("Access to these data is restricted. To obtain access please apply for access at: %s" % (access_link[0]['href']))
                i_accessConstraints.append(getCLValue(MD_RestrictionCode.cl_restricted))
                permission = createMD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)

        if not self._deploymentHasBeenProcessed:
            observation.permission = permission
        #Still have to update observation.permission

        '''
        contentDict = extractContent(self._deploymentMigration)
        if not contentDict.has_key('access-restricted'):
            contentDict = extractContent(self._dataEntityMigration)
        '''

    def _assignMoles2Link(self, ceda_observation):
        if self._deploymentHasSameHash:
            return

        i_code = 'http://badc.nerc.ac.uk/view/%s__ATOM__%s' % (self._deploymentMigration.doc_owner, self._deploymentMigration.doc_name)
        i_code = i_code.replace('.atom', '')
        #i_code = buildExistDocPath(self._deploymentMigration.doc_status, DT_DEPLOYMENTS, self._deploymentMigration.doc_owner, self._deploymentMigration.doc_name)
        i_authority = createCI_Citation('moles2url')
        identifier = createMD_Identifier(code = i_code, authority = i_authority)
        if not self._deploymentHasBeenProcessed:
            ceda_observation.identifier.append(identifier)
        #Still have to update observation.identifier

    def _assignInternalReview(self, ceda_observation):
        if self._deploymentHasBeenProcessed:
            return

        i_party = createMO_Individual(name = 'Graham Parton')
        i_reviewer = createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_processor), [i_party])
        ceda_observation.metadataManagement.append( \
            createCEDA_Review(reviewer=i_reviewer, reviewFrequency=getCLValue(CEDA_ReviewFrequencyValue.cl_yearly), \
                              reviewStatus=getCLValue(CEDA_ReviewStatusValue.cl_required)))

    def _assignLanguage(self, ceda_observation):
        if self._deploymentHasBeenProcessed:
            return

        i_date_stamp = createDate(datetime.datetime.now())
        #i_contact = createCI_Citation("", date = i_date_stamp)
        i_contact = createCI_ResponsibleParty(getCLValue(CI_RoleCode.cl_user))
        ceda_observation.metadata = createMD_Metadata(date_stamp=i_date_stamp, contact = [i_contact], language = "English")

    def _processResultAccumulation(self, ceda_observation):
        if self._dataEntityHasSameHash:
            return

        updateFrequency = extractUpdateFrequency(self._dataEntityMigration)
        if updateFrequency:
            resultAccumulation = MD_MaintenanceFrequencyCode.from_string(updateFrequency)
            if not self._deploymentHasBeenProcessed:
                self.epbRepo.moles3EPB.updateCedaObject(ceda_observation, {'resultAccumulation': resultAccumulation})
                return
        #Still have to update observation.resultAccumulation

    def _assignName(self, observation):
        '''
            @param observation: the CEDA_Observation to attach the author and co-author parties to
        '''
        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
            return

        whereAreAuthors = self._deploymentMigration
        doc_authors = findAuthorsInResource(self._deploymentMigration)
        if doc_authors['authors'] in [DO_BADC, DO_NEODC]:
            doc_authors = findAuthorsInResource(self._dataEntityMigration)
            whereAreAuthors = self._dataEntityMigration

        ind_names = []
        org_names = []
        if authors.has_key(doc_authors['authors']):
            tmp_auth = authors[doc_authors['authors']]
            ret = self._extractIndividualsAndOrganizations(tmp_auth, whereAreAuthors)

            if len(ret['ind']) > 0:
                i_party = createMO_Individual(name = ret['ind'][0])
                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))
            if len(ret['org']) > 0:
                i_party = createMO_Organization(name = ret['org'][0])
                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))

            if len(ret['ind']) > 1:
                ind_names.extend(ret['ind'][1:])
            if len(ret['org']) > 1:
                org_names.extend(ret['org'][1:])
            if len(ret['co_ind']) > 0:
                ind_names.extend(ret['co_ind'])
            if len(ret['co_org']) > 0:
                org_names.extend(ret['co_org'])

        else:
            raise NoAssociatedAuthor(doc_authors['authors'], migrationObject = whereAreAuthors)

        if doc_authors['contributors'] and authors.has_key(doc_authors['contributors']):
            tmp_auth = authors[doc_authors['contributors']]
            ret = self._extractIndividualsAndOrganizations(tmp_auth, whereAreAuthors)
            ind_names.extend(ret['ind'])
            ind_names.extend(ret['co_ind'])
            org_names.extend(ret['org'])
            org_names.extend(ret['co_org'])

        i_party = []
        for nm in ind_names:
            i_party.append(createMO_Individual(name = nm))

        for nm in org_names:
            i_party.append(createMO_Organization(name = nm))

        if i_party:
            if not self._deploymentHasBeenProcessed:
                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_coinvestigator), i_party))
                return
        #Still have to update observation.relatedParty

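    # Runs the individual assignment steps; on a first pass persists the observation, records its
    # id and document hash on the migration object, ensures a CedaGUID exists, and attaches the
    # procedure and project built by DeploymentDataProcessor.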
    def _execute(self, ceda_observation):
        self._assignKeywords(ceda_observation)
        self._assignLineage(ceda_observation)
        self._assignResult(ceda_observation)
        self._assignPublisherCurator(ceda_observation)
        self._assignQuality(ceda_observation)
        self._assignDescription(ceda_observation)
        self._assignTitle(ceda_observation)
        self._assignGeographicExtent(ceda_observation)
        self._assignCreationDate(ceda_observation)
        self._assignPhenomenonTime(ceda_observation)
        self._assignPermission(ceda_observation)
        self._assignMoles2Link(ceda_observation)
        self._assignInternalReview(ceda_observation)
        self._assignLanguage(ceda_observation)
        self._processResultAccumulation(ceda_observation)
        self._assignName(ceda_observation)
        #self._assignDOI(ceda_observation)

        #Is this a first-time process?
        if not hasMOBeenProcessed(self._deploymentMigration):
            ceda_observation.publicationState = getCLValue(MM_ObservationPublicationStateValue.cl_working)
            docHash = getAtomDocumentHashByMO(self._dataEntityMigration)
            self.epbRepo.moles3EPB.persistInstance(ceda_observation)
            self.epbRepo.migrationEPB.updateMigrationObject(self._deploymentMigration, \
                {'ceda_observation_id': ceda_observation.id,
                 'doc_hash': docHash})

        #Has a proper CEDA GUID?
        if self.epbRepo.moles3EPB.retrieveGUIDFromInstance(ceda_observation) is None:
            ceda_guid = CedaGUID()
            ceda_guid.id = calculateHash(self._deploymentMigration.depl_id)
            ceda_guid.ceda_observation = ceda_observation.id
            self.epbRepo.moles3EPB.persistInstance(ceda_guid)
            DeploymentProcessor.log.info("GUID for this Observation: %s" % (ceda_guid.id))

        if not self._deploymentHasBeenProcessed:
            deploymentDataProcessor = DeploymentDataProcessor(self._deploymentMigration, self.epbRepo)
            procedure = deploymentDataProcessor.createProcess()
            project = deploymentDataProcessor.createProject()
            self.epbRepo.moles3EPB.updateCedaObject(ceda_observation, {'procedure': procedure, 'inSupportOf': project})

        #Still have to update observation.procedure
        #Still have to update observation.project

        return ceda_observation

    def process(self):
        ceda_observation = None
        #Moles3 object exists...
        if self._deploymentMigration.ceda_observation_id:
            ceda_observation = self.epbRepo.moles3EPB.search(CEDA_Observation, self._deploymentMigration.ceda_observation_id)
        else:
            #... does not exist so create it
            ceda_observation = CEDA_Observation()

        self._execute(ceda_observation)
        return ceda_observation