source: mauRepo/MolesManager/trunk/src/libs/migration/processor/deployment.py @ 8416

Subversion URL: http://proj.badc.rl.ac.uk/svn/ndg/mauRepo/MolesManager/trunk/src/libs/migration/processor/deployment.py@8416
Revision 8416, 36.1 KB checked in by mnagni, 7 years ago (diff)

Incomplete - # 22488: CEDA Observation Collection - Geographical Extent
 http://team.ceda.ac.uk/trac/ceda/ticket/22488

Line 
1'''
2BSD Licence
3Copyright (c) 2012, Science & Technology Facilities Council (STFC)
4All rights reserved.
5
6Redistribution and use in source and binary forms, with or without modification,
7are permitted provided that the following conditions are met:
8
9    * Redistributions of source code must retain the above copyright notice,
10        this list of conditions and the following disclaimer.
11    * Redistributions in binary form must reproduce the above copyright notice,
12        this list of conditions and the following disclaimer in the documentation
13        and/or other materials provided with the distribution.
14    * Neither the name of the Science & Technology Facilities Council (STFC)
15        nor the names of its contributors may be used to endorse or promote
16        products derived from this software without specific prior written permission.
17
18THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
20THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
21PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
22BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
23OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
25HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29Created on 15 Nov 2011
30
31@author: Maurizio Nagni
32'''
33from libs.migration.processor.commons import findMolesLineage,\
34    createMO_ResponsiblePartyInfo,\
35    DO_BADC, DO_NEODC, findAuthorsInResource, \
36    createMD_Identifier, extractSummary, extractQuality, \
37    hasMOSameHash, getAtomDocumentHashByMO, extractTitle,\
38        createCEDA_Result,\
39    createEX_GeographicBoundingBox, extractGeographicExtentInMigrationDocument, findDownloadLinksInMigrationDocument,\
40    extractContent, createCI_Citation, createCI_Date, createDate,\
41    createTM_Position, createTM_Instant, extractMolesCreationDate,\
42    createDateTime, isoDateTimeStringToTimeDate, extractMolesProviderID,\
43    DO_UKSSDC, createMO_Organization,\
44    createCI_Contact, createCI_Address, createCI_OnlineResource,\
45    createCI_Telephone, extractMolesTemporalRange, isoDateStringToTimeDate,\
46    createTM_Period, findAccessLinksInMigrationDocument,\
47    findLinksInDeployment, createMD_LegalConstraints,\
48    createDQ_Element, createDQ_ConformanceResult, findUpdatedDate,\
49    createMD_Metadata, createMO_OnlineResource, createCEDA_Review, calculateHash,\
50    createCI_ResponsibleParty, extractUpdateFrequency,\
51    findLinksInMigrationDocument, findSubTypeInDPT, extractMolesPublishedDate,\
52    createMD_Keywords, hasMOBeenProcessed, createMO_Individual
53from libs.epb import EPB
54from libs.migration.processor.deployment_data import DeploymentDataProcessor
55from libs.migration.exception.exceptions import NoDataLineage,\
56    NoAssociatedAuthor
57from libs.migration.authors import authors
58from logging import StreamHandler
59import logging
60from datetime import date
61from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_onlinefunctioncode import CI_OnLineFunctionCode
62from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_datetypecode import CI_DateTypeCode
63from ea_model.iso_19115_2006_metadata_corrigendum.constraint_information.md_restrictioncode import MD_RestrictionCode
64from copy import deepcopy
65import datetime
66from MolesManager.ceda_guid import CedaGUID
67from ea_model.iso_19115_2006_metadata_corrigendum.citation_and_responsible_party_information.ci_rolecode import CI_RoleCode
68from ea_model.iso_19115_2006_metadata_corrigendum.maintenance_information.md_maintenancefrequencycode import MD_MaintenanceFrequencyCode
69from ea_model.moles3_4.utilities.ceda_rolevalue import CEDA_RoleValue
70from ea_model.moles3_4.observation.mo_observationpublicationstatevalue import MO_ObservationPublicationStateValue
71from MolesManager.codelist import MM_RoleValue,\
72    MM_ObservationPublicationStateValue, getCLValue
73from ea_model.ceda_metadatamodel.ceda_observation.ceda_observation import CEDA_Observation
74from ea_model.ceda_metadatamodel.ceda_result.ceda_curationvalue import CEDA_CurationValue
75from ea_model.ceda_metadatamodel.ceda_utilities.ceda_reviewstatusvalue import CEDA_ReviewStatusValue
76from ea_model.ceda_metadatamodel.ceda_utilities.ceda_reviewfrequencyvalue import CEDA_ReviewFrequencyValue
77
78MET_GEO_FEATURE = 'Meteorological geographical features'
79ORTHOIMAGERY = 'Orthoimagery'
80           
81class DeploymentProcessor(object):
82    '''
83        Migrates a deployment element in a CEDA_Observation entity
84    '''
85    publisherName = 'CEDA'
86   
87    log = logging.getLogger('DeploymentProcessor')
88    log.addHandler(StreamHandler())
89    log.setLevel(logging.INFO)   
90    def __init__(self, dataEntityMigration, deploymentMigration, epbRepo):
91        '''
92            Initializes the class
93            @param dataEntityMigration: a DataEntityMigration instance
94            @param deploymentMigration: the DeploymentMigration instance
95            @param epbRepo: an instance of EPBRepo             
96        '''             
97        self._dataEntityMigration = dataEntityMigration
98        self._deploymentMigration = deploymentMigration
99        self.epbRepo = epbRepo
100        self._dataEntityHasSameHash = hasMOSameHash(self._dataEntityMigration)
101        self._deploymentHasSameHash = hasMOSameHash(self._deploymentMigration)
102        self._deploymentHasBeenProcessed = hasMOBeenProcessed(self._deploymentMigration)
103
104    def _existsCEDAasPublisher(self):
105        for rp in self._ceda_observation.relatedParty:
106            if rp.role == getCLValue(MM_RoleValue.cl_publisher):
107                for party in rp.parties:
108                    if party.name == DeploymentProcessor.publisherName:
109                        return True
110        return False
111
112    """                   
113    def _extractResponsiblePartyInfo(self, authors = None, parsedParties = []):
114        '''
115            @param authors: the deployment's authors
116            @param parsedParties: a list to fill with the created MO_ResponsiblePartyInfos
117        '''
118        #First is assumed to be the author
119        i_party = createMO_Organization(name = authors[:1])
120        parsedParties.append(createMO_ResponsiblePartyInfo(MO_RoleValue.cl_author, [i_party]))
121        i_party = []
122        if len(authors) < 2:
123            return
124       
125        for i_name in authors[:1]:
126            i_party.append(createMO_Organization(name = i_name))           
127        parsedParties.append(createMO_ResponsiblePartyInfo(MO_RoleValue.cl_coinvestigator, i_party))           
128    """
129
130    def _addResponsiblePartyInfo(self, oldResponsiblePartyInfos, newResponsiblePartyInfo):
131        opi = None
132        for oldPartyInfo in oldResponsiblePartyInfos:
133            if oldPartyInfo.role == newResponsiblePartyInfo.role:
134                opi = oldPartyInfo
135                break
136           
137        if len(oldResponsiblePartyInfos) == 0 or opi is None:
138            oldResponsiblePartyInfos.append(newResponsiblePartyInfo)
139       
140        if opi is not None: 
141            for np in newResponsiblePartyInfo.party:
142                opi.party.append(np)
143       
144
145
146
147    def _extractIndividualsAndOrganizations(self, tmp_auth, whereAreAuthors):
148        if whereAreAuthors is None:
149            raise Exception("migrationObject is None")
150        try:
151            if tmp_auth['author'] == 'unknown':
152                doc_authors = findAuthorsInResource(self._dataEntityMigration)
153                tmp_auth = authors[doc_authors['authors']]
154           
155            ret = {'ind': [], 'org': [], 'co_ind': [], 'co_org': []}
156            if tmp_auth['type'] == 'ind':
157                ret['ind'].append(tmp_auth['author'])
158            elif tmp_auth['type'] == 'org':
159                ret['org'].append(tmp_auth['author'])           
160               
161            if tmp_auth['author'] == 'unknown':
162                DeploymentProcessor.log.debug("%s %s %s has unknown author" \
163                                             % (whereAreAuthors.doc_status, whereAreAuthors.doc_owner, whereAreAuthors.doc_name))       
164
165            for item in tmp_auth['co_author_type']:           
166                if (tmp_auth['co_author'][tmp_auth['co_author_type'].index(item)] == 'unknown'):
167                    doc_authors = findAuthorsInResource(self._dataEntityMigration)
168                    tmp_auth = authors[doc_authors['contributors']]
169                    break
170                 
171            for index in range(len(tmp_auth['co_author_type'])): 
172                ptype = tmp_auth['co_author_type'][index]               
173                if ptype == 'ind':
174                    ret['co_ind'].append(tmp_auth['co_author'][index])
175                elif ptype == 'org':
176                    ret['co_org'].append(tmp_auth['co_author'][index])
177           
178                if (tmp_auth['co_author'][index] == 'unknown'):
179                    DeploymentProcessor.log.info("%s %s %s has unknown author" \
180                                                 % (whereAreAuthors.doc_status, whereAreAuthors.doc_owner, whereAreAuthors.doc_name))
181        except Exception as e:
182            print e
183        return ret
184       
185       
186    def updateObservation(self):
187        return EPB.searchOrCreate(CEDA_Observation, self._deploymentMigration.ceda_observation_id)
188
189    def assignDOI(self, observation, doi):
190        if doi and doi.has_key('href'):
191            doi = doi['href'][22:]           
192           
193            #Check if a doi has been already assigned
194            observation = self.epbRepo.moles3EPB.loadAttributes(observation, 'identifier')
195            obs_identifier = observation.identifier
196            if obs_identifier:
197                for ident in obs_identifier:
198                    if ident.code == doi:
199                        return
200           
201            py_date = None
202            cited_responsible = createCI_ResponsibleParty(role=getCLValue(CI_RoleCode.cl_publisher), \
203                                                              organizationName='NERC - British Atmospheric Data Centre')
204            if doi.upper() == '10.5285/E8F43A51-0198-4323-A926-FE69225D57DD':
205                py_date = date(2011, 4, 1)
206            elif doi.upper() == '10.5285/78114093-E2BD-4601-8AE5-3551E62AEF2B':
207                py_date = date(2011, 11, 29)               
208            elif doi.upper() == '10.5285/DB8D8981-1A51-4D6E-81C0-CCED9B921390':
209                py_date = date(2012, 4, 16)
210            elif doi.upper() == '10.5285/639A3714-BC74-46A6-9026-64931F355E07':
211                py_date = date(2012, 4, 16)               
212               
213            if py_date:   
214                dt = createDate(py_date)
215                ci_date = createCI_Date(getCLValue(CI_DateTypeCode.cl_publication), date = dt)
216                i_authority = createCI_Citation("DOI", date = ci_date)
217                identifier = createMD_Identifier(code = doi, authority=i_authority)
218                self.epbRepo.moles3EPB.updateCedaObject(observation, {'identifier': identifier})
219                DeploymentProcessor.log.info("DOI: %s" % (doi))                                 
220
221    def _assignKeywords(self, ceda_observation):
222        if self._deploymentHasSameHash:
223            return
224       
225        provider_id = extractMolesProviderID(self._deploymentMigration)
226        i_keywords = []
227        if provider_id == DO_BADC:
228            i_keywords.append(MET_GEO_FEATURE)
229        if provider_id == DO_NEODC:
230            i_keywords.append(ORTHOIMAGERY)
231        if len(i_keywords) > 0:
232            #Is a first time process?
233            if not self._deploymentHasBeenProcessed: 
234                ceda_observation.keywords.append(createMD_Keywords(i_keywords))
235            else:
236                ceda_observation.keywords.keyword = i_keywords         
237
238    def _assignLineage(self, observation):
239        if self._deploymentHasSameHash:
240            return
241       
242        data_lineage = findMolesLineage(self._dataEntityMigration)
243        if data_lineage is None:
244            raise NoDataLineage(self._dataEntityMigration)
245       
246        if data_lineage != observation.dataLineage:
247            observation.dataLineage = data_lineage 
248
249    def _assignResult(self, observation):
250        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
251            return
252
253        i_sources = []               
254        download = findDownloadLinksInMigrationDocument(self._deploymentMigration)
255        content = None
256        if len(download) == 0:
257            download = findDownloadLinksInMigrationDocument(self._dataEntityMigration)
258            content = extractContent(self._dataEntityMigration)
259        else:
260            content = extractContent(self._deploymentMigration)
261        for dwn in download:
262            int_description = None
263            int_applicationProfile = None
264            if content.has_key('formats'):
265                #int_applicationProfile = content['formats']
266                pass
267            if dwn['href'].startswith('http://badc.nerc.ac.uk/browse') or dwn['href'].startswith('http://neodc.nerc.ac.uk/browse'):
268                int_description = "download directly from archive"   
269            i_sources.append(createMO_OnlineResource(linkage = dwn['href'], name = dwn['title'], \
270                                                   function = getCLValue(CI_OnLineFunctionCode.cl_download), \
271                                                   description = int_description, applicationProfile = int_applicationProfile))
272           
273        dataentity_id = '%s__ATOM__%s' % (self._dataEntityMigration.doc_owner, self._dataEntityMigration.doc_name)
274        dataentity_id = dataentity_id.replace('.atom', '')           
275        infodb_de = self.epbRepo.infodbEPB.getCedaInfoApp_dataentityByDE_ID(dataentity_id)
276        i_logical_path = '/dummy'
277        if infodb_de is None:
278            i_logical_path = dwn['href'][dwn['href'].index('/browse/') + 7:]
279       
280        if infodb_de and infodb_de.has_key('logical_path'):
281            i_logical_path = infodb_de['logical_path']
282               
283        i_category = getCLValue(CEDA_CurationValue.cl_a)                                 
284        if infodb_de and infodb_de.has_key('category') and infodb_de['category']:
285            i_category = CEDA_CurationValue.from_string(infodb_de['category'].lower())       
286
287        if not self._deploymentHasBeenProcessed:                     
288            observation.result = createCEDA_Result(i_category, i_logical_path, source = i_sources)
289            return 
290       
291        if observation.result.internalPath != i_logical_path:
292            observation.result.internalPath = i_logical_path
293           
294        if observation.result.curationCategory != i_category:
295            observation.result.curationCategory = i_category
296                       
297        #Still have to update observation.result.source
298
299    def _assignPublisherCurator(self, observation):
300        if self._deploymentHasSameHash:
301            return
302           
303        provider_id = extractMolesProviderID(self._deploymentMigration)
304        party = None
305        if provider_id == DO_BADC:
306            i_linkage = 'http://badc.rl.ac.uk'
307            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website')
308            i_address = createCI_Address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \
309                                         electronicMailAddress=['badc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')                                   
310            i_phone = createCI_Telephone(voice=['+44(0)1235 446432'])                                 
311            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)                       
312            party = createMO_Organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact])
313        elif provider_id == DO_NEODC:
314            i_linkage = 'http://www.neodc.rl.ac.uk'
315            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website')
316            i_address = createCI_Address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \
317                                         electronicMailAddress=['neodc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')                                   
318            i_phone = createCI_Telephone(voice=['+44(0)1235 446432'])                                 
319            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)                       
320            party = createMO_Organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact])
321        elif provider_id == DO_UKSSDC:
322            i_linkage = 'http://www.ukssdc.rl.ac.uk'
323            i_onlineResources = createCI_OnlineResource(linkage = i_linkage, name = 'UK Solar System Data Centre website')
324            i_address = createCI_Address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \
325                                         electronicMailAddress=['support@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford')                                   
326            i_phone = createCI_Telephone(voice=['+44(0)1235 445173'])                                 
327            contact = createCI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)                       
328            party = createMO_Organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact])
329       
330        if party and not self._deploymentHasBeenProcessed:
331            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_publisher), [party]))
332            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_custodian), deepcopy([party])))
333            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_distributor), deepcopy([party])))
334            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_pointofcontact), deepcopy([party])))
335            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_metadataowner), deepcopy([party])))
336            observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_curator), deepcopy([party])))
337            return       
338        #Still have to update observation.result.source
339
340    def _assignQuality(self, observation):
341        if self._dataEntityHasSameHash:
342            return
343               
344        doc_quality = extractQuality(self._dataEntityMigration)
345        doc_date = findUpdatedDate(self._dataEntityMigration)
346        ci_dates = []           
347        if doc_date:
348            i_date = createDate(isoDateTimeStringToTimeDate(doc_date))               
349            ci_dates.append(createCI_Date(getCLValue(CI_DateTypeCode.cl_revision), date = i_date))           
350        else:
351            i_date = createDate(isoDateTimeStringToTimeDate(datetime.datetime.now()))
352            ci_dates.append(createCI_Date(getCLValue(CI_DateTypeCode.cl_creation), date = i_date))
353
354        i_specification = createCI_Citation(title = "CEDA Data Quality Statement", date=ci_dates)
355        i_dq_result = createDQ_ConformanceResult(doc_quality, True, i_specification)
356        i_quality_element = createDQ_Element(i_dq_result)
357       
358        if not self._deploymentHasBeenProcessed:     
359            observation.resultQuality.append(i_quality_element)
360            return               
361        #Still have to update observation.result.source
362
363    def _assignDescription(self, observation):
364        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
365            return
366       
367        description = extractSummary(self._deploymentMigration)
368        if description is None:
369            description = extractSummary(self._dataEntityMigration)
370           
371        if description:
372            observation.description = description
373
374    def _assignTitle(self, observation): 
375        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
376            return
377                     
378        doc_title = extractTitle(self._deploymentMigration)
379        if doc_title is None:
380            doc_title = extractTitle(self._dataEntityMigration)
381       
382        if doc_title.startswith('deployment_') or doc_title.startswith('Deployment_'):
383            links = findLinksInMigrationDocument(self._deploymentMigration)
384            dptList = links['DPT']
385            if links.has_key('DPT'):
386                doc_title = 'Data from ' + dptList[0]['title']
387                if len(dptList) > 2:
388                    for dpt in dptList[1:-2]:
389                        doc_title += ', ' + dpt['title']
390                    if len(dptList) > 1:
391                        doc_title += ' and ' + dptList[-1]
392                                                       
393            links = findLinksInDeployment(self._deploymentMigration)
394            if links.has_key('OBS'):
395                obsList = []
396                for obs in links['OBS']:
397                    observationStation = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deploymentMigration, obs + '.atom')
398                    obsList.append((extractTitle(observationStation), findSubTypeInDPT(observationStation)))
399               
400                if obsList[0][1] in ['stationary platform' ,'moving platform', 'ship','aircraft','satellite','computer']:
401                    doc_title += ' on '
402                else : 
403                    doc_title += ' at '
404                    doc_title += obsList[0][0]
405                if len(obsList) > 2:
406                    for obs in obsList[1:-2]:
407                        doc_title += ', ' + obs[0]
408                    if len(obsList) > 1:
409                        doc_title += ' and ' + obsList[-1][0]
410           
411            if links.has_key('ACTIVITY'):             
412                for link in links['ACTIVITY']:
413                    activity = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deploymentMigration, link + '.atom')                   
414     
415                    projSubType = findSubTypeInDPT(activity)
416                    doc_title += ' for the ' + extractTitle(activity)
417                    if projSubType[0:14] == 'dgActivityData':
418                        doc_title += ' ' + projSubType[14:]
419                    else :
420                        doc_title += ' ' + projSubType                                                   
421        else:             
422            if doc_title[0:10] != 'Data from' :
423                doc_title = "Data from " + doc_title           
424        auth = createCI_Citation(title = 'ceda_title')                 
425        identifier = createMD_Identifier(code = doc_title, authority = auth)
426       
427        if not self._deploymentHasBeenProcessed: 
428            observation.identifier.append(identifier)
429            return           
430        #Still have to update observation.identifier         
431
432    def _assignGeographicExtent(self, observation):
433        if self._dataEntityHasSameHash and self._deploymentHasSameHash:
434            return 
435       
436        ge = extractGeographicExtentInMigrationDocument(self._deploymentMigration)
437        if not ge:
438            ge = extractGeographicExtentInMigrationDocument(self._dataEntityMigration)
439            if ge:
440                geographicExtent = createEX_GeographicBoundingBox(ge['east'], ge['north'], ge['west'], ge['south'])
441                if self._deploymentHasBeenProcessed:
442                    DeploymentProcessor.log.warn('The _assignGeographicExtent update is skipped because not implemented')
443                observation.geographicExtent.append(geographicExtent)
444            else:
445                print "No Geographic Extent"
446            return         
447        #Still have to update observation.geographicExtent
448           
449    def _assignCreationDate(self, observation):
450        if self._deploymentHasSameHash:
451            return 
452       
453        creation_date = extractMolesPublishedDate(self._deploymentMigration)
454        if creation_date is None:
455            creation_date = extractMolesCreationDate(self._deploymentMigration)
456        py_datetime = isoDateTimeStringToTimeDate(creation_date)
457        date_time = createDateTime(py_datetime)   
458        tm_position = createTM_Position(dateTime8601 = date_time)
459       
460        if not self._deploymentHasBeenProcessed: 
461            observation.resultTime = createTM_Instant(tm_position)
462            return       
463        #Still have to update observation.geographicExtent
464
465    def _assignPhenomenonTime(self, observation): 
466        if self._deploymentHasSameHash:
467            return 
468             
469        doc_phenomenon_time = extractMolesTemporalRange(self._deploymentMigration)       
470        if doc_phenomenon_time:
471            pt = None
472            if '/' in doc_phenomenon_time:
473                period = doc_phenomenon_time.split('/')
474                begin_date = createDate(isoDateStringToTimeDate(period[0]))
475                begin_position = createTM_Position(date8601 = begin_date)
476                begin_tm_instant = createTM_Instant(begin_position)
477               
478                end_date = createDate(isoDateStringToTimeDate(period[1]))
479                end_position = createTM_Position(date8601 = end_date)
480                end_tm_instant = createTM_Instant(end_position)
481               
482                pt = createTM_Period(begin_tm_instant, end_tm_instant)
483            else:
484                pt = createTM_Position(date8601 = createDate(isoDateStringToTimeDate(doc_phenomenon_time)))
485           
486            if not self._deploymentHasBeenProcessed:
487                observation.phenomenonTime = pt
488        #Still have to update observation.phenomenonTime               
489               
490    def _assignPermission(self, observation):
491        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
492            return 
493       
494        access_link = findAccessLinksInMigrationDocument(self._deploymentMigration)
495        dwn_link = findDownloadLinksInMigrationDocument(self._deploymentMigration)
496        if len(access_link) == 0:
497            access_link = findAccessLinksInMigrationDocument(self._dataEntityMigration) 
498
499        i_accessConstraints = []
500        i_use_limitation = []
501       
502        permission = None
503        if len(access_link) == 0:
504            if len(dwn_link) == 0:
505                dwn_link = findDownloadLinksInMigrationDocument(self._dataEntityMigration)
506                if dwn_link and len(dwn_link) == 1:               
507                    i_use_limitation.append("These data are open access and available through %s." % (dwn_link[0]['href']) )
508                    #i_accessConstraints.append(MD_RestrictionCode.cl_)
509                    observation.permission = createMD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)
510        else:
511            if access_link and len(access_link) == 1:
512                i_use_limitation.append("Access to these data is restricted. To obtain access please apply for access at: %s" % (access_link[0]['href']))
513                i_accessConstraints.append(getCLValue(MD_RestrictionCode.cl_restricted))
514                observation.permission = createMD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)
515               
516        if not self._deploymentHasBeenProcessed:
517            observation.permission = permission       
518        #Still have to update observation.permission
519                                   
520        '''                               
521        contentDict = extractContent(self._deploymentMigration)
522        if not contentDict.has_key('access-restricted'):
523            contentDict = extractContent(self._dataEntityMigration)
524        '''           
525
526    def _assignMoles2Link(self, ceda_observation):
527        if self._deploymentHasSameHash:
528            return 
529               
530        i_code = 'http://badc.nerc.ac.uk/view/%s__ATOM__%s' % (self._deploymentMigration.doc_owner, self._deploymentMigration.doc_name)
531        i_code = i_code.replace('.atom', '')
532        #i_code = buildExistDocPath(self._deploymentMigration.doc_status, DT_DEPLOYMENTS, self._deploymentMigration.doc_owner, self._deploymentMigration.doc_name)
533        i_authority = createCI_Citation('moles2url')
534        identifier = createMD_Identifier(code = i_code, authority = i_authority)
535        if not self._deploymentHasBeenProcessed:             
536            ceda_observation.identifier.append(identifier)
537        #Still have to update observation.permission           
538
539    def _assignInternalReview(self, ceda_observation):
540        if self._deploymentHasBeenProcessed:             
541            return
542               
543        i_party = createMO_Individual(name = 'Graham Parton')
544        i_reviewer = createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_processor), [i_party])
545        ceda_observation.metadataManagement.append( \
546            createCEDA_Review(reviewer=i_reviewer, reviewFrequency=getCLValue(CEDA_ReviewFrequencyValue.cl_yearly), \
547                              reviewStatus=getCLValue(CEDA_ReviewStatusValue.cl_required))) 
548
549    def _assignLanguage(self, ceda_observation):
550        if self._deploymentHasBeenProcessed:             
551            return
552       
553        i_date_stamp = createDate(datetime.datetime.now())
554        #i_contact = createCI_Citation("", date = i_date_stamp)
555        i_contact = createCI_ResponsibleParty(getCLValue(CI_RoleCode.cl_user))
556        ceda_observation.metadata = createMD_Metadata(date_stamp=i_date_stamp, contact = [i_contact], language = "English")
557                       
558    def _processResultAccumulation(self, ceda_observation): 
559        if self._dataEntityHasSameHash:             
560            return                     
561
562            updateFrequency = extractUpdateFrequency(self._dataEntityMigration)
563            if updateFrequency:
564                resultAccumulation = MD_MaintenanceFrequencyCode.from_string(updateFrequency)
565                if not self._deploymentHasBeenProcessed:
566                    self.epbRepo.moles3EPB.updateCedaObject(ceda_observation, {'resultAccumulation': resultAccumulation})             
567                    return
568        #Still have to update observation.permission
569                   
570    def _assignName(self, observation):       
571        '''
572            @param relatedPartyInfos: a MO_ResponsiblePartyInfo list
573            @return True if the documents changed, False otherwise
574        '''
575        if self._deploymentHasSameHash and self._dataEntityHasSameHash:
576            return 
577       
578        whereAreAuthors = self._deploymentMigration       
579        doc_authors = findAuthorsInResource(self._deploymentMigration)       
580        if doc_authors['authors'] in [DO_BADC, DO_NEODC]:
581            doc_authors = findAuthorsInResource(self._dataEntityMigration)
582            whereAreAuthors = self._dataEntityMigration
583               
584        ind_names = []
585        org_names = []
586        if authors.has_key(doc_authors['authors']):
587            tmp_auth = authors[doc_authors['authors']]
588            ret = self._extractIndividualsAndOrganizations(tmp_auth, whereAreAuthors)
589
590            if len(ret['ind']) > 0:
591                i_party = createMO_Individual(name = ret['ind'][0])
592                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))                 
593            if len(ret['org']) > 0:
594                i_party = createMO_Organization(name = ret['org'][0])
595                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))
596           
597            if len(ret['ind']) > 1:
598                ind_names.extend(ret['ind'][1:])                                                       
599            if len(ret['org']) > 1:
600                org_names.extend(ret['org'][1:])
601            if len(ret['co_ind']) > 0:                               
602                ind_names.extend(ret['co_ind'])
603            if len(ret['co_org']) > 0:                               
604                org_names.extend(ret['co_org'])                   
605                       
606        else:
607            raise NoAssociatedAuthor(doc_authors['authors'], migrationObject = whereAreAuthors)
608                     
609        if doc_authors['contributors'] and authors.has_key(doc_authors['contributors']):
610            tmp_auth = authors[doc_authors['contributors']]           
611            ret = self._extractIndividualsAndOrganizations(tmp_auth, whereAreAuthors)
612            ind_names.extend(ret['ind'])
613            ind_names.extend(ret['co_ind'])
614            org_names.extend(ret['org'])
615            org_names.extend(ret['co_org'])
616           
617        i_party = []
618        for nm in ind_names:
619            i_party.append(createMO_Individual(name = nm))
620               
621        for nm in org_names:
622            i_party.append(createMO_Organization(name = nm))
623           
624        if i_party:
625            if not self._deploymentHasBeenProcessed:
626                observation.relatedParty.append(createMO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_coinvestigator), i_party))
627                return
628        #Still have to update observation.permission
629   
    def _execute(self, ceda_observation):
        # Runs the full migration pipeline on one CEDA_Observation: every
        # _assign* helper below internally no-ops when its source document
        # hash is unchanged or the deployment was already processed, so this
        # sequence is safe to re-run on subsequent migrations.
        self._assignKeywords(ceda_observation)
        self._assignLineage(ceda_observation)
        self._assignResult(ceda_observation)
        self._assignPublisherCurator(ceda_observation)                       
        self._assignQuality(ceda_observation)       
        self._assignDescription(ceda_observation)
        self._assignTitle(ceda_observation)   
        self._assignGeographicExtent(ceda_observation)               
        self._assignCreationDate(ceda_observation)       
        self._assignPhenomenonTime(ceda_observation)       
        self._assignPermission(ceda_observation)       
        self._assignMoles2Link(ceda_observation)               
        self._assignInternalReview(ceda_observation)       
        self._assignLanguage(ceda_observation)
        self._processResultAccumulation(ceda_observation)           
        self._assignName(ceda_observation)
        #self._assignDOI(ceda_observation)       
       
        #Is a first time process?
        # First-time pass: persist the new observation in 'working' state and
        # record its id + document hash on the deployment migration object.
        if not hasMOBeenProcessed(self._deploymentMigration):
            ceda_observation.publicationState = getCLValue(MM_ObservationPublicationStateValue.cl_working)         
            # NOTE(review): doc_hash is computed from the *data entity*
            # document but stored on the *deployment* migration object —
            # confirm this asymmetry is intended.
            docHash = getAtomDocumentHashByMO(self._dataEntityMigration)
            self.epbRepo.moles3EPB.persistInstance(ceda_observation)       
            self.epbRepo.migrationEPB.updateMigrationObject(self._deploymentMigration, \
                {'ceda_observation_id': ceda_observation.id,
                 'doc_hash': docHash})               
       
        #Has a proper CEDAGUID?
        # Create and persist a CedaGUID (hash of the deployment id) linked to
        # this observation if one does not exist yet.
        if self.epbRepo.moles3EPB.retrieveGUIDFromInstance(ceda_observation) is None:       
            ceda_guid = CedaGUID()
            ceda_guid.id = calculateHash(self._deploymentMigration.depl_id)
            ceda_guid.ceda_observation = ceda_observation.id
            self.epbRepo.moles3EPB.persistInstance(ceda_guid)
            DeploymentProcessor.log.info("GUID for this Observation: %s" % (ceda_guid.id))


        # First-time pass: derive procedure and project from the deployment
        # data and attach them to the observation.
        if not self._deploymentHasBeenProcessed:               
            deploymentDataProcessor = DeploymentDataProcessor(self._deploymentMigration, self.epbRepo)               
            procedure = deploymentDataProcessor.createProcess()
            project = deploymentDataProcessor.createProject() 
            self.epbRepo.moles3EPB.updateCedaObject(ceda_observation, {'procedure': procedure, 'inSupportOf': project})

        #Still have to update observation.procedure
        #Still have to update observation.project
       
        return ceda_observation
677       
678    def process(self):
679        ceda_observation = None
680        #Moles3 object exists...
681        if self._deploymentMigration.ceda_observation_id:
682            ceda_observation = self.epbRepo.moles3EPB.search(CEDA_Observation, self._deploymentMigration.ceda_observation_id)
683        else:
684            #... does not exist so create it
685            ceda_observation =  ceda_observation = CEDA_Observation()
686   
687        self._execute(ceda_observation)   
688        return ceda_observation
Note: See TracBrowser for help on using the repository browser.