Changeset 8496
Timestamp: 08/08/12 14:52:24 (8 years ago)
Location: mauRepo/MolesManager/trunk/cedaMoles
Files: 13 edited
Legend: in the excerpts below, removed lines are prefixed with '-', added lines with '+', and unprefixed lines are unchanged context.
mauRepo/MolesManager/trunk/cedaMoles/MolesManager/moles3epb.py (r8494 → r8496)

Two lines change: the import of the renamed commons helper and its single call site.

 46      from ea_model.ceda_metadatamodel.ceda_project.ceda_project import CEDA_Project
 47      from cedaMoles.MolesManager.codelist import MM_RoleValue, getCLValue
-48      from cedaMoles.libs.migration.processor.commons import fromPhenomenonTimeToString
+48      from cedaMoles.libs.migration.processor.commons import from_pt_to_string
 49      from datetime import datetime
 50      from ascore.utils import synchAttributes
...
 406         pt = member.phenomenonTime
-407         ptString = fromPhenomenonTimeToString(pt)
+407         ptString = from_pt_to_string(pt)
 408         if ptString[0] is not None:
 409             ptStart.append(datetime.strptime(ptString[0], dateFormat))
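A minimal sketch of how the renamed helper is consumed; collect_start_dates, members and the dateFormat value are illustrative assumptions, only from_pt_to_string and its (startDate, endDate) return shape come from the code above:

    # Sketch only: collect_start_dates and members are hypothetical; the real
    # loop lives in moles3epb.py.
    from datetime import datetime
    from cedaMoles.libs.migration.processor.commons import from_pt_to_string

    dateFormat = '%Y-%m-%d'  # assumed to match commons.DATE_FORMAT

    def collect_start_dates(members):
        ptStart = []
        for member in members:
            # from_pt_to_string maps a TM_Instant/TM_Period to a
            # (startDate, endDate) pair; either element may be None
            ptString = from_pt_to_string(member.phenomenonTime)
            if ptString[0] is not None:
                ptStart.append(datetime.strptime(ptString[0], dateFormat))
        return ptStart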
mauRepo/MolesManager/trunk/cedaMoles/libs/migration/processor/commons.py (r8495 → r8496)

The module-level helpers are renamed from camelCase to PEP 8 snake_case; a handful are also made module-private or deleted. All call sites inside the module are updated accordingly.

Renamed (public):
  calculateHash → calculate_hash
  buildExistDocPath → build_exist_doc_path
  buildExistTypePath → build_exist_type_path
  getAtomDocumentAsElementtree → get_atom_document_as_elementtree
  getAtomDocumentHashByMO → get_atom_document_hash_by_mo
  hasMOBeenProcessed → has_mo_been_processed
  hasMOSameHash → has_mo_same_hash
  findMolesCreationDate → find_moles_creation_date
  findMolesLineage → find_moles_lineage
  extractMolesProviderID → extract_moles_provider_id
  extractMolesTemporalRange → extract_moles_temporal_range
  extractMolesCreationDate → extract_moles_creation_date
  extractMolesPublishedDate → extract_moles_published_date
  extractQuality → extract_quality
  extractUpdateFrequency → extract_update_frequency
  extractContent → extract_content
  findAuthorsInResource → find_authors_in_resource
  findPublishedDate → find_published_date
  findUpdatedDate → find_updated_date
  findSummary → find_summary
  findID → find_id
  findLinksInMigrationDocument → find_links_in_migration_document
  findDownloadLinksInMigrationDocument → find_download_links_in_migration_document
  findAccessLinksInMigrationDocument → find_access_links_in_migration_document
  findDocumentationInMigrationDocument → find_documentation_in_migration_document
  findDOIInMigrationDocument → find_doi_in_migration_document
  findDeploymentsInDE → find_deployments_in_de
  findSubTypeInDPT → find_subtype_in_dpt
  findLinksInDeployment → find_links_in_deployment
  extractTitle → extract_title
  extractSummary → extract_summary
  extractGeographicExtentInMigrationDocument → extract_ge_in_migration_doc
  getOwnerRefs → get_owner_refs
  getCollectionRefs → get_collection_refs
  createMD_Keywords → create_md_keywords
  createDateTime → create_datetime
  createDate → create_date
  createTM_Position → create_tm_position
  createTM_Instant → create_tm_instant
  createTM_Period → create_tm_period
  createCI_Address → create_ci_address
  createCI_OnlineResource → create_ci_onlineresource
  createCI_Telephone → create_ci_telephone
  createCI_Contact → create_ci_contact
  createCI_Date → create_ci_date
  createCI_Citation → create_ci_citation
  createCI_ResponsibleParty → create_ci_responsible_party
  createMO_Individual → create_mo_individual
  createMO_Organization → create_mo_organization
  createMO_ResponsiblePartyInfo → create_mo_responsible_party_info
  createMO_OnlineResource → create_mo_online_resource
  createMD_Constraints → create_md_constraints
  createMD_LegalConstraints → create_md_legal_constraints
  createMD_Identifier → create_md_identifier
  createMD_Metadata → create_md_metadata
  createCEDA_Result → create_ceda_result
  createDQ_ConformanceResult → create_dq_conformance_result
  createDQ_Element → create_dq_element
  createEX_GeographicBoundingBox → create_ex_geographic_boundingbox
  createCEDA_Processing → create_ceda_processing
  createCEDA_Instrument → create_ceda_instrument
  createCEDA_CompositeProcess → create_ceda_composite_process
  createCEDA_Acquisition → create_ceda_acquisition
  createCEDA_Review → create_ceda_review
  createCEDA_Project → create_ceda_project
  fromDateStringToPhenomenonTime → from_date_string_to_pt
  fromPhenomenonTimeToString → from_pt_to_string
  comparePhenomenonTimes → compare_phenomenon_times

Renamed and made module-private:
  buildExistOwnerPath → _build_exist_owner_path
  getAtomDocumentByMO → _get_atom_document_by_mo
  findMolesPublishedDate → _find_moles_published_date
  findAuthorInResource → _find_author_in_resource
  findContributorInResource → _find_contributor_in_resource
  fromGeographicBoundingBoxToString → _from_geographic_bb_to_string

Removed:
  buildExistStatusPath (its '/exist/rest/atoms/%s' expression is inlined into build_exist_type_path)
  getAtomDocumentHash
  getAtomDocumentByType
  extractMolesQuality (duplicate of extract_quality)
  getResourceRefs
  getTypeRefs
  getResource

Other changes:
  In _get_document the commented-out request line is replaced by a real conn.request('GET', source) call, and the quoted-out print (xmlDoc) debug snippet is removed.
  The GEORSS_NS and GML_NS assignments gain a space around '='.
  compareGeographicBoundingBoxes keeps its name but now delegates to _from_geographic_bb_to_string.
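For reference, the three eXist path builders as they read after this changeset (docstrings dropped); the argument values in the closing comment are invented for illustration, the real ones come from commons.docStatus, commons.docCollections and commons.DOC_OWNERS:

    def build_exist_doc_path(doc_status, doc_type, doc_owner, doc_name):
        return '%s/%s' % (_build_exist_owner_path(doc_status, doc_type, doc_owner), doc_name)

    def _build_exist_owner_path(doc_status, doc_type, doc_owner):
        return '%s/%s' % (build_exist_type_path(doc_status, doc_type), doc_owner)

    def build_exist_type_path(doc_status, doc_type):
        # formerly delegated to buildExistStatusPath, now inlined
        return '%s/%s' % ('/exist/rest/atoms/%s' % (doc_status), doc_type)

    # e.g. build_exist_doc_path('published', 'data_entities', 'badc', 'foo.atom')
    # would yield '/exist/rest/atoms/published/data_entities/badc/foo.atom'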
mauRepo/MolesManager/trunk/cedaMoles/libs/migration/processor/dataEntity.py (r8494 → r8496)

Only names change: the commons import (lines 36-43) and every call site in DataEntityProcessor are updated to the snake_case helpers. The import now reads:

from cedaMoles.libs.migration.processor.commons import find_deployments_in_de,\
    create_md_identifier, extract_content,\
    has_mo_same_hash, create_ci_citation, create_ci_date, find_published_date,\
    isoDateTimeStringToTimeDate, find_updated_date, create_date,\
    calculate_hash, find_doi_in_migration_document,\
    has_mo_been_processed, get_atom_document_hash_by_mo, extract_title, extract_summary,\
    create_ex_geographic_boundingbox, from_date_string_to_pt,\
    compare_phenomenon_times, compareGeographicBoundingBoxes

All corresponding call sites are renamed in place; isoDateTimeStringToTimeDate and compareGeographicBoundingBoxes keep their old names.
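The hash bookkeeping those renamed helpers implement can be summarised as follows; needs_processing is a hypothetical wrapper written for illustration, the two predicates are the commons helpers used by DataEntityProcessor:

    from cedaMoles.libs.migration.processor.commons import has_mo_been_processed, has_mo_same_hash

    def needs_processing(migration_object):
        # First migration: nothing persisted yet, so process it (the processor then
        # stores the md5 of the atom document via get_atom_document_hash_by_mo).
        if not has_mo_been_processed(migration_object):
            return True
        # Already migrated: reprocess only if the stored doc_hash is stale.
        return not has_mo_same_hash(migration_object)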
mauRepo/MolesManager/trunk/cedaMoles/libs/migration/processor/deployment.py
r8490 r8496 31 31 @author: Maurizio Nagni 32 32 ''' 33 from cedaMoles.libs.migration.processor.commons import find MolesLineage, \34 create MO_ResponsiblePartyInfo,\35 DO_BADC, DO_NEODC, find AuthorsInResource, \36 create MD_Identifier, extractSummary, extractQuality, \37 has MOSameHash, getAtomDocumentHashByMO, extractTitle,\38 create CEDA_Result,\39 create EX_GeographicBoundingBox, extractGeographicExtentInMigrationDocument, \40 find DownloadLinksInMigrationDocument,\41 extract Content, createCI_Citation, createCI_Date, createDate,\42 create TM_Position, createTM_Instant, extractMolesCreationDate,\43 create DateTime, isoDateTimeStringToTimeDate, extractMolesProviderID,\44 DO_UKSSDC, create MO_Organization,\45 create CI_Contact, createCI_Address, createCI_OnlineResource,\46 create CI_Telephone, extractMolesTemporalRange,\47 find AccessLinksInMigrationDocument,\48 find LinksInDeployment, createMD_LegalConstraints,\49 create DQ_Element, createDQ_ConformanceResult, findUpdatedDate,\50 create MD_Metadata, createMO_OnlineResource, createCEDA_Review, calculateHash,\51 create CI_ResponsibleParty, extractUpdateFrequency,\52 find LinksInMigrationDocument, findSubTypeInDPT, extractMolesPublishedDate,\53 create MD_Keywords, hasMOBeenProcessed, createMO_Individual,\54 from DateStringToPhenomenonTime, \55 compare PhenomenonTimes, compareGeographicBoundingBoxes33 from cedaMoles.libs.migration.processor.commons import find_moles_lineage, \ 34 create_mo_responsible_party_info,\ 35 DO_BADC, DO_NEODC, find_authors_in_resource, \ 36 create_md_identifier, extract_summary, extract_quality, \ 37 has_mo_same_hash, get_atom_document_hash_by_mo, extract_title,\ 38 create_ceda_result,\ 39 create_ex_geographic_boundingbox, extract_ge_in_migration_doc, \ 40 find_download_links_in_migration_document,\ 41 extract_content, create_ci_citation, create_ci_date, create_date,\ 42 create_tm_position, create_tm_instant, extract_moles_creation_date,\ 43 create_datetime, isoDateTimeStringToTimeDate, extract_moles_provider_id,\ 44 DO_UKSSDC, create_mo_organization,\ 45 create_ci_contact, create_ci_address, create_ci_onlineresource,\ 46 create_ci_telephone, extract_moles_temporal_range,\ 47 find_access_links_in_migration_document,\ 48 find_links_in_deployment, create_md_legal_constraints,\ 49 create_dq_element, create_dq_conformance_result, find_updated_date,\ 50 create_md_metadata, create_mo_online_resource, create_ceda_review, calculate_hash,\ 51 create_ci_responsible_party, extract_update_frequency,\ 52 find_links_in_migration_document, find_subtype_in_dpt, extract_moles_published_date,\ 53 create_md_keywords, has_mo_been_processed, create_mo_individual,\ 54 from_date_string_to_pt, \ 55 compare_phenomenon_times, compareGeographicBoundingBoxes 56 56 from cedaMoles.libs.epb import EPB 57 57 from cedaMoles.libs.migration.processor.deployment_data import DeploymentDataProcessor … … 112 112 self._deployment_migration = deployment_migration 113 113 self.epbRepo = epbRepo 114 self._dataEntityHasSameHash = has MOSameHash(self._data_entity_migration)115 self._deploymentHasSameHash = has MOSameHash(self._deployment_migration)116 self._deploymentHasBeenProcessed = has MOBeenProcessed(self._deployment_migration)114 self._dataEntityHasSameHash = has_mo_same_hash(self._data_entity_migration) 115 self._deploymentHasSameHash = has_mo_same_hash(self._deployment_migration) 116 self._deploymentHasBeenProcessed = has_mo_been_processed(self._deployment_migration) 117 117 self._report = [] 118 118 … … 139 139 try: 140 140 if tmp_auth['author'] == 
'unknown': 141 doc_authors = find AuthorsInResource(self._data_entity_migration)141 doc_authors = find_authors_in_resource(self._data_entity_migration) 142 142 tmp_auth = authors[doc_authors['authors']] 143 143 … … 154 154 for item in tmp_auth['co_author_type']: 155 155 if (tmp_auth['co_author'][tmp_auth['co_author_type'].index(item)] == 'unknown'): 156 doc_authors = find AuthorsInResource(self._data_entity_migration)156 doc_authors = find_authors_in_resource(self._data_entity_migration) 157 157 tmp_auth = authors[doc_authors['contributors']] 158 158 break … … 189 189 190 190 py_date = None 191 cited_responsible = create CI_ResponsibleParty(role=getCLValue(CI_RoleCode.cl_publisher), \191 cited_responsible = create_ci_responsible_party(role=getCLValue(CI_RoleCode.cl_publisher), \ 192 192 organizationName='NERC - British Atmospheric Data Centre') 193 193 if doi.upper() == '10.5285/E8F43A51-0198-4323-A926-FE69225D57DD': … … 201 201 202 202 if py_date: 203 dt = create Date(py_date)204 ci_date = create CI_Date(getCLValue(CI_DateTypeCode.cl_publication), date = dt)205 i_authority = create CI_Citation("DOI", date = ci_date)206 identifier = create MD_Identifier(code = doi, authority=i_authority)203 dt = create_date(py_date) 204 ci_date = create_ci_date(getCLValue(CI_DateTypeCode.cl_publication), date = dt) 205 i_authority = create_ci_citation("DOI", date = ci_date) 206 identifier = create_md_identifier(code = doi, authority=i_authority) 207 207 self.epbRepo.moles3EPB.updateCedaObject(observation, {'identifier': identifier}) 208 208 DeploymentProcessor.log.info("DOI: %s" % (doi)) … … 212 212 return 213 213 214 provider_id = extract MolesProviderID(self._deployment_migration)214 provider_id = extract_moles_provider_id(self._deployment_migration) 215 215 i_keywords = [] 216 216 if provider_id == DO_BADC: … … 221 221 #Is a first time process? 
222 222 if not self._deploymentHasBeenProcessed: 223 ceda_observation.keywords.append(create MD_Keywords(i_keywords))223 ceda_observation.keywords.append(create_md_keywords(i_keywords)) 224 224 else: 225 225 ceda_observation.keywords.keyword = i_keywords … … 229 229 return 230 230 231 data_lineage = find MolesLineage(self._data_entity_migration)231 data_lineage = find_moles_lineage(self._data_entity_migration) 232 232 if data_lineage is None: 233 233 raise NoDataLineage(self._data_entity_migration) … … 241 241 242 242 i_sources = [] 243 download = find DownloadLinksInMigrationDocument(self._deployment_migration)243 download = find_download_links_in_migration_document(self._deployment_migration) 244 244 content = None 245 245 if len(download) == 0: 246 download = find DownloadLinksInMigrationDocument(self._data_entity_migration)247 content = extract Content(self._data_entity_migration)246 download = find_download_links_in_migration_document(self._data_entity_migration) 247 content = extract_content(self._data_entity_migration) 248 248 else: 249 content = extract Content(self._deployment_migration)249 content = extract_content(self._deployment_migration) 250 250 for dwn in download: 251 251 int_description = None … … 256 256 if dwn['href'].startswith('http://badc.nerc.ac.uk/browse') or dwn['href'].startswith('http://neodc.nerc.ac.uk/browse'): 257 257 int_description = "download directly from archive" 258 i_sources.append(create MO_OnlineResource(linkage = dwn['href'], name = dwn['title'], \258 i_sources.append(create_mo_online_resource(linkage = dwn['href'], name = dwn['title'], \ 259 259 function = getCLValue(CI_OnLineFunctionCode.cl_download), \ 260 260 description = int_description, applicationProfile = int_applicationProfile)) … … 275 275 276 276 if not self._deploymentHasBeenProcessed: 277 observation.result = create CEDA_Result(i_category, i_logical_path, source = i_sources)277 observation.result = create_ceda_result(i_category, i_logical_path, source = i_sources) 278 278 return 279 279 … … 290 290 return 291 291 292 provider_id = extract MolesProviderID(self._deployment_migration)292 provider_id = extract_moles_provider_id(self._deployment_migration) 293 293 party = None 294 294 if provider_id == DO_BADC: 295 295 i_linkage = 'http://badc.rl.ac.uk' 296 i_onlineResources = create CI_OnlineResource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website')297 i_address = create CI_Address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \296 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website') 297 i_address = create_ci_address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \ 298 298 electronicMailAddress=['badc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 299 i_phone = create CI_Telephone(voice=['+44(0)1235 446432'])300 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)301 party = create MO_Organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact])299 i_phone = create_ci_telephone(voice=['+44(0)1235 446432']) 300 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 301 party = create_mo_organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact]) 302 302 elif provider_id == DO_NEODC: 303 303 i_linkage = 'http://www.neodc.rl.ac.uk' 304 i_onlineResources = create 
CI_OnlineResource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website')305 i_address = create CI_Address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \304 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website') 305 i_address = create_ci_address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \ 306 306 electronicMailAddress=['neodc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 307 i_phone = create CI_Telephone(voice=['+44(0)1235 446432'])308 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)309 party = create MO_Organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact])307 i_phone = create_ci_telephone(voice=['+44(0)1235 446432']) 308 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 309 party = create_mo_organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact]) 310 310 elif provider_id == DO_UKSSDC: 311 311 i_linkage = 'http://www.ukssdc.rl.ac.uk' 312 i_onlineResources = create CI_OnlineResource(linkage = i_linkage, name = 'UK Solar System Data Centre website')313 i_address = create CI_Address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \312 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = 'UK Solar System Data Centre website') 313 i_address = create_ci_address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \ 314 314 electronicMailAddress=['support@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 315 i_phone = create CI_Telephone(voice=['+44(0)1235 445173'])316 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)317 party = create MO_Organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact])315 i_phone = create_ci_telephone(voice=['+44(0)1235 445173']) 316 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 317 party = create_mo_organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact]) 318 318 319 319 if party and not self._deploymentHasBeenProcessed: 320 320 newrp = [] 321 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_publisher), [party]))322 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_custodian), deepcopy([party])))323 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_distributor), deepcopy([party])))324 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_pointofcontact), deepcopy([party])))325 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_metadataowner), deepcopy([party])))326 newrp.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_curator), deepcopy([party])))321 newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_publisher), [party])) 322 newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_custodian), deepcopy([party]))) 323 newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_distributor), deepcopy([party]))) 324 newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_pointofcontact), deepcopy([party]))) 325 
newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_metadataowner), deepcopy([party]))) 326 newrp.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_curator), deepcopy([party]))) 327 327 self.epbRepo.moles3EPB.updateCedaObject(observation, {'relatedParty': newrp}) 328 328 return … … 333 333 return 334 334 335 doc_quality = extract Quality(self._data_entity_migration)336 doc_date = find UpdatedDate(self._data_entity_migration)335 doc_quality = extract_quality(self._data_entity_migration) 336 doc_date = find_updated_date(self._data_entity_migration) 337 337 ci_dates = [] 338 338 if doc_date: 339 i_date = create Date(isoDateTimeStringToTimeDate(doc_date))340 ci_dates.append(create CI_Date(getCLValue(CI_DateTypeCode.cl_revision), date = i_date))339 i_date = create_date(isoDateTimeStringToTimeDate(doc_date)) 340 ci_dates.append(create_ci_date(getCLValue(CI_DateTypeCode.cl_revision), date = i_date)) 341 341 else: 342 i_date = create Date(isoDateTimeStringToTimeDate(datetime.datetime.now()))343 ci_dates.append(create CI_Date(getCLValue(CI_DateTypeCode.cl_creation), date = i_date))344 345 i_specification = create CI_Citation(title = "CEDA Data Quality Statement", date=ci_dates)346 i_dq_result = create DQ_ConformanceResult(doc_quality, True, i_specification)347 i_quality_element = create DQ_Element(i_dq_result)342 i_date = create_date(isoDateTimeStringToTimeDate(datetime.datetime.now())) 343 ci_dates.append(create_ci_date(getCLValue(CI_DateTypeCode.cl_creation), date = i_date)) 344 345 i_specification = create_ci_citation(title = "CEDA Data Quality Statement", date=ci_dates) 346 i_dq_result = create_dq_conformance_result(doc_quality, True, i_specification) 347 i_quality_element = create_dq_element(i_dq_result) 348 348 349 349 if not self._deploymentHasBeenProcessed: … … 356 356 return 357 357 358 description = extract Summary(self._deployment_migration)358 description = extract_summary(self._deployment_migration) 359 359 if description is None: 360 description = extract Summary(self._data_entity_migration)360 description = extract_summary(self._data_entity_migration) 361 361 362 362 if description: … … 367 367 return 368 368 369 doc_title = extract Title(self._deployment_migration)369 doc_title = extract_title(self._deployment_migration) 370 370 if doc_title is None: 371 doc_title = extract Title(self._data_entity_migration)371 doc_title = extract_title(self._data_entity_migration) 372 372 373 373 if doc_title.startswith('deployment_') or doc_title.startswith('Deployment_'): 374 links = find LinksInMigrationDocument(self._deployment_migration)374 links = find_links_in_migration_document(self._deployment_migration) 375 375 dptList = links['DPT'] 376 376 if links.has_key('DPT'): … … 382 382 doc_title += ' and ' + dptList[-1] 383 383 384 links = find LinksInDeployment(self._deployment_migration)384 links = find_links_in_deployment(self._deployment_migration) 385 385 if links.has_key('OBS'): 386 386 obsList = [] 387 387 for obs in links['OBS']: 388 388 observationStation = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deployment_migration, obs + '.atom') 389 obsList.append((extract Title(observationStation), findSubTypeInDPT(observationStation)))389 obsList.append((extract_title(observationStation), find_subtype_in_dpt(observationStation))) 390 390 391 391 if obsList[0][1] in ['stationary platform' ,'moving platform', 'ship','aircraft','satellite','computer']: … … 404 404 activity = 
self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deployment_migration, link + '.atom') 405 405 406 projSubType = find SubTypeInDPT(activity)407 doc_title += ' for the ' + extract Title(activity)406 projSubType = find_subtype_in_dpt(activity) 407 doc_title += ' for the ' + extract_title(activity) 408 408 if projSubType[0:14] == 'dgActivityData': 409 409 doc_title += ' ' + projSubType[14:] … … 413 413 if doc_title[0:10] != 'Data from' : 414 414 doc_title = "Data from " + doc_title 415 auth = create CI_Citation(title = 'ceda_title')416 identifier = create MD_Identifier(code = doc_title, authority = auth)415 auth = create_ci_citation(title = 'ceda_title') 416 identifier = create_md_identifier(code = doc_title, authority = auth) 417 417 418 418 if not self._deploymentHasBeenProcessed: … … 425 425 return 426 426 427 ge = extract GeographicExtentInMigrationDocument(self._deployment_migration)427 ge = extract_ge_in_migration_doc(self._deployment_migration) 428 428 if ge is None: 429 ge = extract GeographicExtentInMigrationDocument(self._data_entity_migration)429 ge = extract_ge_in_migration_doc(self._data_entity_migration) 430 430 431 431 if ge is None: … … 433 433 self._report.append(NoGeographicalExtensionException(self._data_entity_migration)) 434 434 435 geographicExtent = create EX_GeographicBoundingBox(ge['east'], ge['north'], ge['west'], ge['south'])435 geographicExtent = create_ex_geographic_boundingbox(ge['east'], ge['north'], ge['west'], ge['south']) 436 436 437 437 if len(observation.geographicExtent) == 0 or \ … … 445 445 return 446 446 447 creation_date = extract MolesPublishedDate(self._deployment_migration)447 creation_date = extract_moles_published_date(self._deployment_migration) 448 448 if creation_date is None: 449 creation_date = extract MolesCreationDate(self._deployment_migration)449 creation_date = extract_moles_creation_date(self._deployment_migration) 450 450 py_datetime = isoDateTimeStringToTimeDate(creation_date) 451 date_time = create DateTime(py_datetime)452 tm_position = create TM_Position(dateTime8601 = date_time)451 date_time = create_datetime(py_datetime) 452 tm_position = create_tm_position(dateTime8601 = date_time) 453 453 454 454 if not self._deploymentHasBeenProcessed: 455 observation.resultTime = create TM_Instant(tm_position)455 observation.resultTime = create_tm_instant(tm_position) 456 456 return 457 457 #Still have to update observation.geographicExtent … … 461 461 return 462 462 463 doc_phenomenon_time = extract MolesTemporalRange(self._deployment_migration)464 pt = from DateStringToPhenomenonTime(doc_phenomenon_time)465 466 if not self._deploymentHasBeenProcessed or not compare PhenomenonTimes(pt,observation.phenomenonTime):463 doc_phenomenon_time = extract_moles_temporal_range(self._deployment_migration) 464 pt = from_date_string_to_pt(doc_phenomenon_time) 465 466 if not self._deploymentHasBeenProcessed or not compare_phenomenon_times(pt,observation.phenomenonTime): 467 467 self.epbRepo.moles3EPB.updateCedaObject(observation, {'phenomenonTime': pt}) 468 468 #Now update phenomenonTime but has to remove the previous phenomenonTime … … 472 472 return 473 473 474 access_link = find AccessLinksInMigrationDocument(self._deployment_migration)475 dwn_link = find DownloadLinksInMigrationDocument(self._deployment_migration)474 access_link = find_access_links_in_migration_document(self._deployment_migration) 475 dwn_link = find_download_links_in_migration_document(self._deployment_migration) 476 476 if len(access_link) == 0: 477 access_link = find 
AccessLinksInMigrationDocument(self._data_entity_migration)477 access_link = find_access_links_in_migration_document(self._data_entity_migration) 478 478 479 479 i_accessConstraints = [] … … 483 483 if len(access_link) == 0: 484 484 if len(dwn_link) == 0: 485 dwn_link = find DownloadLinksInMigrationDocument(self._data_entity_migration)485 dwn_link = find_download_links_in_migration_document(self._data_entity_migration) 486 486 if dwn_link and len(dwn_link) == 1: 487 487 i_use_limitation.append("These data are open access and available through %s." % (dwn_link[0]['href']) ) 488 488 #i_accessConstraints.append(MD_RestrictionCode.cl_) 489 observation.permission = create MD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)489 observation.permission = create_md_legal_constraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints) 490 490 else: 491 491 if access_link and len(access_link) == 1: 492 492 i_use_limitation.append("Access to these data is restricted. To obtain access please apply for access at: %s" % (access_link[0]['href'])) 493 493 i_accessConstraints.append(getCLValue(MD_RestrictionCode.cl_restricted)) 494 observation.permission = create MD_LegalConstraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints)494 observation.permission = create_md_legal_constraints(useLimitation = i_use_limitation, accessConstrains = i_accessConstraints) 495 495 496 496 if not self._deploymentHasBeenProcessed: … … 499 499 500 500 ''' 501 contentDict = extract Content(self._deployment_migration)501 contentDict = extract_content(self._deployment_migration) 502 502 if not contentDict.has_key('access-restricted'): 503 contentDict = extract Content(self._data_entity_migration)503 contentDict = extract_content(self._data_entity_migration) 504 504 ''' 505 505 … … 510 510 i_code = 'http://badc.nerc.ac.uk/view/%s__ATOM__%s' % (self._deployment_migration.doc_owner, self._deployment_migration.doc_name) 511 511 i_code = i_code.replace('.atom', '') 512 #i_code = buildExistDocPath(self._deployment_migration.doc_status, DT_DEPLOYMENTS, self._deployment_migration.doc_owner, self._deployment_migration.doc_name) 513 i_authority = createCI_Citation('moles2url') 514 identifier = createMD_Identifier(code = i_code, authority = i_authority) 512 i_authority = create_ci_citation('moles2url') 513 identifier = create_md_identifier(code = i_code, authority = i_authority) 515 514 if not self._deploymentHasBeenProcessed: 516 515 ceda_observation.identifier.append(identifier) … … 521 520 return 522 521 523 i_party = create MO_Individual(name = 'Graham Parton')524 i_reviewer = create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_processor), [i_party])522 i_party = create_mo_individual(name = 'Graham Parton') 523 i_reviewer = create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_processor), [i_party]) 525 524 ceda_observation.metadataManagement.append( \ 526 create CEDA_Review(reviewer=i_reviewer, reviewFrequency=getCLValue(CEDA_ReviewFrequencyValue.cl_yearly), \525 create_ceda_review(reviewer=i_reviewer, reviewFrequency=getCLValue(CEDA_ReviewFrequencyValue.cl_yearly), \ 527 526 reviewStatus=getCLValue(CEDA_ReviewStatusValue.cl_required))) 528 527 … … 531 530 return 532 531 533 i_date_stamp = create Date(datetime.datetime.now())532 i_date_stamp = create_date(datetime.datetime.now()) 534 533 #i_contact = createCI_Citation("", date = i_date_stamp) 535 i_contact = create CI_ResponsibleParty(getCLValue(CI_RoleCode.cl_user))536 
ceda_observation.metadata = create MD_Metadata(date_stamp=i_date_stamp, contact = [i_contact], language = "English")534 i_contact = create_ci_responsible_party(getCLValue(CI_RoleCode.cl_user)) 535 ceda_observation.metadata = create_md_metadata(date_stamp=i_date_stamp, contact = [i_contact], language = "English") 537 536 538 537 def _processResultAccumulation(self, ceda_observation): … … 540 539 return 541 540 542 updateFrequency = extract UpdateFrequency(self._data_entity_migration)541 updateFrequency = extract_update_frequency(self._data_entity_migration) 543 542 if updateFrequency: 544 543 resultAccumulation = MD_MaintenanceFrequencyCode.from_string(updateFrequency) … … 557 556 558 557 whereAreAuthors = self._deployment_migration 559 doc_authors = find AuthorsInResource(self._deployment_migration)558 doc_authors = find_authors_in_resource(self._deployment_migration) 560 559 if doc_authors['authors'] in [DO_BADC, DO_NEODC]: 561 doc_authors = find AuthorsInResource(self._data_entity_migration)560 doc_authors = find_authors_in_resource(self._data_entity_migration) 562 561 whereAreAuthors = self._data_entity_migration 563 562 … … 569 568 570 569 if len(ret['ind']) > 0: 571 i_party = create MO_Individual(name = ret['ind'][0])572 observation.relatedParty.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))570 i_party = create_mo_individual(name = ret['ind'][0]) 571 observation.relatedParty.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_author), [i_party])) 573 572 if len(ret['org']) > 0: 574 i_party = create MO_Organization(name = ret['org'][0])575 observation.relatedParty.append(create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_author), [i_party]))573 i_party = create_mo_organization(name = ret['org'][0]) 574 observation.relatedParty.append(create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_author), [i_party])) 576 575 577 576 if len(ret['ind']) > 1: … … 597 596 i_party = [] 598 597 for nm in ind_names: 599 i_party.append(create MO_Individual(name = nm))598 i_party.append(create_mo_individual(name = nm)) 600 599 601 600 for nm in org_names: 602 i_party.append(create MO_Organization(name = nm))601 i_party.append(create_mo_organization(name = nm)) 603 602 604 603 if i_party: 605 604 if not self._deploymentHasBeenProcessed: 606 rp = create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_coinvestigator), i_party)605 rp = create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_coinvestigator), i_party) 607 606 self.epbRepo.moles3EPB.updateCedaObject(observation, {'relatedParty': rp}) 608 607 return … … 629 628 630 629 #Is a first time process? 631 if not has MOBeenProcessed(self._deployment_migration):630 if not has_mo_been_processed(self._deployment_migration): 632 631 ceda_observation.publicationState = getCLValue(MM_ObservationPublicationStateValue.cl_working) 633 docHash = get AtomDocumentHashByMO(self._data_entity_migration)632 docHash = get_atom_document_hash_by_mo(self._data_entity_migration) 634 633 self.epbRepo.moles3EPB.persistInstance(ceda_observation) 635 634 self.epbRepo.migrationEPB.updateMigrationObject(self._deployment_migration, \ … … 640 639 if self.epbRepo.moles3EPB.retrieveGUIDFromInstance(ceda_observation) is None: 641 640 ceda_guid = CedaGUID() 642 ceda_guid.id = calculate Hash(self._deployment_migration.depl_id)641 ceda_guid.id = calculate_hash(self._deployment_migration.depl_id) 643 642 ceda_guid.ceda_observation = ceda_observation.id 644 643 self.epbRepo.moles3EPB.persistInstance(ceda_guid) -
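For orientation, a minimal sketch (not the project's actual method) of the first-time-processing branch recorded in the deployment hunks above, using the renamed helpers has_mo_been_processed and get_atom_document_hash_by_mo; the epb_repo object and the update payload are illustrative stand-ins drawn from the calls visible in this changeset.

    from cedaMoles.libs.migration.processor.commons import (
        has_mo_been_processed, get_atom_document_hash_by_mo)

    def persist_first_time(epb_repo, deployment_migration, data_entity_migration, observation):
        # First pass only: persist the observation and record the source document hash
        # so later runs can tell whether the atom document has changed since.
        if not has_mo_been_processed(deployment_migration):
            doc_hash = get_atom_document_hash_by_mo(data_entity_migration)
            epb_repo.moles3EPB.persistInstance(observation)
            epb_repo.migrationEPB.updateMigrationObject(deployment_migration,
                                                        {'doc_hash': doc_hash})
            return True
        return False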
mauRepo/MolesManager/trunk/cedaMoles/libs/migration/processor/deployment_data.py
r8486 r8496 31 31 @author: Maurizio Nagni 32 32 ''' 33 from cedaMoles.libs.migration.processor.commons import find SubTypeInDPT,\34 create CEDA_Processing, createCEDA_Instrument, createCEDA_Project,\35 find Summary, findDocumentationInMigrationDocument, createCI_Citation,\36 create MO_OnlineResource, findLinksInDeployment, hasMOBeenProcessed,\37 get AtomDocumentHashByMO, calculateHash, hasMOSameHash33 from cedaMoles.libs.migration.processor.commons import find_subtype_in_dpt,\ 34 create_ceda_processing, create_ceda_instrument, create_ceda_project,\ 35 find_summary, find_documentation_in_migration_document, create_ci_citation,\ 36 create_mo_online_resource, find_links_in_deployment, has_mo_been_processed,\ 37 get_atom_document_hash_by_mo, calculate_hash, has_mo_same_hash 38 38 from ea_model.moles3_4.utilities.ceda_publicationstatevalue import CEDA_PublicationStateValue 39 39 from cedaMoles.MolesManager.codelist import getCLValue … … 54 54 self._deploymentMigration = deploymentMigration 55 55 self.epbRepo = epbRepo 56 self._links = find LinksInDeployment(self._deploymentMigration)56 self._links = find_links_in_deployment(self._deploymentMigration) 57 57 58 58 def _commitDeploymentMigration(self, associateWithCedaObservation, dataProductionTool, dataProductionToolField): … … 61 61 62 62 def _createProject(self, activity): 63 i_abstract = find Summary(activity)64 doc_link = find DocumentationInMigrationDocument(activity)63 i_abstract = find_summary(activity) 64 doc_link = find_documentation_in_migration_document(activity) 65 65 i_documentation = None 66 66 i_resource = None 67 67 if doc_link and len(doc_link) == 1: 68 i_documentation = create CI_Citation("Documentation resource")69 i_resource = create MO_OnlineResource(doc_link[0]['href'])70 project = create CEDA_Project(abstract=i_abstract, publication_state=getCLValue(CEDA_PublicationStateValue.cl_working), \68 i_documentation = create_ci_citation("Documentation resource") 69 i_resource = create_mo_online_resource(doc_link[0]['href']) 70 project = create_ceda_project(abstract=i_abstract, publication_state=getCLValue(CEDA_PublicationStateValue.cl_working), \ 71 71 documentation=i_documentation, project_resource=i_resource) 72 72 self._commitDeploymentMigration(project, activity, 'ceda_project_id') … … 78 78 for link in self._links['ACTIVITY']: 79 79 activity = self.epbRepo.migrationEPB.getDeploymentDataMigrationByName(self._deploymentMigration, link + '.atom') 80 deploymentDataHasSameHash = has MOSameHash(activity)81 deploymentDataHasBeenProcessed = has MOBeenProcessed(activity)80 deploymentDataHasSameHash = has_mo_same_hash(activity) 81 deploymentDataHasBeenProcessed = has_mo_been_processed(activity) 82 82 try : 83 83 if activity.ceda_project_id is not None: … … 93 93 #Is a first time process? 94 94 if not deploymentDataHasBeenProcessed: 95 docHash = get AtomDocumentHashByMO(activity)95 docHash = get_atom_document_hash_by_mo(activity) 96 96 self.epbRepo.migrationEPB.updateMigrationObject(activity, \ 97 97 {'ceda_project_id': project.id, \ … … 100 100 #Has to updated the hash? 
101 101 if not deploymentDataHasSameHash and deploymentDataHasBeenProcessed: 102 docHash = get AtomDocumentHashByMO(activity)102 docHash = get_atom_document_hash_by_mo(activity) 103 103 self.epbRepo.migrationEPB.updateMigrationObject(activity, \ 104 104 {'doc_hash': docHash}) … … 108 108 #Adds the CedaGUID 109 109 ceda_guid = CedaGUID() 110 ceda_guid.id = calculate Hash(activity.deployment_data_id)110 ceda_guid.id = calculate_hash(activity.deployment_data_id) 111 111 setattr(ceda_guid, 'ceda_project', project.id) 112 112 self.epbRepo.moles3EPB.persistInstance(ceda_guid) … … 123 123 124 124 #has the document changed? 125 #if has MOSameHash(dataProductionTool):126 # dataProductionTool.doc_hash = get AtomDocumentHashByMO(self._deploymentMigration)125 #if has_mo_same_hash(dataProductionTool): 126 # dataProductionTool.doc_hash = get_atom_document_hash_by_mo(self._deploymentMigration) 127 127 # self._migrationSessions.migrationSession.commit() 128 128 # continue 129 129 130 subType = find SubTypeInDPT(dataProductionTool)130 subType = find_subtype_in_dpt(dataProductionTool) 131 131 132 132 if subType == 'model': 133 133 #MigrationEPB.loadAttributes(dataProductionTool, 'ceda_processing_id') 134 134 if dataProductionTool.ceda_processing_id is None: 135 associateWithCedaObservation = create CEDA_Processing()135 associateWithCedaObservation = create_ceda_processing() 136 136 self._commitDeploymentMigration(associateWithCedaObservation, dataProductionTool, 'ceda_processing_id') 137 137 if not (hasCedaComposite or hasCedaProcessing): … … 145 145 #MigrationEPB.loadAttributes(dataProductionTool, 'ceda_instrument_id') 146 146 if dataProductionTool.ceda_instrument_id is None: 147 associateWithCedaObservation = create CEDA_Instrument()147 associateWithCedaObservation = create_ceda_instrument() 148 148 self._commitDeploymentMigration(associateWithCedaObservation, dataProductionTool, 'ceda_instrument_id') 149 149 else: … … 157 157 158 158 #has the document changed? 159 #if has MOSameHash(observationStation):160 # observationStation.doc_hash = get AtomDocumentHashByMO(self._deploymentMigration)159 #if has_mo_same_hash(observationStation): 160 # observationStation.doc_hash = get_atom_document_hash_by_mo(self._deploymentMigration) 161 161 # self._migrationSessions.migrationSession.commit() 162 162 # continue 163 163 164 subType = find SubTypeInDPT(observationStation)164 subType = find_subtype_in_dpt(observationStation) 165 165 if subType == 'satellite': 166 166 #MigrationEPB.loadAttributes(dataProductionTool, 'ceda_compositeprocess_id') 167 167 if dataProductionTool.ceda_compositeprocess_id is None: 168 associateWithCedaObservation = create CEDA_Processing()168 associateWithCedaObservation = create_ceda_processing() 169 169 self._commitDeploymentMigration(associateWithCedaObservation, dataProductionTool, 'ceda_compositeprocess_id') 170 170 if not hasCedaComposite: … … 180 180 pass 181 181 ''' 182 associateWithCedaObservation = create CEDA_Acquisition()182 associateWithCedaObservation = create_ceda_acquisition() 183 183 self._commitDeploymentMigration(associateWithCedaObservation, dataProductionTool, 'ceda_acquisition_id') 184 184 ''' -
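The data-production-tool hunks above dispatch on the tool's subtype; below is a compact, hedged restatement of that pattern, with the surrounding commit and association bookkeeping omitted.

    from cedaMoles.libs.migration.processor.commons import (
        find_subtype_in_dpt, create_ceda_processing, create_ceda_instrument)

    def choose_associated_object(data_production_tool):
        # 'model' tools become a CEDA processing record; the other branch in
        # deployment_data.py creates an instrument instead.
        sub_type = find_subtype_in_dpt(data_production_tool)
        if sub_type == 'model':
            return create_ceda_processing()
        return create_ceda_instrument()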
mauRepo/MolesManager/trunk/cedaMoles/libs/migration/processor/loadResources.py
r8486 r8496 31 31 @author: Maurizio Nagni 32 32 ''' 33 from cedaMoles.libs.migration.processor.commons import DOC_STATUS, get CollectionRefs,\34 find MolesCreationDate, findID, stringToTimestamp, buildExistTypePath,\35 build ExistDocPath, DT_DATA_ENTITIES, DT_DEPLOYMENTS, DT_DEPLOYMENT_DATA,\36 get OwnerRefs, getAtomDocumentAsElementtree33 from cedaMoles.libs.migration.processor.commons import DOC_STATUS, get_collection_refs,\ 34 find_moles_creation_date, find_id, stringToTimestamp, build_exist_type_path,\ 35 build_exist_doc_path, DT_DATA_ENTITIES, DT_DEPLOYMENTS, DT_DEPLOYMENT_DATA,\ 36 get_owner_refs, get_atom_document_as_elementtree 37 37 from cedaMoles.libs.migration.exception.exceptions import NoCreationDate, NoDocumentID 38 38 from cedaMoles.libs.migration.db.classes import DeploymentDataMigration,\ … … 83 83 def _loadCollections(self, docStatus, docType, migrationClass): 84 84 ex = [] 85 refs = get CollectionRefs(buildExistTypePath(docStatus, docType))85 refs = get_collection_refs(build_exist_type_path(docStatus, docType)) 86 86 for ref in refs: 87 87 docOwner = ref.get('name') … … 94 94 def _loadMigrationDocs(self, docStatus, docOwner, docType, migrationClass): 95 95 ex = [] 96 refs = get OwnerRefs(docStatus, docType, docOwner)96 refs = get_owner_refs(docStatus, docType, docOwner) 97 97 for ref in refs: 98 98 docName = ref.get('name') … … 101 101 except Exception as e: 102 102 if hasattr(e, 'value'): 103 e.value = build ExistDocPath(docStatus, docType, docOwner, docName)103 e.value = build_exist_doc_path(docStatus, docType, docOwner, docName) 104 104 ex.append(e) 105 105 return ex … … 115 115 116 116 #The docHash has to be set/check when the document is processed! 117 #docHash = getAtomDocumentHash(docStatus, docType, docOwner, docName) 118 xmlDocument = getAtomDocumentAsElementtree(docStatus, docType, docOwner, docName) 119 doc_id = findID(xmlDocument) 117 xmlDocument = get_atom_document_as_elementtree(docStatus, docType, docOwner, docName) 118 doc_id = find_id(xmlDocument) 120 119 121 120 #The document has been already classified … … 130 129 131 130 def _extractID(self, xmlDocument): 132 doc_id = find ID(xmlDocument)131 doc_id = find_id(xmlDocument) 133 132 if doc_id is None: 134 133 raise NoDocumentID(doc_id) … … 136 135 137 136 def _extractCreationDate(self, xmlDocument): 138 creationDate = find MolesCreationDate(xmlDocument)137 creationDate = find_moles_creation_date(xmlDocument) 139 138 if creationDate is None: 140 139 raise NoCreationDate(creationDate) -
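As a rough usage note for the renamed eXist path helpers imported above, here is a sketch with invented status/type/owner/name values; the real values come from commons.DOC_STATUS and the DT_* constants used in loadResources.py, so treat the literals as placeholders.

    from cedaMoles.libs.migration.processor.commons import (
        build_exist_type_path, build_exist_doc_path, get_collection_refs)

    # Hypothetical values, for illustration only.
    doc_status, doc_type, doc_owner, doc_name = 'published', 'dataent', 'badc', 'example.atom'

    # Collection path for one status/type pair, e.g. to list its owner sub-collections.
    owner_refs = get_collection_refs(build_exist_type_path(doc_status, doc_type))

    # Full path to a single atom document, as used when reporting load errors.
    doc_path = build_exist_doc_path(doc_status, doc_type, doc_owner, doc_name)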
mauRepo/MolesManager/trunk/cedaMoles/tests/cedamoles/mo/responsiblePartyInfo.py
r8487 r8496 31 31 @author: mnagni 32 32 ''' 33 from cedaMoles.libs.migration.processor.commons import create MO_ResponsiblePartyInfo,\34 create CI_OnlineResource, createCI_Address, createCI_Telephone,\35 create CI_Contact, createMO_Organization33 from cedaMoles.libs.migration.processor.commons import create_mo_responsible_party_info,\ 34 create_ci_onlineresource, create_ci_address, create_ci_telephone,\ 35 create_ci_contact, create_mo_organization 36 36 from cedaMoles.MolesManager.codelist import getCLValue, MM_RoleValue 37 37 from ea_model.moles3_4.utilities.mo_organisation import MO_Organisation … … 43 43 parties = [] 44 44 i_linkage = 'http://badc.rl.ac.uk' 45 i_onlineResources = create CI_OnlineResource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website')46 i_address = create CI_Address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \45 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = 'British Atmospheric Data Centre Website') 46 i_address = create_ci_address(deliveryPoint = ['British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory'], \ 47 47 electronicMailAddress=['badc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 48 i_phone = create CI_Telephone(voice=['+44(0)1235 446432'])49 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)50 parties.append(create MO_Organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact]))48 i_phone = create_ci_telephone(voice=['+44(0)1235 446432']) 49 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 50 parties.append(create_mo_organization(name = "NERC - British Atmospheric Data Centre", contactInfo = [contact])) 51 51 52 52 i_linkage = 'http://www.neodc.rl.ac.uk' 53 i_onlineResources = create CI_OnlineResource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website')54 i_address = create CI_Address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \53 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = 'NERC Earth Observation Data Centre website') 54 i_address = create_ci_address(deliveryPoint = ['NERC - Earth Observation Data Centre, STFC Rutherford Appleton Laboratory'], \ 55 55 electronicMailAddress=['neodc@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 56 i_phone = create CI_Telephone(voice=['+44(0)1235 446432'])57 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)58 parties.append(create MO_Organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact]))59 return create MO_ResponsiblePartyInfo(getCLValue(MM_RoleValue.cl_publisher), parties)56 i_phone = create_ci_telephone(voice=['+44(0)1235 446432']) 57 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 58 parties.append(create_mo_organization(name = 'NERC - Earth Observation Data Centre', contactInfo = [contact])) 59 return create_mo_responsible_party_info(getCLValue(MM_RoleValue.cl_publisher), parties) 60 60 61 61 class ResponsiblePartyInfoTest(CedaMolesTest): -
mauRepo/MolesManager/trunk/cedaMoles/tests/migration/commontest.py
r8486 r8496 6 6 from unittest import TestCase 7 7 from cedaMoles.libs.migration.processor.commons import isoDateTimeStringToTimeDate,\ 8 from DateStringToPhenomenonTime, fromPhenomenonTimeToString,\9 compare PhenomenonTimes8 from_date_string_to_pt, from_pt_to_string,\ 9 compare_phenomenon_times 10 10 11 11 … … 25 25 def testFromDateStringToPhenomenonTime(self): 26 26 temporalRange = '2002-07-22/2011-08-06' 27 pt = from DateStringToPhenomenonTime(temporalRange)27 pt = from_date_string_to_pt(temporalRange) 28 28 print pt 29 29 30 30 def testFromPhenomenonTimeToString(self): 31 31 temporalRange = '2002-07-22/2011-08-06' 32 pt = from DateStringToPhenomenonTime(temporalRange)33 ptString = from PhenomenonTimeToString(pt)32 pt = from_date_string_to_pt(temporalRange) 33 ptString = from_pt_to_string(pt) 34 34 startDate, endDate = temporalRange.split('/') 35 35 self.assertTrue(startDate == ptString[0], "Error") … … 37 37 38 38 temporalRange = '2002-07-22' 39 pt = from DateStringToPhenomenonTime(temporalRange)40 ptString = from PhenomenonTimeToString(pt)39 pt = from_date_string_to_pt(temporalRange) 40 ptString = from_pt_to_string(pt) 41 41 startDate, endDate = temporalRange, None 42 42 self.assertTrue(startDate == ptString[0], "Error") … … 44 44 45 45 def testComparePhenomenonTimes(self): 46 p1 = from DateStringToPhenomenonTime('2002-07-22/2011-08-06')47 p2 = from DateStringToPhenomenonTime('2002-07-22/2011-08-06')48 self.assertTrue(compare PhenomenonTimes(p1,p2), "Error")46 p1 = from_date_string_to_pt('2002-07-22/2011-08-06') 47 p2 = from_date_string_to_pt('2002-07-22/2011-08-06') 48 self.assertTrue(compare_phenomenon_times(p1,p2), "Error") 49 49 50 p2 = from DateStringToPhenomenonTime('2002-07-22/2011-08-05')51 self.assertFalse(compare PhenomenonTimes(p1,p2), "Error")50 p2 = from_date_string_to_pt('2002-07-22/2011-08-05') 51 self.assertFalse(compare_phenomenon_times(p1,p2), "Error") 52 52 53 p2 = from DateStringToPhenomenonTime('2002-07-22')54 self.assertFalse(compare PhenomenonTimes(p1,p2), "Error")53 p2 = from_date_string_to_pt('2002-07-22') 54 self.assertFalse(compare_phenomenon_times(p1,p2), "Error") 55 55 -
mauRepo/MolesManager/trunk/cedaMoles/tests/migration/loadresource.py
r8494 r8496 7 7 from cedaMoles.libs.migration.processor.loadResources import LoadResources 8 8 from cedaMoles.libs.migration.db.classes import DataEntityMigration 9 from cedaMoles.libs.migration.processor.commons import calculate Hash9 from cedaMoles.libs.migration.processor.commons import calculate_hash 10 10 11 11 class LoadResourceTest(CedaMolesTest): … … 15 15 migrationClass = DataEntityMigration() 16 16 doc_id = 123 17 docHash = calculate Hash('123_docHash')17 docHash = calculate_hash('123_docHash') 18 18 docCreation = '2009-12-10T03:16:25Z' 19 19 lr.createMigrationDoc(migrationClass, doc_id, '123_docName', \ … … 23 23 docHash, docCreation)) 24 24 self.assertTrue(lr.updateMigrationDoc(migrationClass, doc_id, 25 calculate Hash('321_docHash'), \25 calculate_hash('321_docHash'), \ 26 26 docCreation)) 27 27 self.assertTrue(lr.updateMigrationDoc(migrationClass, doc_id, \ 28 calculate Hash('321_docHash'), \28 calculate_hash('321_docHash'), \ 29 29 '2012-12-10T03:16:25Z')) 30 30 -
mauRepo/MolesManager/trunk/cedaMoles/tests/migration/moles3epbtests.py
r8494 r8496 5 5 ''' 6 6 import logging, datetime 7 from cedaMoles.libs.migration.processor.commons import create CI_Date, \8 create DateTime, createDate, createTM_Position, createTM_Instant7 from cedaMoles.libs.migration.processor.commons import create_ci_date, \ 8 create_datetime, create_date, create_tm_position, create_tm_instant 9 9 from ea_model.iso_19115_2006_metadata_corrigendum.\ 10 10 citation_and_responsible_party_information.ci_datetypecode \ … … 123 123 session = self.epbRepo.moles3EPB.getNewMolesSession(); 124 124 py_date = datetime.date(2011, 4, 1) 125 dt = create Date(py_date)126 ci_date = create CI_Date(CI_DateTypeCode.cl_creation, dt)125 dt = create_date(py_date) 126 ci_date = create_ci_date(CI_DateTypeCode.cl_creation, dt) 127 127 128 128 Moles3EPBTest.log.info('Stores an empty new CEDA_ObservationCollection') … … 213 213 def _createDateTime(self): 214 214 py_date = datetime.datetime(2011, 4, 1, 00, 00, 00) 215 return create DateTime(py_date)215 return create_datetime(py_date) 216 216 217 217 def _createCI_Date(self): 218 218 py_date = datetime.date(2011, 4, 1) 219 dt = create Date(py_date)220 return create CI_Date(CI_DateTypeCode.cl_creation, dt)219 dt = create_date(py_date) 220 return create_ci_date(CI_DateTypeCode.cl_creation, dt) 221 221 222 222 def _createTM_Position(self): 223 223 newDateTime = self._createDateTime() 224 return create TM_Position(dateTime8601 = newDateTime)224 return create_tm_position(dateTime8601 = newDateTime) 225 225 226 226 def _createTM_Instant(self): 227 227 tm_position = self._createTM_Position() 228 return create TM_Instant(tm_position)228 return create_tm_instant(tm_position) -
mauRepo/MolesManager/trunk/cedaMoles/tests/migration/test_utils.py
r8486 r8496 5 5 ''' 6 6 from ea_model.ceda_metadatamodel.ceda_observationcollection.ceda_observationcollection import CEDA_ObservationCollection 7 from cedaMoles.libs.migration.processor.commons import create CI_Citation,\8 create MD_Identifier7 from cedaMoles.libs.migration.processor.commons import create_ci_citation,\ 8 create_md_identifier 9 9 from ea_model.ceda_metadatamodel.ceda_observation.ceda_observation import CEDA_Observation 10 10 from ea_model.ceda_metadatamodel.ceda_project.ceda_project import CEDA_Project … … 17 17 ''' 18 18 observationCollection = CEDA_ObservationCollection() 19 auth = create CI_Citation('test_title')20 i_identifier = create MD_Identifier(code = 'test_code', authority=auth)19 auth = create_ci_citation('test_title') 20 i_identifier = create_md_identifier(code = 'test_code', authority=auth) 21 21 observationCollection.identifier.append(i_identifier) 22 22 return observationCollection … … 31 31 ''' 32 32 observation = CEDA_Observation() 33 i_identifier = create MD_Identifier(code = 'test_code', authority=createCI_Citation('test_title'))33 i_identifier = create_md_identifier(code = 'test_code', authority=create_ci_citation('test_title')) 34 34 observation.identifier.append(i_identifier) 35 i_identifier = create MD_Identifier(code = 'mau_code', authority=createCI_Citation('mau_title'))35 i_identifier = create_md_identifier(code = 'mau_code', authority=create_ci_citation('mau_title')) 36 36 observation.identifier.append(i_identifier) 37 37 observation.dataLineage = "test_dataLineage" -
mauRepo/MolesManager/trunk/cedaMoles/tests/moles2gui/creationFactory.py
r8486 r8496 31 31 @author: mnagni 32 32 ''' 33 from cedaMoles.libs.migration.processor.commons import create CI_OnlineResource,\34 create CI_Address, createCI_Telephone, createCI_Contact,\35 create MO_Organization, createMO_Individual, createMO_ResponsiblePartyInfo,\36 isoDateTimeStringToTimeDate, create DateTime, createTM_Position,\37 create TM_Instant, fromDateStringToPhenomenonTime33 from cedaMoles.libs.migration.processor.commons import create_ci_onlineresource,\ 34 create_ci_address, create_ci_telephone, create_ci_contact,\ 35 create_mo_organization, create_mo_individual, create_mo_responsible_party_info,\ 36 isoDateTimeStringToTimeDate, create_datetime, create_tm_position,\ 37 create_tm_instant, from_date_string_to_pt 38 38 from cedaMoles.MolesManager.codelist import getCLValue, MM_RoleValue 39 39 from ea_model.ceda_metadatamodel.ceda_observation.ceda_observation import CEDA_Observation 40 40 41 41 def assembleMO_Individual(name = 'CedaTest', iid = 1): 42 ret = create MO_Individual(name = name)42 ret = create_mo_individual(name = name) 43 43 ret.id = iid 44 44 return ret … … 46 46 def assembleMO_Organization(name = 'UK Solar System Data Centre website', iid = 1): 47 47 i_linkage = 'http://www.ukssdc.rl.ac.uk' 48 i_onlineResources = create CI_OnlineResource(linkage = i_linkage, name = name)49 i_address = create CI_Address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \48 i_onlineResources = create_ci_onlineresource(linkage = i_linkage, name = name) 49 i_address = create_ci_address(deliveryPoint = ['UK Solar System Data Centre, STFC Rutherford Appleton Laboratory'], \ 50 50 electronicMailAddress=['support@rl.ac.uk'], postalCode='OX11 0QX', country='UK', city='Harwell Oxford') 51 i_phone = create CI_Telephone(voice=['+44(0)1235 445173'])52 contact = create CI_Contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources)53 ret = create MO_Organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact])51 i_phone = create_ci_telephone(voice=['+44(0)1235 445173']) 52 contact = create_ci_contact(phone=i_phone, address=i_address, onlineResource=i_onlineResources) 53 ret = create_mo_organization(name = 'NERC - UK Solar System Data Centre', contactInfo = [contact]) 54 54 ret.id = iid 55 55 return ret … … 60 60 """ 61 61 party = [assembleMO_Individual(iid = 1), assembleMO_Organization(iid = 2)] 62 rp = create MO_ResponsiblePartyInfo(role, party)62 rp = create_mo_responsible_party_info(role, party) 63 63 rp.id =iid 64 64 return rp … … 72 72 """ 73 73 py_datetime = isoDateTimeStringToTimeDate(timestring) 74 date_time = create DateTime(py_datetime)75 tm_position = create TM_Position(dateTime8601 = date_time)76 return create TM_Instant(tm_position)74 date_time = create_datetime(py_datetime) 75 tm_position = create_tm_position(dateTime8601 = date_time) 76 return create_tm_instant(tm_position) 77 77 78 78 def assembleObservation(phenomenonTime = '2002-07-22/2011-08-06'): … … 80 80 ceda_observation.relatedParty = assembleMO_ResponsiblePartyInfo() 81 81 ceda_observation.resultTime = assembleTM_Instant() 82 ceda_observation.phenomenonTime = from DateStringToPhenomenonTime(phenomenonTime)82 ceda_observation.phenomenonTime = from_date_string_to_pt(phenomenonTime) 83 83 return ceda_observation -
mauRepo/MolesManager/trunk/cedaMoles/tests/moles2gui/om/period.py
r8487 r8496 35 35 from cedaMoles.MolesManager.views.moles2gui import encodeCedaMoles2Json,\ 36 36 decodeJson2CedaMoles, getData 37 from cedaMoles.libs.migration.processor.commons import from DateStringToPhenomenonTime37 from cedaMoles.libs.migration.processor.commons import from_date_string_to_pt 38 38 39 39 … … 54 54 Validates a simple encode/decode 55 55 """ 56 period = from DateStringToPhenomenonTime('2002-07-22/2011-08-06')56 period = from_date_string_to_pt('2002-07-22/2011-08-06') 57 57 json = encodeCedaMoles2Json(period) 58 58 #----------------------- test ------------------------ … … 70 70 Decodes an updated json object 71 71 """ 72 period = from DateStringToPhenomenonTime('2002-07-22/2011-08-06')72 period = from_date_string_to_pt('2002-07-22/2011-08-06') 73 73 json = encodeCedaMoles2Json(period) 74 74 decJson = loads(json)