diff --git a/ted_sws/__init__.py b/ted_sws/__init__.py index d0315aa58..8be595aab 100644 --- a/ted_sws/__init__.py +++ b/ted_sws/__init__.py @@ -15,7 +15,7 @@ import dotenv -from ted_sws.core.adapters.config_resolver import EnvConfigResolver, AirflowAndEnvConfigResolver +from ted_sws.core.adapters.config_resolver import EnvConfigResolver, AirflowAndEnvConfigResolver, env_property dotenv.load_dotenv(verbose=True, override=os.environ.get('IS_PRIME_ENV') != 'true') @@ -38,174 +38,168 @@ class MongoDBConfig: - @property - def MONGO_DB_AUTH_URL(self) -> str: + @env_property() + def MONGO_DB_AUTH_URL(self, config_value: str) -> str: if self.ENVIRONMENT == "dev" and self.AIRFLOW__CORE__EXECUTOR: return self.MONGO_DB_AUTH_URL_DEV_CONTAINER - return EnvConfigResolver().config_resolve() + return config_value - @property - def MONGO_DB_AUTH_URL_DEV_CONTAINER(self) -> str: + @env_property() + def MONGO_DB_AUTH_URL_DEV_CONTAINER(self, config_value: str) -> str: """ This variable is to be used only on dev environment when execution is done from a docker container as oppose to development host environment """ - return EnvConfigResolver().config_resolve() + return config_value - @property - def ENVIRONMENT(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def ENVIRONMENT(self, config_value: str) -> str: + return config_value - @property - def AIRFLOW__CORE__EXECUTOR(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def AIRFLOW__CORE__EXECUTOR(self, config_value: str) -> str: + return config_value - @property - def MONGO_DB_PORT(self) -> int: - return int(EnvConfigResolver().config_resolve()) + @env_property() + def MONGO_DB_PORT(self, config_value: str) -> int: + return int(config_value) - @property - def MONGO_DB_AGGREGATES_DATABASE_NAME(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def MONGO_DB_AGGREGATES_DATABASE_NAME(self, config_value: str) -> str: + return config_value class 
RMLMapperConfig: - @property - def RML_MAPPER_PATH(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def RML_MAPPER_PATH(self, config_value: str) -> str: + return config_value class LimesAlignmentConfig: - @property - def LIMES_ALIGNMENT_PATH(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def LIMES_ALIGNMENT_PATH(self, config_value: str) -> str: + return config_value class AllegroConfig: - @property - def AGRAPH_SUPER_USER(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def AGRAPH_SUPER_USER(self, config_value: str) -> str: + return config_value - @property - def AGRAPH_SUPER_PASSWORD(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def AGRAPH_SUPER_PASSWORD(self, config_value: str) -> str: + return config_value - @property - def ALLEGRO_HOST(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def ALLEGRO_HOST(self, config_value: str) -> str: + return config_value - @property - def TRIPLE_STORE_ENDPOINT_URL(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def TRIPLE_STORE_ENDPOINT_URL(self, config_value: str) -> str: + return config_value class ELKConfig: - @property - def ELK_HOST(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def ELK_HOST(self, config_value: str) -> str: + return config_value - @property - def ELK_PORT(self) -> int: - v: str = EnvConfigResolver().config_resolve() - return int(v) if v is not None else None + @env_property() + def ELK_PORT(self, config_value: str) -> int: + return int(config_value) if config_value is not None else None - @property - def ELK_VERSION(self) -> int: - v: str = EnvConfigResolver().config_resolve() - return int(v) if v is not None else None + @env_property() + def ELK_VERSION(self, config_value: str) -> int: + return int(config_value) if config_value is not None else None class LoggingConfig: - @property - 
def MONGO_DB_LOGS_DATABASE_NAME(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def MONGO_DB_LOGS_DATABASE_NAME(self, config_value: str) -> str: + return config_value - @property - def DAG_LOGGER_CONFIG_HANDLERS(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def DAG_LOGGER_CONFIG_HANDLERS(self, config_value: str) -> str: + return config_value - @property - def CLI_LOGGER_CONFIG_HANDLERS(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def CLI_LOGGER_CONFIG_HANDLERS(self, config_value: str) -> str: + return config_value - @property - def LOGGER_LOG_FILENAME(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def LOGGER_LOG_FILENAME(self, config_value: str) -> str: + return config_value class XMLProcessorConfig: - @property - def XML_PROCESSOR_PATH(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def XML_PROCESSOR_PATH(self, config_value: str) -> str: + return config_value class GitHubArtefacts: - @property - def GITHUB_TED_SWS_ARTEFACTS_URL(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def GITHUB_TED_SWS_ARTEFACTS_URL(self, config_value: str) -> str: + return config_value class API: - @property - def ID_MANAGER_PROD_API_HOST(self) -> str: - v: str = EnvConfigResolver().config_resolve() - return v if v else "localhost" + @env_property(default_value="localhost") + def ID_MANAGER_PROD_API_HOST(self, config_value: str) -> str: + return config_value - @property - def ID_MANAGER_DEV_API_HOST(self) -> str: - v: str = EnvConfigResolver().config_resolve() - return v if v else "localhost" + @env_property(default_value="localhost") + def ID_MANAGER_DEV_API_HOST(self, config_value: str) -> str: + return config_value - @property - def ID_MANAGER_API_PORT(self) -> int: - v: str = EnvConfigResolver().config_resolve() - return int(v) if v else 8000 + @env_property(default_value="8000") + def 
ID_MANAGER_API_PORT(self, config_value: str) -> int: + return int(config_value) class TedAPIConfig: - @property - def TED_API_URL(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def TED_API_URL(self, config_value: str) -> str: + return config_value class FusekiConfig: - @property - def FUSEKI_ADMIN_USER(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def FUSEKI_ADMIN_USER(self, config_value: str) -> str: + return config_value - @property - def FUSEKI_ADMIN_PASSWORD(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def FUSEKI_ADMIN_PASSWORD(self, config_value: str) -> str: + return config_value - @property - def FUSEKI_ADMIN_HOST(self) -> str: - return EnvConfigResolver().config_resolve() + @env_property() + def FUSEKI_ADMIN_HOST(self, config_value: str) -> str: + return config_value class SFTPConfig: - @property - def SFTP_PUBLISH_HOST(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def SFTP_PUBLISH_HOST(self, config_value: str) -> str: + return config_value - @property - def SFTP_PUBLISH_PORT(self) -> int: - v = AirflowAndEnvConfigResolver().config_resolve() - return int(v) if v is not None else 22 + @env_property(config_resolver_class=AirflowAndEnvConfigResolver, default_value="22") + def SFTP_PUBLISH_PORT(self, config_value: str) -> int: + return int(config_value) - @property - def SFTP_PUBLISH_USER(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def SFTP_PUBLISH_USER(self, config_value: str) -> str: + return config_value - @property - def SFTP_PUBLISH_PASSWORD(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def SFTP_PUBLISH_PASSWORD(self, config_value: str) -> str: + return config_value - @property - 
def SFTP_PUBLISH_PATH(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def SFTP_PUBLISH_PATH(self, config_value: str) -> str: + return config_value class SPARQLConfig: @@ -216,41 +210,41 @@ def SPARQL_PREFIXES(self) -> dict: class S3PublishConfig: - @property - def S3_PUBLISH_USER(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_USER(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_PASSWORD(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_PASSWORD(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_HOST(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_HOST(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_SECURE(self) -> bool: - return True if AirflowAndEnvConfigResolver().config_resolve() == "1" else False + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_SECURE(self, config_value: str) -> bool: + return config_value == "1" - @property - def S3_PUBLISH_REGION(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_REGION(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_SSL_VERIFY(self) -> bool: - return AirflowAndEnvConfigResolver().config_resolve() == "1" + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_SSL_VERIFY(self, config_value: str) -> bool: + return config_value == "1" - @property - def S3_PUBLISH_NOTICE_BUCKET(self) -> str: - return 
AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver, default_value="notice") + def S3_PUBLISH_NOTICE_BUCKET(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_NOTICE_RDF_BUCKET(self) -> str: - return AirflowAndEnvConfigResolver().config_resolve() + @env_property(config_resolver_class=AirflowAndEnvConfigResolver, default_value="notice-rdf") + def S3_PUBLISH_NOTICE_RDF_BUCKET(self, config_value: str) -> str: + return config_value - @property - def S3_PUBLISH_ENABLED(self) -> bool: - return AirflowAndEnvConfigResolver().config_resolve() == "1" + @env_property(config_resolver_class=AirflowAndEnvConfigResolver) + def S3_PUBLISH_ENABLED(self, config_value: str) -> bool: + return config_value == "1" class TedConfigResolver(MongoDBConfig, RMLMapperConfig, XMLProcessorConfig, ELKConfig, LoggingConfig, diff --git a/ted_sws/core/adapters/config_resolver.py b/ted_sws/core/adapters/config_resolver.py index d4ea765a4..f0ef9a864 100644 --- a/ted_sws/core/adapters/config_resolver.py +++ b/ted_sws/core/adapters/config_resolver.py @@ -12,6 +12,7 @@ import logging import os from abc import ABC, abstractmethod +from typing import Type from ted_sws.core.adapters.vault_secrets_store import VaultSecretsStore @@ -113,3 +114,23 @@ def concrete_config_resolve(self, config_name: str, default_value: str = None): else: value = EnvConfigResolver().concrete_config_resolve(config_name, default_value) return value + + +def env_property(config_resolver_class: Type[ConfigResolverABC] = EnvConfigResolver, + default_value: str = None): + """ + This function provide decorator mechanism for config resolver. 
+ :param config_resolver_class: + :param default_value: + :return: + """ + def wrap(func): + @property + def wrapped_f(self, *args, **kwargs): + config_value = config_resolver_class().concrete_config_resolve(config_name=func.__name__, + default_value=default_value) + return func(self, config_value, *args, **kwargs) + + return wrapped_f + + return wrap \ No newline at end of file diff --git a/ted_sws/core/adapters/xml_preprocessor.py b/ted_sws/core/adapters/xml_preprocessor.py index 994339df1..fd738b37e 100644 --- a/ted_sws/core/adapters/xml_preprocessor.py +++ b/ted_sws/core/adapters/xml_preprocessor.py @@ -31,8 +31,8 @@ class XMLPreprocessor(XMLPreprocessorABC): This class provides XML preprocessing """ - def __init__(self, path_to_processor: pathlib.Path = config.XML_PROCESSOR_PATH): - self.path_to_processor = path_to_processor + def __init__(self, path_to_processor: pathlib.Path = None): + self.path_to_processor = path_to_processor if path_to_processor else config.XML_PROCESSOR_PATH def _generate_xslt_command(self, xml_path, xslt_path): """ diff --git a/ted_sws/data_manager/adapters/triple_store.py b/ted_sws/data_manager/adapters/triple_store.py index e4ca61ef9..9436f36bc 100644 --- a/ted_sws/data_manager/adapters/triple_store.py +++ b/ted_sws/data_manager/adapters/triple_store.py @@ -169,13 +169,13 @@ class FusekiException(Exception): class FusekiAdapter(TripleStoreABC): - def __init__(self, host: str = config.FUSEKI_ADMIN_HOST, - user: str = config.FUSEKI_ADMIN_USER, - password: str = config.FUSEKI_ADMIN_PASSWORD): + def __init__(self, host: str = None, + user: str = None, + password: str = None): - self.host = host - self.user = user - self.password = password + self.host = host if host else config.FUSEKI_ADMIN_HOST + self.user = user if user else config.FUSEKI_ADMIN_USER + self.password = password if password else config.FUSEKI_ADMIN_PASSWORD def create_repository(self, repository_name: str): """ diff --git 
a/ted_sws/event_manager/adapters/event_handler_config.py b/ted_sws/event_manager/adapters/event_handler_config.py index f7e843e5a..44e8f0c24 100644 --- a/ted_sws/event_manager/adapters/event_handler_config.py +++ b/ted_sws/event_manager/adapters/event_handler_config.py @@ -159,7 +159,7 @@ class DAGLoggerConfig(EventHandlerConfig): """ def __init__(self, mongodb_client: MongoClient = None, name: str = DEFAULT_LOGGER_NAME, filepath: Path = None, - handlers: HANDLERS_TYPE = None, config_handlers: str = config.DAG_LOGGER_CONFIG_HANDLERS): + handlers: HANDLERS_TYPE = None, config_handlers: str = None): """ This is the constructor/initialization of DAG event handler config. @@ -169,6 +169,7 @@ def __init__(self, mongodb_client: MongoClient = None, name: str = DEFAULT_LOGGE :param handlers: Forced event handlers :param config_handlers: Environment config event handlers for DAG """ + config_handlers = config_handlers if config_handlers else config.DAG_LOGGER_CONFIG_HANDLERS handlers, prime_handlers = self._init_handlers( config_handlers=config_handlers, default_handlers=[ @@ -189,7 +190,7 @@ class CLILoggerConfig(EventHandlerConfig): """ def __init__(self, mongodb_client: MongoClient = None, name: str = DEFAULT_LOGGER_NAME, filepath: Path = None, - handlers: HANDLERS_TYPE = None, config_handlers: str = config.CLI_LOGGER_CONFIG_HANDLERS): + handlers: HANDLERS_TYPE = None, config_handlers: str = None): """ This is the constructor/initialization of CLI event handler config. 
@@ -199,6 +200,7 @@ def __init__(self, mongodb_client: MongoClient = None, name: str = DEFAULT_LOGGE :param handlers: Forced event handlers :param config_handlers: Environment config event handlers for CLI """ + config_handlers = config_handlers if config_handlers else config.CLI_LOGGER_CONFIG_HANDLERS handlers, prime_handlers = self._init_handlers( config_handlers=config_handlers, default_handlers=[ diff --git a/ted_sws/mapping_suite_processor/services/load_mapping_suite_output_into_triple_store.py b/ted_sws/mapping_suite_processor/services/load_mapping_suite_output_into_triple_store.py index 5a7ac57ec..6d467fa9b 100644 --- a/ted_sws/mapping_suite_processor/services/load_mapping_suite_output_into_triple_store.py +++ b/ted_sws/mapping_suite_processor/services/load_mapping_suite_output_into_triple_store.py @@ -14,12 +14,11 @@ def repository_exists(triple_store: TripleStoreABC, repository_name) -> bool: return repository_name in triple_store.list_repositories() - def load_mapping_suite_output_into_fuseki_triple_store(package_folder_path, - triple_store_host=config.FUSEKI_ADMIN_HOST, - triple_store_user=config.FUSEKI_ADMIN_USER, - triple_store_password=config.FUSEKI_ADMIN_PASSWORD, - ): + triple_store_host: str = None, + triple_store_user: str = None, + triple_store_password: str = None, + ): """ Method to create a repository in the Fuseki triple store and load all ttl files from the output folder of a mapping suite package. Name of the repository will be auto-generated from the folder name. 
@@ -29,8 +28,9 @@ def load_mapping_suite_output_into_fuseki_triple_store(package_folder_path, :param triple_store_password: :return: """ - triple_store = FusekiAdapter(host=triple_store_host, password=triple_store_password, - user=triple_store_user) + triple_store = FusekiAdapter(host=triple_store_host or config.FUSEKI_ADMIN_HOST, + password=triple_store_password or config.FUSEKI_ADMIN_PASSWORD, + user=triple_store_user or config.FUSEKI_ADMIN_USER) load_mapping_suite_output_into_triple_store(package_folder_path, triple_store) @@ -51,8 +51,6 @@ def load_mapping_suite_output_into_triple_store(package_folder_path, ttl_files_paths = [path for path in package_folder_path.glob("output/**/*.ttl")] - - if repository_exists(triple_store=triple_store, repository_name=package_name): triple_store.delete_repository(repository_name=package_name) diff --git a/ted_sws/mapping_suite_processor/services/mapping_suite_digest_service.py b/ted_sws/mapping_suite_processor/services/mapping_suite_digest_service.py index 898b8c3dd..eb2f30387 100644 --- a/ted_sws/mapping_suite_processor/services/mapping_suite_digest_service.py +++ b/ted_sws/mapping_suite_processor/services/mapping_suite_digest_service.py @@ -4,8 +4,8 @@ def update_digest_api_address_for_mapping_suite(mapping_suite: MappingSuite, - current_digest_api_address: str = config.ID_MANAGER_DEV_API_HOST, - new_digest_api_address: str = config.ID_MANAGER_PROD_API_HOST, + current_digest_api_address: str = None, + new_digest_api_address: str = None, ) -> MappingSuite: """ Replace the digest API address @@ -21,6 +21,10 @@ def update_digest_api_address_for_mapping_suite(mapping_suite: MappingSuite, :param new_digest_api_address: :return: """ + if current_digest_api_address is None: + current_digest_api_address = config.ID_MANAGER_DEV_API_HOST + if new_digest_api_address is None: + new_digest_api_address = config.ID_MANAGER_PROD_API_HOST if not new_digest_api_address or not current_digest_api_address \ or new_digest_api_address == 
current_digest_api_address: diff --git a/ted_sws/notice_fetcher/adapters/ted_api.py b/ted_sws/notice_fetcher/adapters/ted_api.py index 8e463d468..59ea7e2ea 100644 --- a/ted_sws/notice_fetcher/adapters/ted_api.py +++ b/ted_sws/notice_fetcher/adapters/ted_api.py @@ -42,14 +42,15 @@ class TedAPIAdapter(TedAPIAdapterABC): This class will fetch documents content """ - def __init__(self, request_api: RequestAPI, ted_api_url: str = config.TED_API_URL): + def __init__(self, request_api: RequestAPI, ted_api_url: str = None): """ The constructor will take the API url as a parameter :param request_api: :param ted_api_url: """ + self.request_api = request_api - self.ted_api_url = ted_api_url + self.ted_api_url = ted_api_url if ted_api_url else config.TED_API_URL def get_by_wildcard_date(self, wildcard_date: str) -> List[dict]: """ diff --git a/ted_sws/notice_publisher/adapters/s3_notice_publisher.py b/ted_sws/notice_publisher/adapters/s3_notice_publisher.py index aba4fd362..9854a519c 100644 --- a/ted_sws/notice_publisher/adapters/s3_notice_publisher.py +++ b/ted_sws/notice_publisher/adapters/s3_notice_publisher.py @@ -20,13 +20,19 @@ class S3Publisher: This adapter is to be used to interact with triple store server on S3 bucket. 
""" - def __init__(self, host: str = config.S3_PUBLISH_HOST, - user: str = config.S3_PUBLISH_USER, - password: str = config.S3_PUBLISH_PASSWORD, - secure: bool = config.S3_PUBLISH_SECURE, - region: str = config.S3_PUBLISH_REGION, - ssl_verify: bool = config.S3_PUBLISH_SSL_VERIFY): - + def __init__(self, host: str = None, + user: str = None, + password: str = None, + secure: bool = None, + region: str = None, + ssl_verify: bool = None): + + host = host or config.S3_PUBLISH_HOST + user = user or config.S3_PUBLISH_USER + password = password or config.S3_PUBLISH_PASSWORD + secure = secure if secure is not None else config.S3_PUBLISH_SECURE + region = region or config.S3_PUBLISH_REGION + ssl_verify = ssl_verify if ssl_verify is not None else config.S3_PUBLISH_SSL_VERIFY if ssl_verify: self.client = Minio( host, diff --git a/ted_sws/notice_publisher/entrypoints/cli/cmd_s3_rdf_publisher.py b/ted_sws/notice_publisher/entrypoints/cli/cmd_s3_rdf_publisher.py index d0eeb5f17..8b589c31a 100644 --- a/ted_sws/notice_publisher/entrypoints/cli/cmd_s3_rdf_publisher.py +++ b/ted_sws/notice_publisher/entrypoints/cli/cmd_s3_rdf_publisher.py @@ -11,11 +11,12 @@ from ted_sws.core.model.manifestation import RDFManifestation from ted_sws.event_manager.adapters.log import LOG_INFO_TEXT, LOG_WARN_TEXT from ted_sws.notice_publisher.adapters.s3_notice_publisher import S3Publisher -from ted_sws.notice_publisher.services.notice_publisher import publish_notice_rdf_content_into_s3, \ - DEFAULT_NOTICE_RDF_S3_BUCKET_NAME +from ted_sws.notice_publisher.services.notice_publisher import publish_notice_rdf_content_into_s3 + from ted_sws.notice_transformer.services.notice_transformer import DEFAULT_TRANSFORMATION_FILE_EXTENSION CMD_NAME = "CMD_S3_RDF_PUBLISHER" +DEFAULT_NOTICE_RDF_S3_BUCKET_NAME = "notice-rdf" OUTPUT_FOLDER = '{mappings_path}/{mapping_suite_id}/' + DEFAULT_OUTPUT_PATH """ diff --git a/ted_sws/notice_publisher/services/notice_publisher.py 
b/ted_sws/notice_publisher/services/notice_publisher.py index 4b08aaefe..dd686852f 100644 --- a/ted_sws/notice_publisher/services/notice_publisher.py +++ b/ted_sws/notice_publisher/services/notice_publisher.py @@ -13,9 +13,6 @@ from ted_sws.notice_publisher.model.s3_publish_result import S3PublishResult from ted_sws.notice_transformer.services.notice_transformer import DEFAULT_TRANSFORMATION_FILE_EXTENSION -DEFAULT_NOTICE_S3_BUCKET_NAME = config.S3_PUBLISH_NOTICE_BUCKET or "notice" -DEFAULT_NOTICE_RDF_S3_BUCKET_NAME = config.S3_PUBLISH_NOTICE_RDF_BUCKET or "notice-rdf" - def publish_notice(notice: Notice, publisher: SFTPPublisherABC = None, remote_folder_path: str = None) -> bool: @@ -57,10 +54,15 @@ def publish_notice_by_id(notice_id: str, notice_repository: NoticeRepositoryABC, def publish_notice_into_s3(notice: Notice, s3_publisher: S3Publisher = S3Publisher(), - bucket_name: str = DEFAULT_NOTICE_S3_BUCKET_NAME) -> bool: + bucket_name: str = None) -> bool: """ - + This function publish a notice into S3 bucket. + :param notice: + :param s3_publisher: + :param bucket_name: + :return: """ + bucket_name = bucket_name or config.S3_PUBLISH_NOTICE_BUCKET mets_manifestation = notice.mets_manifestation if not mets_manifestation or not mets_manifestation.object_data: raise ValueError("Notice does not have a METS manifestation to be published.") @@ -77,7 +79,16 @@ def publish_notice_into_s3(notice: Notice, s3_publisher: S3Publisher = S3Publish def publish_notice_into_s3_by_id(notice_id: str, notice_repository: NoticeRepositoryABC, s3_publisher: S3Publisher = S3Publisher(), - bucket_name: str = DEFAULT_NOTICE_S3_BUCKET_NAME) -> bool: + bucket_name: str = None) -> bool: + """ + This function publish a notice by notice_id into S3 bucket. 
+ :param notice_id: + :param notice_repository: + :param s3_publisher: + :param bucket_name: + :return: + """ + bucket_name = bucket_name or config.S3_PUBLISH_NOTICE_BUCKET notice = notice_repository.get(reference=notice_id) result = publish_notice_into_s3(notice=notice, bucket_name=bucket_name, s3_publisher=s3_publisher) if result: @@ -86,10 +97,15 @@ def publish_notice_into_s3_by_id(notice_id: str, notice_repository: NoticeReposi def publish_notice_rdf_into_s3(notice: Notice, s3_publisher: S3Publisher = S3Publisher(), - bucket_name: str = DEFAULT_NOTICE_RDF_S3_BUCKET_NAME) -> bool: + bucket_name: str = None) -> bool: """ - + This function publish a distilled RDF Manifestation from a notice into S3 bucket. + :param notice: + :param s3_publisher: + :param bucket_name: + :return: """ + bucket_name = bucket_name or config.S3_PUBLISH_NOTICE_RDF_BUCKET rdf_manifestation: RDFManifestation = notice.distilled_rdf_manifestation result: bool = publish_notice_rdf_content_into_s3( rdf_manifestation=rdf_manifestation, @@ -102,7 +118,16 @@ def publish_notice_rdf_into_s3(notice: Notice, s3_publisher: S3Publisher = S3Pub def publish_notice_rdf_into_s3_by_id(notice_id: str, notice_repository: NoticeRepositoryABC, s3_publisher: S3Publisher = S3Publisher(), - bucket_name: str = DEFAULT_NOTICE_RDF_S3_BUCKET_NAME) -> bool: + bucket_name: str = None) -> bool: + """ + This function publish a distilled RDF Manifestation from a notice by notice_id into S3 bucket. 
+ :param notice_id: + :param notice_repository: + :param s3_publisher: + :param bucket_name: + :return: + """ + bucket_name = bucket_name or config.S3_PUBLISH_NOTICE_RDF_BUCKET notice = notice_repository.get(reference=notice_id) return publish_notice_rdf_into_s3(notice=notice, bucket_name=bucket_name, s3_publisher=s3_publisher) @@ -110,10 +135,20 @@ def publish_notice_rdf_into_s3_by_id(notice_id: str, notice_repository: NoticeRe def publish_notice_rdf_content_into_s3(rdf_manifestation: RDFManifestation, object_name: str, s3_publisher: S3Publisher = S3Publisher(), - bucket_name: str = DEFAULT_NOTICE_RDF_S3_BUCKET_NAME) -> bool: + bucket_name: str = None) -> bool: + """ + This function publish a RDF Manifestation into S3 bucket. + :param rdf_manifestation: + :param object_name: + :param s3_publisher: + :param bucket_name: + :return: + """ if not rdf_manifestation or not rdf_manifestation.object_data: raise ValueError("Notice does not have a RDF manifestation to be published.") + bucket_name = bucket_name or config.S3_PUBLISH_NOTICE_RDF_BUCKET + rdf_content = bytes(rdf_manifestation.object_data, encoding='utf-8') result: S3PublishResult = s3_publisher.publish( bucket_name=bucket_name,