From 5c1015460399cf8e3220d3c04bced27b2e70c661 Mon Sep 17 00:00:00 2001 From: Jamie Diprose <5715104+jdddog@users.noreply.github.com> Date: Tue, 8 Aug 2023 11:17:14 +1200 Subject: [PATCH] Update for Airflow 2.6.3 (#167) --- .github/workflows/unit-tests.yml | 6 +- oaebu_workflows/config.py | 24 +-- .../workflows/google_analytics_telescope.py | 6 +- .../workflows/google_books_telescope.py | 6 +- .../workflows/irus_fulcrum_telescope.py | 6 +- .../workflows/irus_oapen_telescope.py | 6 +- oaebu_workflows/workflows/jstor_telescope.py | 6 +- .../workflows/oapen_metadata_telescope.py | 6 +- oaebu_workflows/workflows/onix_telescope.py | 6 +- oaebu_workflows/workflows/onix_workflow.py | 8 +- .../workflows/tests/test_elastic_workflow.py | 176 ------------------ oaebu_workflows/workflows/thoth_telescope.py | 6 +- .../workflows/ucl_discovery_telescope.py | 6 +- requirements.txt | 7 +- setup.cfg | 15 +- 15 files changed, 44 insertions(+), 246 deletions(-) delete mode 100644 oaebu_workflows/workflows/tests/test_elastic_workflow.py diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 5d50e03f..8630b923 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -29,12 +29,12 @@ jobs: cd .. git clone https://github.com/The-Academic-Observatory/observatory-platform.git cd observatory-platform - pip install -e observatory-api - pip install -e observatory-platform + pip install -e observatory-api --constraint https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-no-providers-${{ matrix.python-version }}.txt + pip install -e observatory-platform --constraint https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-no-providers-${{ matrix.python-version }}.txt cd .. cd oaebu-workflows - pip install -e .[tests] + pip install -e .[tests] --constraint https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-no-providers-${{ matrix.python-version }}.txt # Required for testing ONIX Telescope - name: Set up JDK 11 diff --git a/oaebu_workflows/config.py b/oaebu_workflows/config.py index d497d2f8..d8780ccb 100644 --- a/oaebu_workflows/config.py +++ b/oaebu_workflows/config.py @@ -13,18 +13,16 @@ # limitations under the License. 
# Author: James Diprose

-import os
+
import json
+import os
import re
from typing import Dict

from airflow.exceptions import AirflowException
from observatory.platform.config import module_file_path
-from observatory.platform.workflows.elastic_import_workflow import ElasticImportConfig
from observatory.platform.utils.jinja2_utils import render_template
-from observatory.platform.elastic.kibana import TimeField
-from observatory.platform.elastic.elastic import KeepInfo, KeepOrder


def test_fixtures_folder(*subdirs) -> str:
@@ -98,21 +96,3 @@ def elastic_mappings_folder() -> str:
"""
return module_file_path("oaebu_workflows.database.mappings")
-
-
-ELASTIC_OAEBU_KIBANA_TIME_FIELDS = [
- TimeField("^oaebu-.*-unmatched-book-metrics$", "release_date"),
- TimeField("^oaebu-.*-book-product-list$", "time_field"),
- TimeField("^oaebu-.*$", "month"),
-]
-ELASTIC_INDEX_KEEP_INFO = {
- "": KeepInfo(ordering=KeepOrder.newest, num=3),
- "oaebu": KeepInfo(ordering=KeepOrder.newest, num=3),
-}
-
-ELASTIC_IMPORT_CONFIG = ElasticImportConfig(
- elastic_mappings_path=elastic_mappings_folder(),
- elastic_mappings_func=load_elastic_mappings_oaebu,
- kibana_time_fields=ELASTIC_OAEBU_KIBANA_TIME_FIELDS,
- index_keep_info=ELASTIC_INDEX_KEEP_INFO,
-)
diff --git a/oaebu_workflows/workflows/google_analytics_telescope.py b/oaebu_workflows/workflows/google_analytics_telescope.py
index 9345d819..5d03f521 100644
--- a/oaebu_workflows/workflows/google_analytics_telescope.py
+++ b/oaebu_workflows/workflows/google_analytics_telescope.py
@@ -89,7 +89,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
catchup: bool = True,
start_date: pendulum.DateTime = pendulum.datetime(2018, 1, 1),
- schedule_interval: str = "@monthly",
+ schedule: str = "@monthly",
):
"""Construct a GoogleAnalyticsTelescope instance.
:param dag_id: The ID of the DAG
@@ -107,12 +107,12 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param catchup: Whether to catchup the DAG or not
:param start_date: The start date of the DAG
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
"""
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
airflow_conns=[oaebu_service_account_conn_id, observatory_api_conn_id],
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/google_books_telescope.py b/oaebu_workflows/workflows/google_books_telescope.py
index f2b2a704..86b5b09e 100644
--- a/oaebu_workflows/workflows/google_books_telescope.py
+++ b/oaebu_workflows/workflows/google_books_telescope.py
@@ -86,7 +86,7 @@ def __init__(
sftp_service_conn_id: str = "sftp_service",
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
catchup: bool = False,
- schedule_interval: str = "@weekly",
+ schedule: str = "@weekly",
start_date: pendulum.DateTime = pendulum.datetime(2018, 1, 1),
):
"""Construct a GoogleBooksTelescope instance.
@@ -104,13 +104,13 @@ def __init__(
:param sftp_service_conn_id: Airflow connection ID for the SFTP service
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param catchup: Whether to catchup the DAG or not
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param start_date: The start date of the DAG
"""
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
airflow_conns=[sftp_service_conn_id, observatory_api_conn_id],
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/irus_fulcrum_telescope.py b/oaebu_workflows/workflows/irus_fulcrum_telescope.py
index 0e7f21f4..e293564a 100644
--- a/oaebu_workflows/workflows/irus_fulcrum_telescope.py
+++ b/oaebu_workflows/workflows/irus_fulcrum_telescope.py
@@ -86,7 +86,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
irus_oapen_api_conn_id: str = "irus_api",
catchup: bool = True,
- schedule_interval: str = "0 0 4 * *", # Run on the 4th of every month
+ schedule: str = "0 0 4 * *", # Run on the 4th of every month
start_date: pendulum.DateTime = pendulum.datetime(2022, 4, 1), # Earliest available data
):
"""The Fulcrum Telescope
@@ -102,7 +102,7 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param irus_oapen_api_conn_id: Airflow connection ID for OAPEN IRUS UK (counter 5)
:param catchup: Whether to catchup the DAG or not
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param start_date: The start date of the DAG
"""
if bq_table_description is None:
@@ -111,7 +111,7 @@ def __init__(
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
airflow_conns=[observatory_api_conn_id, irus_oapen_api_conn_id],
catchup=catchup,
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/irus_oapen_telescope.py b/oaebu_workflows/workflows/irus_oapen_telescope.py
index 067617bc..0be0c1de 100644
--- a/oaebu_workflows/workflows/irus_oapen_telescope.py
+++ b/oaebu_workflows/workflows/irus_oapen_telescope.py
@@ -118,7 +118,7 @@ def __init__(
irus_oapen_login_conn_id: str = "irus_login",
catchup: bool = True,
start_date: pendulum.DateTime = pendulum.datetime(2015, 6, 1),
- schedule_interval: str = "0 0 4 * *", # Run on the 4th of every month
+ schedule: str = "0 0 4 * *", # Run on the 4th of every month
max_active_runs: int = 5,
):
"""The OAPEN IRUS UK telescope.
@@ -139,7 +139,7 @@ def __init__(
:param irus_oapen_login_conn_id: The Airflow connection ID for IRUS API (login) - for counter 4
:param catchup: Whether to catchup the DAG or not
:param start_date: The start date of the DAG
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param max_active_runs: The maximum number of concurrent DAG instances
"""
if bq_table_description is None:
@@ -148,7 +148,7 @@ def __init__(
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
airflow_conns=[
observatory_api_conn_id,
diff --git a/oaebu_workflows/workflows/jstor_telescope.py b/oaebu_workflows/workflows/jstor_telescope.py
index 75ea6fb8..6119d3f7 100644
--- a/oaebu_workflows/workflows/jstor_telescope.py
+++ b/oaebu_workflows/workflows/jstor_telescope.py
@@ -119,7 +119,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
catchup: bool = False,
max_active_runs: int = 1,
- schedule_interval: str = "0 0 4 * *", # 4th day of every month
+ schedule: str = "0 0 4 * *", # 4th day of every month
start_date: pendulum.DateTime = pendulum.datetime(2016, 10, 1),
):
"""Construct a JstorTelescope instance.
@@ -138,13 +138,13 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param catchup: Whether to catchup the DAG or not
:param max_active_runs: The maximum number of DAG runs that can be run concurrently
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param start_date: The start date of the DAG
"""
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
airflow_conns=[gmail_api_conn_id, observatory_api_conn_id],
max_active_runs=max_active_runs,
diff --git a/oaebu_workflows/workflows/oapen_metadata_telescope.py b/oaebu_workflows/workflows/oapen_metadata_telescope.py
index 88d5143d..65554ea4 100644
--- a/oaebu_workflows/workflows/oapen_metadata_telescope.py
+++ b/oaebu_workflows/workflows/oapen_metadata_telescope.py
@@ -95,7 +95,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
catchup=False,
start_date: pendulum.DateTime = pendulum.datetime(2018, 5, 14),
- schedule_interval: str = "@weekly",
+ schedule: str = "@weekly",
):
"""Construct an OapenMetadataTelescope instance.
:param dag_id: The ID of the DAG
@@ -109,12 +109,12 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param catchup: Whether to catchup the DAG or not
:param start_date: The start date of the DAG
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
"""
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
airflow_conns=[observatory_api_conn_id],
catchup=catchup,
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/onix_telescope.py b/oaebu_workflows/workflows/onix_telescope.py
index 790ad65a..0fd41240 100644
--- a/oaebu_workflows/workflows/onix_telescope.py
+++ b/oaebu_workflows/workflows/onix_telescope.py
@@ -83,7 +83,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
sftp_service_conn_id: str = "sftp_service",
catchup: bool = False,
- schedule_interval: str = "@weekly",
+ schedule: str = "@weekly",
start_date: pendulum.DateTime = pendulum.datetime(2021, 3, 28),
):
"""Construct an OnixTelescope instance.
@@ -100,13 +100,13 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param sftp_service_conn_id: Airflow connection ID for the SFTP service
:param catchup: Whether to catchup the DAG or not
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param start_date: The start date of the DAG
"""
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
airflow_conns=[observatory_api_conn_id, sftp_service_conn_id],
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/onix_workflow.py b/oaebu_workflows/workflows/onix_workflow.py
index bc79d53b..2830ef9d 100644
--- a/oaebu_workflows/workflows/onix_workflow.py
+++ b/oaebu_workflows/workflows/onix_workflow.py
@@ -162,7 +162,7 @@ def __init__(
sensor_dag_ids: List[str] = None,
catchup: Optional[bool] = False,
start_date: Optional[pendulum.DateTime] = pendulum.datetime(2022, 8, 1),
- schedule_interval: Optional[str] = "@weekly",
+ schedule: Optional[str] = "@weekly",
):
"""
Initialises the workflow object.
@@ -208,7 +208,7 @@ def __init__(
:param sensor_dag_ids: Dag IDs for dependent tasks
:param catchup: Whether to catch up missed DAG runs.
:param start_date: Start date of the DAG.
- :param schedule_interval: Scheduled interval for running the DAG.
+ :param schedule: Scheduled interval for running the DAG.
"""

if not sensor_dag_ids:
@@ -258,7 +258,7 @@ def __init__(
self.sensor_dag_ids = sensor_dag_ids
self.catchup = catchup
self.start_date = start_date
- self.schedule_interval = schedule_interval
+ self.schedule = schedule

# Initialise the data partners
self.data_partners = []
@@ -280,7 +280,7 @@ def __init__(
super().__init__(
dag_id=self.dag_id,
start_date=start_date,
- schedule_interval=schedule_interval,
+ schedule=schedule,
catchup=catchup,
airflow_conns=[observatory_api_conn_id],
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/tests/test_elastic_workflow.py b/oaebu_workflows/workflows/tests/test_elastic_workflow.py
deleted file mode 100644
index 5031f389..00000000
--- a/oaebu_workflows/workflows/tests/test_elastic_workflow.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright 2021 Curtin University
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -# Author: James Diprose - -from __future__ import annotations - -import json -import os - -from oaebu_workflows.config import elastic_mappings_folder, load_elastic_mappings_oaebu -from observatory.platform.observatory_config import Workflow, CloudWorkspace -from observatory.platform.utils.jinja2_utils import render_template -from observatory.platform.observatory_environment import ObservatoryEnvironment, ObservatoryTestCase - - -class TestElasticImportWorkflow(ObservatoryTestCase): - """Tests for the Elastic Import Workflow""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.project_id = os.getenv("TEST_GCP_PROJECT_ID") - self.data_location = os.getenv("TEST_GCP_DATA_LOCATION") - - def test_load_elastic_mappings_oaebu(self): - """Test load_elastic_mappings_oaebu""" - - aggregate_level = "product" - path = elastic_mappings_folder() - expected = [ - ( - "oaebu_anu_press_book_product_author_metrics", - render_template( - os.path.join(path, "oaebu-author-metrics-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_list", - render_template( - os.path.join(path, "oaebu-list-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_metrics", - render_template( - os.path.join(path, "oaebu-metrics-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_metrics_city", - render_template( - os.path.join(path, "oaebu-metrics-city-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_metrics_country", - render_template( - os.path.join(path, "oaebu-metrics-country-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_metrics_events", - render_template( - os.path.join(path, "oaebu-metrics-events-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_book_product_metrics_institution", - render_template( - os.path.join(path, "oaebu-metrics-institution-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_publisher_metrics", - render_template( - os.path.join(path, "oaebu-publisher-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_subject_bic_metrics", - render_template( - os.path.join(path, "oaebu-subject-bic-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_subject_bisac_metrics", - render_template( - os.path.join(path, "oaebu-subject-bisac-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_subject_thema_metrics", - render_template( - os.path.join(path, "oaebu-subject-thema-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_subject_year_metrics", - render_template( - os.path.join(path, "oaebu-subject-year-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_book_product_year_metrics", - render_template( - os.path.join(path, "oaebu-year-metrics-mappings.json.jinja2"), aggregation_level=aggregate_level - ), - ), - ( - "oaebu_anu_press_unmatched_book_metrics", - render_template( - os.path.join(path, "oaebu-unmatched-metrics-mappings.json.jinja2"), - aggregation_level=aggregate_level, - ), - ), - ( - "oaebu_anu_press_unmatched_book_metrics", - 
render_template(
- os.path.join(path, "oaebu-unmatched-metrics-mappings.json.jinja2"),
- aggregation_level=aggregate_level,
- ),
- ),
- (
- "oaebu_anu_press_institution_list",
- render_template(os.path.join(path, "oaebu-institution-list-mappings.json.jinja2")),
- ),
- ]
-
- for table_id, expected_mappings_str in expected:
- print(table_id)
- expected_mappings = json.loads(expected_mappings_str)
- actual_mappings = load_elastic_mappings_oaebu(path, table_id)
- self.assertEqual(expected_mappings, actual_mappings)
-
- def test_dag_load(self):
- """Test that the DAG can be loaded from a DAG bag."""
- env = ObservatoryEnvironment(
- enable_api=False,
- workflows=[
- Workflow(
- dag_id="elastic_import_test",
- name="Elastic Import Workflow",
- class_name="observatory.platform.workflows.elastic_import_workflow.ElasticImportWorkflow",
- cloud_workspace=CloudWorkspace(
- project_id=self.project_id,
- download_bucket="download-bucket",
- transform_bucket="transform-bucket",
- data_location=self.data_location,
- ),
- kwargs=dict(
- sensor_dag_ids=["onix_workflow_test"],
- kibana_spaces=["oaebu-test-press"],
- elastic_import_config="oaebu_workflows.config.ELASTIC_IMPORT_CONFIG",
- ),
- )
- ],
- )
- with env.create():
- self.assert_dag_load_from_config("elastic_import_test")
diff --git a/oaebu_workflows/workflows/thoth_telescope.py b/oaebu_workflows/workflows/thoth_telescope.py
index 9175be98..a6131e97 100644
--- a/oaebu_workflows/workflows/thoth_telescope.py
+++ b/oaebu_workflows/workflows/thoth_telescope.py
@@ -91,7 +91,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
catchup: bool = False,
start_date: pendulum.DateTime = pendulum.datetime(2022, 12, 1),
- schedule_interval: str = "@weekly",
+ schedule: str = "@weekly",
):
"""Construct a ThothOnixTelescope instance.
:param dag_id: The ID of the DAG
@@ -108,12 +108,12 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param catchup: Whether to catchup the DAG or not
:param start_date: The start date of the DAG
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
"""
super().__init__(
dag_id,
start_date=start_date,
- schedule_interval=schedule_interval,
+ schedule=schedule,
airflow_conns=[observatory_api_conn_id],
catchup=catchup,
tags=["oaebu"],
diff --git a/oaebu_workflows/workflows/ucl_discovery_telescope.py b/oaebu_workflows/workflows/ucl_discovery_telescope.py
index 845835cb..befb5991 100644
--- a/oaebu_workflows/workflows/ucl_discovery_telescope.py
+++ b/oaebu_workflows/workflows/ucl_discovery_telescope.py
@@ -88,7 +88,7 @@ def __init__(
observatory_api_conn_id: str = AirflowConns.OBSERVATORY_API,
oaebu_service_account_conn_id: str = "oaebu_service_account",
max_threads: int = os.cpu_count() * 2,
- schedule_interval: str = "0 0 4 * *", # run on the 4th of every month
+ schedule: str = "0 0 4 * *", # run on the 4th of every month
start_date: pendulum.DateTime = pendulum.datetime(2015, 6, 1),
catchup: bool = True,
max_active_runs: int = 10,
@@ -107,7 +107,7 @@ def __init__(
:param observatory_api_conn_id: Airflow connection ID for the observatory API
:param oaebu_service_account_conn_id: Airflow connection ID for the oaebu service account
:param max_threads: The maximum number of threads to utilise for parallel processes
- :param schedule_interval: The schedule interval of the DAG
+ :param schedule: The schedule interval of the DAG
:param start_date: The start date of the DAG
:param catchup: Whether to catchup the DAG or not
:param max_active_runs: The maximum number of concurrent DAG runs
@@ -115,7 +115,7 @@ def __init__(
super().__init__(
dag_id,
start_date,
- schedule_interval,
+ schedule,
catchup=catchup,
max_active_runs=max_active_runs,
airflow_conns=[observatory_api_conn_id, oaebu_service_account_conn_id],
diff --git a/requirements.txt b/requirements.txt
index 7e65eedf..92cb54b2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,4 @@
beautifulsoup4>=4.9.3,<5
-faker>=8.12.1,<9
-Markdown==3.3.4 # prevent error: INSTALLED_EXTENSIONS = metadata.entry_points(group='markdown.extensions') TypeError: entry_points() got an unexpected keyword argument 'group'
-responses==0.20.*
-onixcheck==0.9.7
-ratelimit==2.2.1
+onixcheck>=0.9.7,<1
+ratelimit>=2.2.1,<3
diff --git a/setup.cfg b/setup.cfg
index 88fb8959..83122142 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,16 +1,16 @@
[metadata]
name = oaebu-workflows
author = Curtin University
-author-email = agent@observatory.academy
+author_email = agent@observatory.academy
summary = Oaebu Workflows provides Apache Airflow Workflows for fetching, processing and analysing data about open access books.
-description-file = README.md -description-content-type = text/markdown; charset=UTF-8 -home-page = https://github.com/The-Academic-Observatory/oaebu-workflows +description_file = README.md +description_content_type = text/markdown; charset=UTF-8 +home_page = https://github.com/The-Academic-Observatory/oaebu-workflows project_urls = Bug Tracker = https://github.com/The-Academic-Observatory/oaebu-workflows/issues Documentation = https://oaebu-workflows.readthedocs.io/en/latest/ Source Code = https://github.com/The-Academic-Observatory/oaebu-workflows -python-requires = >=3.7 +python_requires = >=3.7 license = Apache License Version 2.0 classifier = Development Status :: 2 - Pre-Alpha @@ -49,10 +49,7 @@ tests = coverage>=5.2,<6 faker>=8.12.1,<9 vcrpy>=4.1.1,<5 + responses>=0,<1 [pbr] skip_authors = true - -[entry_points] -console_scripts = - oaebu-workflows-seed = oaebu_workflows.seed.seed:seed \ No newline at end of file
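
The central change in this patch is the rename of schedule_interval to schedule across every telescope constructor: Airflow 2.4 deprecated the schedule_interval DAG argument in favour of the unified schedule argument, so workflow code targeting Airflow 2.6.3 passes schedule through to the DAG instead. Below is a minimal sketch of what the rename means at the DAG level, assuming plain Airflow 2.6 rather than the observatory-platform Workflow base class used above; the dag_id and task are illustrative, not from this repository.

import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator

# Airflow 2.4+ accepts "schedule" (a cron string, preset, timedelta, timetable,
# or datasets) in place of the deprecated "schedule_interval" argument.
with DAG(
    dag_id="example_telescope",  # illustrative name only
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule="0 0 4 * *",  # previously: schedule_interval="0 0 4 * *"
    catchup=False,
    tags=["oaebu"],
) as dag:
    EmptyOperator(task_id="noop")

The CI change at the top of the patch serves the same upgrade: passing pip's --constraint option pointed at the Airflow 2.6.3 constraints file keeps transitive dependencies at the versions that Airflow release was tested against.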