diff --git a/README.rst b/README.rst index 80843d7..8340d79 100644 --- a/README.rst +++ b/README.rst @@ -50,9 +50,9 @@ These files should be located at ``/edx/app/edxapp/`` directory, see the example in the following files: -- ``lms/envs/aws.py (production.py for ironwood release)`` +- ``lms/envs/production.py (aws.py for hawthorn release)`` -- ``cms/envs/aws.py (production.py for ironwood release)`` +- ``cms/envs/production.py (aws.py for hawthorn release)`` **Note:** @@ -100,9 +100,9 @@ These files should be located at ``/edx/app/edxapp/`` directory, see the example in the following files: -- ``lms/envs/aws.py (production.py for ironwood release)`` +- ``lms/envs/production.py (aws.py for hawthorn release)`` -- ``cms/envs/aws.py (production.py for ironwood release)`` +- ``cms/envs/production.py (aws.py for hawthorn release)`` Using Kafka Broker API ********************** @@ -127,38 +127,95 @@ These files should be located at ``/edx/app/edxapp/`` directory, see the example } 2. Add the following keys and their values in the ``lms.env.json`` and ``cms.env.json`` files. +Please note that all parameters in the `PRODUCER_CONFIG` are unique to the broker instances. You +can set whatever parameters are required for your instance. + :: "CALIPER_KAFKA_SETTINGS": { - "MAXIMUM_RETRIES": , - "END_POINT": "kafka endpoint", - "TOPIC_NAME": "topic name", - "ERROR_REPORT_EMAIL": "support@example.com" - } + "PRODUCER_CONFIG": { + "bootstrap_servers": [ + "" + ], + ... 
+ }, + + "TOPIC_NAME": "", + + "ERROR_REPORT_EMAIL": "", + "MAXIMUM_RETRIES": + }, +------------------+------------------------------------------------------------------------------+ |Keys | Description | +==================+==============================================================================+ |MAXIMUM_RETRIES |Number of times the app will try to send the logs to Kafka in case of failure | +------------------+------------------------------------------------------------------------------+ -|END_POINT |URL for Kafka Broker | +|PRODUCER_CONFIG |Configurations for initializing the Kafka Producer | +| | | +| |Can further contain: | +| | - "bootstrap_servers": | +| | - List of Kafka Brokers URLs | +| | - Any other supported parameter in the `Kafka-python docs`_ | +| | - Please note that it's better to store the sensitive information in | +| | the `*.auth.json` files | +------------------+------------------------------------------------------------------------------+ |TOPIC_NAME |Topic name for the Kafka broker | +------------------+------------------------------------------------------------------------------+ |ERROR_REPORT_EMAIL|Email Address to notify when number of failures exceeds the MAXIMUM_RETRIES | +------------------+------------------------------------------------------------------------------+ -3. Add the following lines of code: +3. Add the following keys and their values in the ``lms.auth.json`` and ``cms.auth.json`` files. +Please note that all parameters in the `PRODUCER_CONFIG` are unique to the broker instances. You +can set whatever parameters are required for your instance. + +:: + + "CALIPER_KAFKA_AUTH_SETTINGS": { + "PRODUCER_CONFIG": { + ... + "sasl_plain_username": "", + "sasl_plain_password": "", + "security_protocol": "", + "ssl_cafile": "", + ... 
+ } + } + ++------------------+------------------------------------------------------------------------------+ +|Keys | Description | ++==================+==============================================================================+ +|PRODUCER_CONFIG |Configurations for initializing the Kafka Producer. Use this configuration to | +| |store all sensitive configuration like authentication parameters. | +| | | +| |For example: | +| | - Use this to configure parameters like: | +| | - sasl_plain_username | +| | - sasl_plain_password | +| | - security_protocol | +| | - sasl_mechanism | +| | | +| |It can further contain: | +| | - Any other supported parameter in the `Kafka-python docs`_ | +| | - Please note that it's better to store the non-sensitive information | +| | in the `*.env.json` files | ++------------------+------------------------------------------------------------------------------+ + +.. _Kafka-python docs: https://kafka-python.readthedocs.io/en/2.0.1/apidoc/KafkaProducer.html#kafka.KafkaProducer + +4. 
Add the following lines of code: + :: if FEATURES.get('ENABLE_KAFKA_FOR_CALIPER'): CALIPER_KAFKA_SETTINGS = ENV_TOKENS.get('CALIPER_KAFKA_SETTINGS') + CALIPER_KAFKA_AUTH_SETTINGS = AUTH_TOKENS.get('CALIPER_KAFKA_AUTH_SETTINGS') in the following files: -- ``lms/envs/aws.py (production.py for ironwood release)`` +- ``lms/envs/production.py (aws.py for hawthorn release)`` -- ``cms/envs/aws.py (production.py for ironwood release)`` +- ``cms/envs/production.py (aws.py for hawthorn release)`` Location of Transformed Logs ############################ diff --git a/openedx_caliper_tracking/exceptions.py b/openedx_caliper_tracking/exceptions.py new file mode 100644 index 0000000..5188fcc --- /dev/null +++ b/openedx_caliper_tracking/exceptions.py @@ -0,0 +1,5 @@ +"""Exceptions for the app""" + + +class InvalidConfigurationsError(Exception): + pass diff --git a/openedx_caliper_tracking/kafka_utils.py b/openedx_caliper_tracking/kafka_utils.py new file mode 100644 index 0000000..56f9069 --- /dev/null +++ b/openedx_caliper_tracking/kafka_utils.py @@ -0,0 +1,19 @@ +import logging + +from django.conf import settings + +LOGGER = logging.getLogger(__name__) + + +def get_kafka_producer_configurations(): + """ + Return the configurations required to initialize the KafkaProducer object. 
+ """ + try: + configurations = {} + configurations.update(settings.CALIPER_KAFKA_SETTINGS.get('PRODUCER_CONFIG', {})) + configurations.update(settings.CALIPER_KAFKA_AUTH_SETTINGS.get('PRODUCER_CONFIG', {})) + return configurations + + except KeyError as ex: + LOGGER.exception('Invalid or no configurations are provided for KafkaProducer: %s', str(ex)) diff --git a/openedx_caliper_tracking/tasks.py b/openedx_caliper_tracking/tasks.py index 074b975..73e967a 100644 --- a/openedx_caliper_tracking/tasks.py +++ b/openedx_caliper_tracking/tasks.py @@ -12,21 +12,25 @@ from kafka.errors import KafkaError from openedx_caliper_tracking.utils import send_notification +from openedx_caliper_tracking.exceptions import InvalidConfigurationsError +from openedx_caliper_tracking.kafka_utils import get_kafka_producer_configurations from openedx_caliper_tracking.loggers import get_caliper_logger LOGGER = logging.getLogger(__name__) -CALIPER_DELIVERY_FAILURE_LOGGER = get_caliper_logger('caliper_delivery_failure', 'local3') -DEFAULT_FROM_EMAIL = settings.DEFAULT_FROM_EMAIL +CALIPER_DELIVERY_FAILURE_LOGGER = get_caliper_logger( + 'caliper_delivery_failure', 'local3' +) + EMAIL_DELIVERY_CACHE_KEY = 'IS_KAFKA_DELIVERY_FAILURE_EMAIL_SENT' HOST_ERROR_CACHE_KEY = 'HOST_NOT_FOUND_ERROR' +DEFAULT_FROM_EMAIL = settings.DEFAULT_FROM_EMAIL +REPORT_EMAIL_VALIDITY_PERIOD = 86400 # in ms. Equals to one day. -def _get_kafka_setting(key): - if hasattr(settings, 'CALIPER_KAFKA_SETTINGS'): - return settings.CALIPER_KAFKA_SETTINGS.get(key) +MAXIMUM_RETRIES = getattr(settings, 'CALIPER_KAFKA_SETTINGS', {}).get('MAXIMUM_RETRIES', 3) -@task(bind=True, max_retries=_get_kafka_setting('MAXIMUM_RETRIES')) +@task(bind=True, max_retries=MAXIMUM_RETRIES) def deliver_caliper_event_to_kafka(self, transformed_event, event_type): """ Deliver caliper event to kafka. 
@@ -34,15 +38,46 @@ def deliver_caliper_event_to_kafka(self, transformed_event, event_type): Retries for the given number of max_tries in case of any error else sends an error report to the specified email address. """ + KAFKA_SETTINGS = settings.CALIPER_KAFKA_SETTINGS + + bootstrap_servers = KAFKA_SETTINGS['PRODUCER_CONFIG']['bootstrap_servers'] + topic_name = KAFKA_SETTINGS['TOPIC_NAME'] + try: LOGGER.info('Attempt # {} of sending event: {} to kafka ({}) is in progress.'.format( - self.request_stack().get('retries'), event_type, _get_kafka_setting('END_POINT'))) + self.request_stack().get('retries'), event_type, bootstrap_servers)) + + producer_configrations = get_kafka_producer_configurations() + + try: + producer = KafkaProducer( + value_serializer=lambda v: json.dumps(v).encode('utf-8'), + **producer_configrations + ) + + # Invalid/unsupported arguments are provided + except TypeError as ex: + LOGGER.exception( + 'Invalid configurations are provided for KafkaProducer: %s', str(ex)) + raise InvalidConfigurationsError('Invalid Configurations are provided') + + # Most probably a certificate file was not found. 
+ except IOError as ex: + LOGGER.exception( + 'Configured Certificate is not found: %s', str(ex) + ) + raise InvalidConfigurationsError('Invalid Configurations are provided') + + except Exception as ex: + LOGGER.exception( + 'Error occurred while trying to configure Kafka: %s', str(ex) + ) + raise InvalidConfigurationsError('Invalid Configurations are provided') + + producer.send(topic_name, transformed_event).add_errback(host_not_found, + event=transformed_event, + event_type=event_type) - producer = KafkaProducer(bootstrap_servers=_get_kafka_setting('END_POINT'), - value_serializer=lambda v: json.dumps(v).encode('utf-8')) - producer.send(_get_kafka_setting('TOPIC_NAME'), transformed_event).add_errback(host_not_found, - event=transformed_event, - event_type=event_type) producer.flush() if cache.get(HOST_ERROR_CACHE_KEY): @@ -51,21 +86,33 @@ def deliver_caliper_event_to_kafka(self, transformed_event, event_type): if cache.get(EMAIL_DELIVERY_CACHE_KEY): send_system_recovery_email.delay() - cache.set(EMAIL_DELIVERY_CACHE_KEY, False) + cache.set(EMAIL_DELIVERY_CACHE_KEY, False) LOGGER.info('Logs Delivered Successfully: Event ({}) has been successfully sent to kafka ({}).'.format( - event_type, _get_kafka_setting('END_POINT'))) + event_type, bootstrap_servers)) except KafkaError as error: LOGGER.error(('Logs Delivery Failed: Could not deliver event ({}) to kafka ({}) because' - ' of {}.').format(event_type, _get_kafka_setting('END_POINT'), error.__class__.__name__)) + ' of {}.').format(event_type, bootstrap_servers, error.__class__.__name__)) - if self.request_stack().get('retries') == _get_kafka_setting('MAXIMUM_RETRIES'): + if self.request_stack().get('retries') == KAFKA_SETTINGS['MAXIMUM_RETRIES']: CALIPER_DELIVERY_FAILURE_LOGGER.info(json.dumps(transformed_event)) sent_kafka_failure_email.delay(error.__class__.__name__) return - self.retry(exc=error, countdown=int(random.uniform(2, 4) ** self.request.retries)) + self.retry(exc=error, countdown=int( + 
random.uniform(2, 4) ** self.request.retries)) + + except InvalidConfigurationsError as ex: + # No need to retry the task if there is some configurations issue. + LOGGER.error(('Logs Delivery Failed: Could not deliver event ({}) to kafka ({}) due' + ' to the error: {}').format( + event_type, + bootstrap_servers, + str(ex) + )) + + sent_kafka_failure_email.delay(ex.__class__.__name__) def host_not_found(error, event, event_type): @@ -76,7 +123,10 @@ def host_not_found(error, event, event_type): """ HOST_NOT_FOUND_ERROR = 'Host Not Found' LOGGER.error('Logs Delivery Failed: Could not deliver event ({}) to kafka ({}) because of {}.'.format( - event_type, _get_kafka_setting('END_POINT'), HOST_NOT_FOUND_ERROR)) + event_type, + settings.CALIPER_KAFKA_SETTINGS['PRODUCER_CONFIG']['bootstrap_servers'], + HOST_NOT_FOUND_ERROR + )) cache.set(HOST_ERROR_CACHE_KEY, True) sent_kafka_failure_email.delay(HOST_NOT_FOUND_ERROR) @@ -86,9 +136,13 @@ def sent_kafka_failure_email(self, error): """ Send error report to specified email address. """ + reporting_email = settings.CALIPER_KAFKA_SETTINGS.get('ERROR_REPORT_EMAIL') + if not reporting_email: + return + if cache.get(EMAIL_DELIVERY_CACHE_KEY): LOGGER.info('Email Already Sent: Events delivery failure report has been already sent to {}.'.format( - _get_kafka_setting('ERROR_REPORT_EMAIL'))) + reporting_email)) return data = { @@ -97,16 +151,17 @@ def sent_kafka_failure_email(self, error): 'error': error } subject = 'Failure in logs delivery to Kafka' - if send_notification(data, subject, DEFAULT_FROM_EMAIL, [_get_kafka_setting('ERROR_REPORT_EMAIL')]): + if send_notification(data, subject, DEFAULT_FROM_EMAIL, [reporting_email]): success_message = 'Email Sent Successfully: Events delivery failure report sent to {}.'.format( - _get_kafka_setting('ERROR_REPORT_EMAIL')) + reporting_email) # after one day if the delivery of events to kafka still fails, # email failure delivery report again. 
- cache.set(EMAIL_DELIVERY_CACHE_KEY, True, timeout=86400) + cache.set(EMAIL_DELIVERY_CACHE_KEY, True, + timeout=REPORT_EMAIL_VALIDITY_PERIOD) LOGGER.info(success_message) else: failure_message = 'Email Sending Failed: Could not send events delivery failure report to {}.'.format( - _get_kafka_setting('ERROR_REPORT_EMAIL')) + reporting_email) LOGGER.error(failure_message) @@ -115,16 +170,20 @@ def send_system_recovery_email(self): """ Send system recovery report to specified email address. """ + reporting_email = settings.CALIPER_KAFKA_SETTINGS.get('ERROR_REPORT_EMAIL') + if not reporting_email: + return + data = { 'name': 'UCSD Support', 'body': 'System has been recovered. Now Caliper logs are being successfully delivered to kafka.', } subject = 'Success in logs delivery to Kafka' - if send_notification(data, subject, DEFAULT_FROM_EMAIL, [_get_kafka_setting('ERROR_REPORT_EMAIL')]): + if send_notification(data, subject, DEFAULT_FROM_EMAIL, [reporting_email]): success_message = 'Email Sent Successfully: Events delivery success report sent to {}.'.format( - _get_kafka_setting('ERROR_REPORT_EMAIL')) + reporting_email) LOGGER.info(success_message) else: failure_message = 'Email Sending Failed: Could not send events delivery success report to {}.'.format( - _get_kafka_setting('ERROR_REPORT_EMAIL')) + reporting_email) LOGGER.error(failure_message) diff --git a/openedx_caliper_tracking/tests/test_caliper_kafka.py b/openedx_caliper_tracking/tests/test_caliper_kafka.py index 3cace97..c5e0fdd 100644 --- a/openedx_caliper_tracking/tests/test_caliper_kafka.py +++ b/openedx_caliper_tracking/tests/test_caliper_kafka.py @@ -15,6 +15,20 @@ from openedx_caliper_tracking.tests import TEST_DIR_PATH +CALIPER_KAFKA_SETTINGS_FIXTURE = { + 'PRODUCER_CONFIG': { + 'bootstrap_servers': [ + 'testing.com', + ] + }, + 'TOPIC_NAME': 'dummy topic', + 'ERROR_REPORT_EMAIL': 'dummy@example.com', + 'MAXIMUM_RETRIES': 3 +} + +CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE = {} + + class 
CaliperKafkaTestCase(TestCase): def setUp(self): @@ -31,12 +45,8 @@ def setUp(self): ) @override_settings( LMS_ROOT_URL='https://localhost:18000', - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 3 - }, + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE, FEATURES={'ENABLE_KAFKA_FOR_CALIPER': True} ) def test_caliper_event_is_delivered_to_kafka_without_error_using_celery(self, delivery_mock): @@ -60,12 +70,8 @@ def test_caliper_event_is_delivered_to_kafka_without_error_using_celery(self, de autospec=True ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 3 - }, + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_deliver_caliper_event_to_kafka_without_using_celery_without_error(self, producer_mock, sent_email_mock, logger_mock): @@ -78,12 +84,13 @@ def test_deliver_caliper_event_to_kafka_without_using_celery_without_error(self, self.assertFalse(sent_email_mock.called) self.assertFalse(logger_mock.error.called) logger_mock.info.assert_called_with('Logs Delivered Successfully: Event (book) has been successfully' - ' sent to kafka (http://localhost:9092).') + ' sent to kafka ([\'testing.com\']).') @mock.patch( 'openedx_caliper_tracking.tasks.cache.get', autospec=True, - side_effect=lambda CACHE_KEY: {HOST_ERROR_CACHE_KEY: False, EMAIL_DELIVERY_CACHE_KEY: True}[CACHE_KEY] + side_effect=lambda CACHE_KEY: { + HOST_ERROR_CACHE_KEY: False, EMAIL_DELIVERY_CACHE_KEY: True}[CACHE_KEY] ) @mock.patch( 'openedx_caliper_tracking.tasks.send_system_recovery_email.delay', @@ -102,12 +109,8 @@ def test_deliver_caliper_event_to_kafka_without_using_celery_without_error(self, autospec=True ) 
@override_settings( - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 3 - }, + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_deliver_caliper_event_to_kafka_without_celery_without_error_with_system_recovered(self, producer_mock, sent_email_mock, @@ -125,7 +128,7 @@ def test_deliver_caliper_event_to_kafka_without_celery_without_error_with_system self.assertTrue(recovery_mail_mock.called) self.assertTrue(cache_mock.called) logger_mock.info.assert_called_with('Logs Delivered Successfully: Event (book) has been successfully' - ' sent to kafka (http://localhost:9092).') + ' sent to kafka ([\'testing.com\']).') @mock.patch( 'openedx_caliper_tracking.tasks.cache.get', @@ -137,12 +140,8 @@ def test_deliver_caliper_event_to_kafka_without_celery_without_error_with_system autospec=True ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 3 - }, + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_deliver_caliper_event_to_kafka_without_celery_with_host_not_found_error_already_occurred(self, producer_mock, @@ -169,25 +168,23 @@ def test_deliver_caliper_event_to_kafka_without_celery_with_host_not_found_error side_effect=KafkaError ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 3 - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_deliver_caliper_event_to_kafka_without_celery_with_error_with_retry(self, producer_mock, sent_email_mock, logger_mock, 
retry_mock): """ Test that caliper event is not delivered to kafka - when error is occurred and retry code is executed. + when error is occurred and retry code is not executed if + there is some issue with the configurations. """ deliver_caliper_event_to_kafka({}, 'book') self.assertTrue(producer_mock.called) - self.assertFalse(sent_email_mock.called) + self.assertTrue(sent_email_mock.called) logger_mock.error.assert_called_with('Logs Delivery Failed: Could not deliver event (book) to kafka' - ' (http://localhost:9092) because of KafkaError.') - self.assertTrue(retry_mock.called) + ' ([\'testing.com\']) due to the error:' + ' Invalid Configurations are provided') + self.assertFalse(retry_mock.called) @mock.patch( 'openedx_caliper_tracking.tasks.LOGGER', @@ -203,12 +200,8 @@ def test_deliver_caliper_event_to_kafka_without_celery_with_error_with_retry(sel side_effect=KafkaError ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'END_POINT': 'http://localhost:9092', - 'TOPIC_NAME': 'dummy topic', - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - 'MAXIMUM_RETRIES': 0 - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_deliver_caliper_event_to_kafka_without_celery_with_error_without_retry(self, producer_mock, sent_email_mock, logger_mock): @@ -220,12 +213,17 @@ def test_deliver_caliper_event_to_kafka_without_celery_with_error_without_retry( self.assertTrue(producer_mock.called) self.assertTrue(sent_email_mock.called) logger_mock.error.assert_called_with('Logs Delivery Failed: Could not deliver event (book) to kafka' - ' (http://localhost:9092) because of KafkaError.') + ' ([\'testing.com\']) due to the error:' + ' Invalid Configurations are provided') @mock.patch( 'openedx_caliper_tracking.tasks.sent_kafka_failure_email.delay', autospec=True, ) + @override_settings( + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE + 
) def test_host_not_found_error(self, sent_email_mock): host_not_found(mock.MagicMock(), self.event, 'book') self.assertTrue(sent_email_mock.called) @@ -240,9 +238,8 @@ def test_host_not_found_error(self, sent_email_mock): return_value=True ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_sent_kafka_failure_email_with_success(self, send_notification_mock, logger_mock): """ @@ -263,9 +260,8 @@ def test_sent_kafka_failure_email_with_success(self, send_notification_mock, log return_value=True ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_sent_kafka_failure_email_with_email_already_sent(self, cache_mock, logger_mock): """ @@ -286,9 +282,8 @@ def test_sent_kafka_failure_email_with_email_already_sent(self, cache_mock, logg return_value=False ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_sent_kafka_failure_email_with_failure(self, send_notification_mock, logger_mock): """ @@ -309,9 +304,8 @@ def test_sent_kafka_failure_email_with_failure(self, send_notification_mock, log return_value=True ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_send_system_recovery_email_with_success(self, send_notification_mock, logger_mock): send_system_recovery_email() @@ -329,9 +323,8 @@ def test_send_system_recovery_email_with_success(self, send_notification_mock, l 
return_value=False ) @override_settings( - CALIPER_KAFKA_SETTINGS={ - 'ERROR_REPORT_EMAIL': 'dummy@example.com', - } + CALIPER_KAFKA_SETTINGS=CALIPER_KAFKA_SETTINGS_FIXTURE, + CALIPER_KAFKA_AUTH_SETTINGS=CALIPER_KAFKA_AUTH_SETTINGS_FIXTURE ) def test_send_system_recovery_email_with_failure(self, send_notification_mock, logger_mock): """ diff --git a/setup.py b/setup.py index bd2b347..6b0b45e 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ setup( name='openedx-caliper-tracking', - version='0.11.8', + version='0.12.0', packages=find_packages(), include_package_data=True, license='GPL 3.0', @@ -18,7 +18,7 @@ url='https://github.com/ucsd-ets/caliper-tracking', author='UC San Diego', install_requires=[ - 'kafka-python==1.4.7' + 'kafka-python==2.0.1' ], classifiers=[ 'Environment :: Web Environment',