From 05d71a4865bf7218ed52ac00bbfd9869ddc78d54 Mon Sep 17 00:00:00 2001
From: sauravsapkota
Date: Mon, 29 Jul 2024 17:23:43 +0545
Subject: [PATCH] Add test cases for Analysis Mutation

---
 apps/analysis/mutation.py             |  29 +--
 apps/analysis/serializers.py          |  37 ++-
 apps/analysis/tests/test_mutations.py | 358 ++++++++++++++++++++++++++
 schema.graphql                        |  21 +-
 4 files changed, 403 insertions(+), 42 deletions(-)

diff --git a/apps/analysis/mutation.py b/apps/analysis/mutation.py
index 4e34220516..008785d341 100644
--- a/apps/analysis/mutation.py
+++ b/apps/analysis/mutation.py
@@ -4,7 +4,6 @@
     generate_input_type_for_serializer,
     PsGrapheneMutation,
     PsDeleteMutation,
-    PsBulkGrapheneMutation,
 )

 from deep.permissions import ProjectPermissions as PP
@@ -278,7 +277,13 @@ class Arguments:
     result = graphene.Field(AnalysisReportUploadType)


-class CreateAnalysis(RequiredPermissionMixin, PsGrapheneMutation):
+class AnalysisMutationMixin(RequiredPermissionMixin):
+    @classmethod
+    def filter_queryset(cls, qs, info):
+        return qs.filter(project=info.context.active_project)
+
+
+class CreateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
     class Arguments:
         data = AnalysisInputType(required=True)
     model = Analysis
@@ -286,7 +291,7 @@ class Arguments:
     result = graphene.Field(AnalysisType)


-class UpdateAnalysis(RequiredPermissionMixin, PsGrapheneMutation):
+class UpdateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
     class Arguments:
         data = AnalysisInputType(required=True)
         id = graphene.ID(required=True)
@@ -295,28 +300,13 @@ class Arguments:
     result = graphene.Field(AnalysisType)


-class DeleteAnalysis(RequiredPermissionMixin, PsDeleteMutation):
+class DeleteAnalysis(AnalysisMutationMixin, PsDeleteMutation):
     class Arguments:
         id = graphene.ID(required=True)
     model = Analysis
     result = graphene.Field(AnalysisType)


-class BulkAnalysisInputType(AnalysisInputType):
-    id = graphene.ID()
-
-
-class BulkAnalysis(RequiredPermissionMixin, PsBulkGrapheneMutation):
-    class Arguments:
-        items = graphene.List(graphene.NonNull(BulkAnalysisInputType))
-        delete_ids = graphene.List(graphene.NonNull(graphene.ID))
-
-    result = graphene.List(AnalysisType)
-    deleted_result = graphene.List(graphene.NonNull(AnalysisType))
-    model = Analysis
-    serializer_class = AnalysisGqlSerializer
-
-
 class Mutation():
     # Analysis Pillar
     analysis_pillar_update = UpdateAnalysisPillar.Field()
@@ -341,4 +331,3 @@ class Mutation():
     analysis_create = CreateAnalysis.Field()
     analysis_update = UpdateAnalysis.Field()
     analysis_delete = DeleteAnalysis.Field()
-    analysis_bulk = BulkAnalysis.Field()
diff --git a/apps/analysis/serializers.py b/apps/analysis/serializers.py
index de39bb4ec4..6295033b9c 100644
--- a/apps/analysis/serializers.py
+++ b/apps/analysis/serializers.py
@@ -331,11 +331,13 @@ def validate(self, data):


 class AnalysisPillarGqlSerializer(TempClientIdMixin, UserResourceSerializer):
+    id = IntegerIDField(required=False)
     statements = AnalyticalStatementGqlSerializer(many=True, source='analyticalstatement_set', required=False)

     class Meta:
         model = AnalysisPillar
         fields = (
+            'id',
             'title',
             'main_statement',
             'information_gap',
@@ -410,19 +412,16 @@ def validate(self, data):


 class AnalysisGqlSerializer(UserResourceSerializer, ProjectPropertySerializerMixin):
-    id = IntegerIDField(required=False)
     analysis_pillar = AnalysisPillarGqlSerializer(many=True, source='analysispillar_set', required=False)
     start_date = serializers.DateField(required=False, allow_null=True)

     class Meta:
         model = Analysis
         fields = (
-            'id',
             'title',
             'team_lead',
             'start_date',
             'end_date',
-            'cloned_from',
             'analysis_pillar',
         )

@@ -441,6 +440,38 @@ def validate(self, data):
             )
         return data

+    def update(self, instance, validated_data):
+        with transaction.atomic():
+            if 'analysispillar_set' in validated_data:
+                pillars = validated_data.pop('analysispillar_set')
+                errors = {}
+                for pillar in pillars:
+                    data = {
+                        "title": pillar.get('title'),
+                        "assignee": pillar.get('assignee').id,
+                        "analysis": instance.id,
+                    }
+                    pillar_id = pillar.get('id', None)
+                    if pillar_id:
+                        data["id"] = pillar_id
+                        analysis_pillar = get_object_or_404(AnalysisPillar, pk=pillar_id)
+                        analysis_pillar_serializer = AnalysisPillarGqlSerializer(
+                            analysis_pillar,
+                            data=data,
+                            context=self.context
+                        )
+                    else:
+                        analysis_pillar_serializer = AnalysisPillarGqlSerializer(data=data, context=self.context)
+                    if analysis_pillar_serializer.is_valid():
+                        analysis_pillar_serializer.save()
+                    else:
+                        errors[pillar.get('id', 'new')] = analysis_pillar_serializer.errors
+
+                if errors:
+                    raise serializers.ValidationError(errors)
+
+            return super().update(instance, validated_data)
+


 AnalysisCloneGqlSerializer = AnalysisCloneInputSerializer
diff --git a/apps/analysis/tests/test_mutations.py b/apps/analysis/tests/test_mutations.py
index 5e00373901..154620a1df 100644
--- a/apps/analysis/tests/test_mutations.py
+++ b/apps/analysis/tests/test_mutations.py
@@ -9,6 +9,7 @@

 from commons.schema_snapshots import SnapshotQuery
 from user.factories import UserFactory
+from project.models import Project
 from project.factories import ProjectFactory
 from lead.factories import LeadFactory
 from entry.factories import EntryFactory
@@ -27,6 +28,7 @@
     AnalyticalStatementNGram,
     AnalyticalStatementGeoTask,
     AnalysisReportSnapshot,
+    AnalysisPillar,
 )


@@ -1326,3 +1328,359 @@ def _query_check(_id, **kwargs):
         else:
             self.force_login(user)
         assert _query_public_snapshot_check(snapshot_slug)['data']['publicAnalysisReportSnapshot'] is not None
+
+
+class TestAnalysisMutationSchema(GraphQLTestCase):
+    CREATE_MUTATION = '''
+        mutation MyMutation($analysisData: AnalysisInputType!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisCreate(
+                data: $analysisData
+            ) {
+              errors
+              ok
+              result {
+                id
+                endDate
+                title
+                teamLead {
+                  id
+                }
+                pillars {
+                  analysisId
+                  id
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    UPDATE_MUTATION = '''
+        mutation MyMutation($analysisUpdate: AnalysisInputType!, $analysisID: ID!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisUpdate(data: $analysisUpdate, id: $analysisID) {
+              errors
+              ok
+              result {
+                id
+                endDate
+                title
+                teamLead {
+                  id
+                }
+                pillars {
+                  analysisId
+                  id
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    DELETE_MUTATION = '''
+        mutation MyMutation($projectId: ID!, $deleteId: ID!) {
+          project(id: $projectId) {
+            analysisDelete(id: $deleteId) {
+              errors
+              result {
+                id
+                title
+                pillars {
+                  analysisId
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    def setUp(self):
+        super().setUp()
+        self.af = AnalysisFrameworkFactory.create()
+        self.project_with_af = ProjectFactory.create(analysis_framework=self.af, status=Project.Status.ACTIVE)
+        self.project_without_af = ProjectFactory.create()
+        # Users with different roles
+        self.non_member_user = UserFactory.create()
+        self.readonly_member_user = UserFactory.create()
+        self.member_user = UserFactory.create()
+        self.project_with_af.add_member(self.readonly_member_user, role=self.project_role_reader_non_confidential)
+        self.project_with_af.add_member(self.member_user, role=self.project_role_member)
+        self.analysis, self.analysis1 = AnalysisFactory.create_batch(
+            2,
+            project=self.project_with_af,
+            team_lead=self.member_user,
+            end_date=datetime.date(2022, 4, 1),
+        )
+        self.analysis_pillar1, self.analysis_pillar2, self.analysis_pillar3 = AnalysisPillarFactory.create_batch(
+            3,
+            analysis=self.analysis,
+            assignee=self.member_user,
+        )
+
+    def test_create_analysis_without_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.CREATE_MUTATION,
+                variables=self.minput,
+                **kwargs
+            )
+
+        self.minput = dict(
+            analysisData=dict(
+                title='Test Analysis', teamLead=self.member_user.id, endDate='2020-01-01'
+            ),
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisCreate']['result']
+        self.assertEqual(analysis_resp_data['title'], self.minput['analysisData']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.minput['analysisData']['endDate']))
+
+    def test_create_analysis_with_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.CREATE_MUTATION,
+                variables=self.minput,
+                **kwargs
+            )
+
+        self.minput = dict(
+            analysisData=dict(
+                title='Updated Analysis',
+                teamLead=self.member_user.id,
+                endDate='2022-01-01',
+                analysisPillar=[
+                    dict(
+                        title=str("Analysis pillar 1"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                    dict(
+                        title=str("Analysis Pillar 2"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis1.id)
+                    ),
+                    dict(
+                        title=str("Analysis Pillar 3"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                ]
+            ),
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisCreate']['result']
+        self.assertEqual(analysis_resp_data['title'], self.minput['analysisData']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.minput['analysisData']['endDate']))
+        for each in analysis_resp_data['pillars']:
+            self.assertEqual(each['analysisId'], str(analysis_resp_data['id']))
+
+    def test_create_analysis_without_analysis_framework(self):
+        minput = dict(
+            analysisData=dict(
+                title='Test Analysis', teamLead=self.member_user.id, endDate='2020-01-01'
+            ),
+            projectId=self.project_without_af.id,
+        )
+
+        self.force_login(self.member_user)
+        self.query_check(self.CREATE_MUTATION, variables=minput, assert_for_error=True)
+
+    def test_update_analysis(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.UPDATE_MUTATION,
+                variables=self.update_minput,
+                **kwargs
+            )
+
+        self.update_minput = dict(
+            analysisUpdate=dict(
+                title='Updated Analysis',
+                teamLead=self.member_user.id,
+                endDate='2022-01-01',
+                analysisPillar=[
+                    dict(
+                        id=int(self.analysis_pillar1.id),
+                        title=str("Updated Analysis pillar1"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                    dict(
+                        id=int(self.analysis_pillar3.id),
+                        title=str("Updated Analysis pillar3"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis1.id)
+                    ),
+                    dict(
+                        title=str("Analysis pillar5"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                ]
+            ),
+            analysisID=self.analysis.id,
+            projectId=self.project_with_af.id,
+        )
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisUpdate']['result']
+        analysis_resp_data_pillars = [each["id"] for each in analysis_resp_data["pillars"]]
+        self.assertTrue(
+            all(
+                AnalysisPillar.objects.get(id=int(each["id"])).title == each["title"]
+                for each in analysis_resp_data["pillars"]
+            )
+        )
+        expected_analysis_pillar_ids_dict = {
+            str(self.analysis_pillar1.id),
+            str(self.analysis_pillar2.id),
+            str(self.analysis_pillar3.id)
+        }
+        self.assertGreaterEqual(
+            len(analysis_resp_data['pillars']),
+            len(self.update_minput['analysisUpdate']['analysisPillar'])
+        )
+        self.assertEqual(len(analysis_resp_data['pillars']), 4)
+        for item in expected_analysis_pillar_ids_dict:
+            self.assertIn(item, analysis_resp_data_pillars)
+        self.assertEqual(analysis_resp_data['title'], self.update_minput['analysisUpdate']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.update_minput['analysisUpdate']['endDate']))
+
+    def test_delete_analysis(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.DELETE_MUTATION,
+                variables=self.delete_minput,
+                **kwargs
+            )
+
+        self.delete_minput = dict(
+            projectId=self.project_with_af.id,
+            deleteId=self.analysis.id,
+        )
+        # -- Without login
+        self.logout()
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisDelete']['result']
+        self.assertEqual(analysis_resp_data['id'], str(self.delete_minput['deleteId']))
+        self.assertEqual(len(analysis_resp_data['pillars']), 0)
+
+
+class TestAnalysisPillarMutationSchema(GraphQLTestCase):
+    UPDATE_MUTATION = '''
+        mutation MyMutation($analysisPillarUpdate: AnalysisPillarUpdateInputType!, $analysisPillarID: ID!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisPillarUpdate(data: $analysisPillarUpdate, id: $analysisPillarID) {
+              errors
+              ok
+              result {
+                analysisId
+                title
+                id
+              }
+            }
+          }
+        }
+    '''
+
+    def setUp(self):
+        super().setUp()
+        self.af = AnalysisFrameworkFactory.create()
+        self.project_with_af = ProjectFactory.create(analysis_framework=self.af, status=Project.Status.ACTIVE)
+        # Users with different roles
+        self.non_member_user = UserFactory.create()
+        self.readonly_member_user = UserFactory.create()
+        self.member_user = UserFactory.create()
+        self.project_with_af.add_member(self.readonly_member_user, role=self.project_role_reader_non_confidential)
+        self.project_with_af.add_member(self.member_user, role=self.project_role_member)
+        self.analysis = AnalysisFactory.create(
+            project=self.project_with_af,
+            team_lead=self.member_user,
+            end_date=datetime.date(2022, 4, 1),
+        )
+        self.analysis_pillar = AnalysisPillarFactory.create(
+            analysis=self.analysis,
+            assignee=self.member_user,
+        )
+
+    def test_update_analysis_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.UPDATE_MUTATION,
+                variables=self.update_minput,
+                **kwargs
+            )
+
+        self.update_minput = dict(
+            analysisPillarUpdate=dict(
+                title="Updated Analysis Pillar",
+            ),
+            analysisPillarID=self.analysis_pillar.id,
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_pillar_resp_data = _query_check()['data']['project']['analysisPillarUpdate']['result']
+        self.assertEqual(analysis_pillar_resp_data['title'], self.update_minput['analysisPillarUpdate']['title'])
+        self.assertEqual(analysis_pillar_resp_data['id'], str(self.update_minput['analysisPillarID']))
+        self.assertEqual(analysis_pillar_resp_data['analysisId'], str(self.analysis.id))
diff --git a/schema.graphql b/schema.graphql
index 6b6ddd967e..f3deea2e0d 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -268,12 +268,10 @@ type AnalysisFrameworkVisibleProjectType {
 }

 input AnalysisInputType {
-  id: ID
   title: String!
   teamLead: ID!
   startDate: Date
   endDate: Date!
-  clonedFrom: ID
   analysisPillar: [AnalysisPillarGqlInputType!]
 }

@@ -322,6 +320,7 @@ type AnalysisPillarEntryListType {
 }

 input AnalysisPillarGqlInputType {
+  id: ID
   title: String!
   mainStatement: String
   informationGap: String
@@ -361,6 +360,7 @@ type AnalysisPillarType {
 }

 input AnalysisPillarUpdateInputType {
+  id: ID
   title: String
   mainStatement: String
   informationGap: String
@@ -3287,12 +3287,6 @@ enum AutomaticSummaryStatusEnum {
   SEND_FAILED
 }

-type BulkAnalysis {
-  errors: [[GenericScalar!]]
-  result: [AnalysisType]
-  deletedResult: [AnalysisType!]
-}
-
 input BulkAnalysisFrameworkMembershipInputType {
   id: ID
   member: ID!
@@ -3300,16 +3294,6 @@ input BulkAnalysisFrameworkMembershipInputType {
   clientId: String
 }

-input BulkAnalysisInputType {
-  id: ID
-  title: String!
-  teamLead: ID!
-  startDate: Date
-  endDate: Date!
-  clonedFrom: ID
-  analysisPillar: [AnalysisPillarGqlInputType!]
-}
-
 type BulkEntry {
   errors: [[GenericScalar!]]
   result: [EntryType]
@@ -5488,7 +5472,6 @@ type ProjectMutationType {
   analysisCreate(data: AnalysisInputType!): CreateAnalysis
   analysisUpdate(data: AnalysisInputType!, id: ID!): UpdateAnalysis
   analysisDelete(id: ID!): DeleteAnalysis
-  analysisBulk(deleteIds: [ID!], items: [BulkAnalysisInputType!]): BulkAnalysis
   exportCreate(data: ExportCreateInputType!): CreateUserExport
   exportUpdate(data: ExportUpdateInputType!, id: ID!): UpdateUserExport
   exportCancel(id: ID!): CancelUserExport