From 07877c8dffa5d9c81f9fa143316704765cebcf25 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Mon, 31 Jul 2023 18:51:15 +0300 Subject: [PATCH 01/16] Add manage buttons. --- .../overview-component.html | 42 ++++++++++++++++--- .../overview-component.sass | 37 ++++++++++++++++ .../overview-component/overview-component.ts | 19 ++++++++- src/app/dataset-view/dataset.component.html | 2 - src/app/dataset-view/dataset.component.ts | 5 --- src/assets/styles/var.sass | 2 +- 6 files changed, 92 insertions(+), 15 deletions(-) diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.html b/src/app/dataset-view/additional-components/overview-component/overview-component.html index 63a7a3934..348d885ca 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.html +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.html @@ -144,7 +144,7 @@

[markup omitted: this hunk adds the README "manage" controls to overview-component.html, i.e. an Edit/Preview button pair inside the new edit-title bar plus Cancel/Save buttons, corresponding to the .edit-button and .button-changes styles and the component state added below]
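The cancel path wired to these buttons is easiest to read in isolation. A minimal Jasmine sketch follows (illustrative only, not part of the patch); the property and handler names come from the overview-component.ts changes further down in this patch, and the fixture setup is assumed to match the existing overview-component.spec.ts.

// Illustrative spec: exercises the new edit/preview state on OverviewComponent.
it("should restore the initial readme and leave edit mode on cancel", () => {
    component.initialReadmeState = "# Original readme";
    component.readmeState = "# Edited readme";
    component.isMarkdownEditView = true;
    component.isEditMode = false;

    component.onCancelChanges();

    expect(component.readmeState).toBe("# Original readme");
    expect(component.isMarkdownEditView).toBeFalse();
    expect(component.isEditMode).toBeTrue();
});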
diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.sass b/src/app/dataset-view/additional-components/overview-component/overview-component.sass index 1df6e550c..f3272bc2a 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.sass +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.sass @@ -11,3 +11,40 @@ .no-data-container background-color: rgba(97,144,195,.15) + +.edit-title + border-bottom: 1px solid #DEDEDE + padding: 10px + .left-part + font-size: 14px + .edit-button + border: none + outline: none + padding: 5px 15px + &:first-child + border-top-left-radius: 5px + border-bottom-left-radius: 5px + &:nth-child(2) + border-top-right-radius: 5px + border-bottom-right-radius: 5px + &.active + background: #fff + font-weight: 500 + border: 1px solid #000 + border-radius: 5px + &:not(.active):hover + background: #DEDEDE + + .right-part + .button-changes + border: none + outline: none + border-radius: 6px + padding: 6px 9px + border: 1px solid #DEDEDE + &:hover:first-child + background-color: #e8e3e3 + + &:nth-child(2) + background-color: rgb(26, 127, 55) + color: #fff diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.ts b/src/app/dataset-view/additional-components/overview-component/overview-component.ts index 4b0ce837e..76b00d303 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.ts +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.ts @@ -34,10 +34,13 @@ import { EditWatermarkModalComponent } from "./components/edit-watermark-modal/e changeDetection: ChangeDetectionStrategy.OnPush, }) export class OverviewComponent extends BaseComponent implements OnInit { - @Input() public isMarkdownEditView: boolean; + public isMarkdownEditView = false; @Input() public datasetBasics?: DatasetBasicsFragment; @Output() toggleReadmeViewEmit = new EventEmitter(); @Output() selectTopicEmit = new EventEmitter(); + public isEditMode = true; + public initialReadmeState = ""; + public readmeState = ""; public currentState?: { schema: MaybeNull; @@ -64,6 +67,8 @@ export class OverviewComponent extends BaseComponent implements OnInit { size: overviewUpdate.size, overview: overviewUpdate.overview, }; + this.initialReadmeState = this.readmeState = + overviewUpdate.overview.metadata.currentReadme ?? 
""; }, ), ); @@ -74,7 +79,17 @@ export class OverviewComponent extends BaseComponent implements OnInit { } public toggleReadmeView(): void { - this.toggleReadmeViewEmit.emit(); + this.isMarkdownEditView = !this.isMarkdownEditView; + } + + public toggleEditMode(): void { + this.isEditMode = !this.isEditMode; + } + + public onCancelChanges(): void { + this.readmeState = this.initialReadmeState; + this.isMarkdownEditView = false; + this.isEditMode = true; } public selectTopic(topicName: string): void { diff --git a/src/app/dataset-view/dataset.component.html b/src/app/dataset-view/dataset.component.html index 4b3e749fa..45e1946e1 100644 --- a/src/app/dataset-view/dataset.component.html +++ b/src/app/dataset-view/dataset.component.html @@ -30,9 +30,7 @@ diff --git a/src/app/dataset-view/dataset.component.ts b/src/app/dataset-view/dataset.component.ts index 6bc38758f..18d3534af 100644 --- a/src/app/dataset-view/dataset.component.ts +++ b/src/app/dataset-view/dataset.component.ts @@ -35,7 +35,6 @@ export class DatasetComponent public datasetViewType: DatasetViewTypeEnum = DatasetViewTypeEnum.Overview; public lineageGraphView: [number, number] = [500, 600]; - public isMarkdownEditView = false; @HostListener("window:resize") private checkWindowSize(): void { @@ -159,10 +158,6 @@ export class DatasetComponent console.log("initDiscussionsTab"); } - public toggleReadmeView(): void { - this.isMarkdownEditView = !this.isMarkdownEditView; - } - public selectTopic(topicName: string): void { promiseWithCatch( this.modalService.warning({ diff --git a/src/assets/styles/var.sass b/src/assets/styles/var.sass index 562423e6f..002d5057b 100644 --- a/src/assets/styles/var.sass +++ b/src/assets/styles/var.sass @@ -394,9 +394,9 @@ $app-color-action-list-item-inline-divider: #d0d7de7a & .variable-textarea width: 100% min-height: 300px - max-height: 600px height: max-content border: none + outline: none markdown & * font: revert !important From 851c83738ecbb7894f3873fd4ba9a092e1545638 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Mon, 31 Jul 2023 19:34:09 +0300 Subject: [PATCH 02/16] Add ability to save readme.md --- .../overview-component.html | 6 ++++- .../overview-component.sass | 3 ++- .../overview-component/overview-component.ts | 24 +++++++++++++++++++ .../dataset-view/dataset.component.spec.ts | 8 ------- .../services/templates-yaml-events.service.ts | 16 +++++++++++++ src/assets/styles/var.sass | 2 +- 6 files changed, 48 insertions(+), 11 deletions(-) diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.html b/src/app/dataset-view/additional-components/overview-component/overview-component.html index 348d885ca..9326e9598 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.html +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.html @@ -181,7 +181,11 @@

> Cancel changes - diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.sass b/src/app/dataset-view/additional-components/overview-component/overview-component.sass index f3272bc2a..1178a2342 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.sass +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.sass @@ -44,7 +44,8 @@ border: 1px solid #DEDEDE &:hover:first-child background-color: #e8e3e3 - &:nth-child(2) background-color: rgb(26, 127, 55) color: #fff + &:nth-child(2):disabled + background-color: rgb(148, 211, 162) diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.ts b/src/app/dataset-view/additional-components/overview-component/overview-component.ts index 76b00d303..a6d795dcc 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.ts +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.ts @@ -25,6 +25,8 @@ import { MaybeNull } from "src/app/common/app.types"; import { NgbModal, NgbModalRef } from "@ng-bootstrap/ng-bootstrap"; import { EditDetailsModalComponent } from "./components/edit-details-modal/edit-details-modal.component"; import { EditWatermarkModalComponent } from "./components/edit-watermark-modal/edit-watermark-modal.component"; +import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; +import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; @Component({ selector: "app-overview", @@ -42,6 +44,10 @@ export class OverviewComponent extends BaseComponent implements OnInit { public initialReadmeState = ""; public readmeState = ""; + public get readmeChanged(): boolean { + return this.initialReadmeState !== this.readmeState; + } + public currentState?: { schema: MaybeNull; data: DataRow[]; @@ -53,6 +59,8 @@ export class OverviewComponent extends BaseComponent implements OnInit { private appDatasetSubsService: AppDatasetSubscriptionsService, private navigationService: NavigationService, private modalService: NgbModal, + private createDatasetService: AppDatasetCreateService, + private yamlEventService: TemplatesYamlEventsService, ) { super(); } @@ -92,6 +100,22 @@ export class OverviewComponent extends BaseComponent implements OnInit { this.isEditMode = true; } + public commitChanges(): void { + console.log(this.readmeState); + if (this.datasetBasics) + this.trackSubscription( + this.createDatasetService + .commitEventToDataset( + this.datasetBasics.owner.name, + this.datasetBasics.name as string, + this.yamlEventService.buildYamlSetAttachmentsEvent( + this.readmeState, + ), + ) + .subscribe(() => (this.isMarkdownEditView = false)), + ); + } + public selectTopic(topicName: string): void { this.selectTopicEmit.emit(topicName); } diff --git a/src/app/dataset-view/dataset.component.spec.ts b/src/app/dataset-view/dataset.component.spec.ts index ab6fd5835..57f2dffc9 100644 --- a/src/app/dataset-view/dataset.component.spec.ts +++ b/src/app/dataset-view/dataset.component.spec.ts @@ -149,14 +149,6 @@ describe("DatasetComponent", () => { expect(getMainDataByLineageNodeSpy).toHaveBeenCalledTimes(1); }); - it("should check toggle readme view", () => { - component.isMarkdownEditView = false; - component.toggleReadmeView(); - expect(component.isMarkdownEditView).toBe(true); - component.toggleReadmeView(); - expect(component.isMarkdownEditView).toBe(false); - }); - it("should check run SQL 
request", () => { const sqlQuery = "select * from test.table"; const requestDatasetDataSqlRunSpy = spyOn( diff --git a/src/app/services/templates-yaml-events.service.ts b/src/app/services/templates-yaml-events.service.ts index 3f19f27e2..9cbecb6bc 100644 --- a/src/app/services/templates-yaml-events.service.ts +++ b/src/app/services/templates-yaml-events.service.ts @@ -54,6 +54,22 @@ export class TemplatesYamlEventsService { return result; } + public buildYamlSetAttachmentsEvent(data: string): string { + this.initialTemplate.content = { + kind: "setAttachments", + attachments: { + kind: "embedded", + items: [ + { + path: "README.md", + content: data, + }, + ], + }, + }; + return stringify(this.initialTemplate); + } + public buildYamlSetPollingSourceEvent( params: Omit, preprocessStepValue: MaybeNull, diff --git a/src/assets/styles/var.sass b/src/assets/styles/var.sass index 002d5057b..c2d585b69 100644 --- a/src/assets/styles/var.sass +++ b/src/assets/styles/var.sass @@ -390,7 +390,7 @@ $app-color-action-list-item-inline-divider: #d0d7de7a font-size: 12px line-height: 32px & .file-content - padding: 16px + padding: 40px 25px & .variable-textarea width: 100% min-height: 300px From 5f8c3f62abe2a91f12ee2699f4ebb22a49178d52 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Tue, 1 Aug 2023 10:32:21 +0300 Subject: [PATCH 03/16] Add new component ReadmeSectionComponent. --- .../readme-section.component.html | 77 ++++++++++++++++ .../readme-section.component.sass | 0 .../readme-section.component.spec.ts | 29 ++++++ .../readme-section.component.ts | 91 +++++++++++++++++++ .../overview-component.html | 87 +----------------- .../overview-component.spec.ts | 8 -- .../overview-component/overview-component.ts | 44 --------- src/app/dataset-view/dataset.module.ts | 2 + 8 files changed, 203 insertions(+), 135 deletions(-) create mode 100644 src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html create mode 100644 src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.sass create mode 100644 src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.spec.ts create mode 100644 src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html new file mode 100644 index 000000000..06fbb11cb --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html @@ -0,0 +1,77 @@ +
[markup omitted: the new readme-section.component.html template renders the README.md heading with the Edit/Preview toggle, the markdown preview or textarea editor, and the Cancel changes / Save buttons, all bound to the ReadmeSectionComponent state defined below]
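Before the component code, it helps to see the save path this template drives. The sketch below is illustrative only and mirrors ReadmeSectionComponent.commitChanges further down; the two services come from patch 02, while saveReadme, ownerName, datasetName and readme are placeholder names introduced here.

import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service";
import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service";

// Illustrative helper (not part of the patch): persist the edited readme as a
// setAttachments event, the same way ReadmeSectionComponent.commitChanges does below.
function saveReadme(
    createDatasetService: AppDatasetCreateService,
    yamlEventService: TemplatesYamlEventsService,
    ownerName: string,
    datasetName: string,
    readme: string,
): void {
    // buildYamlSetAttachmentsEvent (patch 02) wraps the text roughly as:
    //   content:
    //     kind: setAttachments
    //     attachments:
    //       kind: embedded
    //       items:
    //         - path: README.md
    //           content: <readme text>
    // The outer manifest fields come from the service's initialTemplate, which this
    // diff does not show.
    const event = yamlEventService.buildYamlSetAttachmentsEvent(readme);
    createDatasetService.commitEventToDataset(ownerName, datasetName, event).subscribe();
}

Note that patch 05 later reworks commitEventToDataset to resolve the dataset by id first, so this call shape only matches patches 02 and 03.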
diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.sass b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.sass new file mode 100644 index 000000000..e69de29bb diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.spec.ts b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.spec.ts new file mode 100644 index 000000000..f421fcca4 --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.spec.ts @@ -0,0 +1,29 @@ +import { ComponentFixture, TestBed } from "@angular/core/testing"; +import { ReadmeSectionComponent } from "./readme-section.component"; +import { mockDatasetBasicsFragment } from "src/app/search/mock.data"; +import { Apollo, ApolloModule } from "apollo-angular"; +import { ApolloTestingModule } from "apollo-angular/testing"; +import { SharedTestModule } from "src/app/common/shared-test.module"; + +describe("ReadmeSectionComponent", () => { + let component: ReadmeSectionComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + providers: [Apollo], + declarations: [ReadmeSectionComponent], + imports: [ApolloModule, ApolloTestingModule, SharedTestModule], + }).compileComponents(); + + fixture = TestBed.createComponent(ReadmeSectionComponent); + component = fixture.componentInstance; + component.datasetBasics = mockDatasetBasicsFragment; + + fixture.detectChanges(); + }); + + it("should create", () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts new file mode 100644 index 000000000..b2652b73b --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts @@ -0,0 +1,91 @@ +import { + ChangeDetectionStrategy, + Component, + Input, + OnInit, +} from "@angular/core"; +import { + DatasetBasicsFragment, + DatasetDataSizeFragment, + DatasetOverviewFragment, +} from "src/app/api/kamu.graphql.interface"; +import { MaybeNull } from "src/app/common/app.types"; +import { BaseComponent } from "src/app/common/base.component"; +import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; +import { OverviewDataUpdate } from "src/app/dataset-view/dataset.subscriptions.interface"; +import { AppDatasetSubscriptionsService } from "src/app/dataset-view/dataset.subscriptions.service"; +import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; +import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; + +@Component({ + selector: "app-readme-section", + templateUrl: "./readme-section.component.html", + styleUrls: ["./readme-section.component.sass"], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class ReadmeSectionComponent extends BaseComponent implements OnInit { + @Input() public datasetBasics?: DatasetBasicsFragment; + @Input() public currentState?: { + schema: MaybeNull; + data: DataRow[]; + overview: DatasetOverviewFragment; + size: DatasetDataSizeFragment; + }; + public isEditMode = true; + 
public initialReadmeState = ""; + public readmeState = ""; + public isMarkdownEditView = false; + + public get readmeChanged(): boolean { + return this.initialReadmeState !== this.readmeState; + } + + constructor( + private appDatasetSubsService: AppDatasetSubscriptionsService, + + private createDatasetService: AppDatasetCreateService, + private yamlEventService: TemplatesYamlEventsService, + ) { + super(); + } + + ngOnInit(): void { + this.trackSubscription( + this.appDatasetSubsService.onDatasetOverviewDataChanges.subscribe( + (overviewUpdate: OverviewDataUpdate) => { + this.initialReadmeState = this.readmeState = + overviewUpdate.overview.metadata.currentReadme ?? ""; + }, + ), + ); + } + + public toggleReadmeView(): void { + this.isMarkdownEditView = !this.isMarkdownEditView; + } + + public toggleEditMode(): void { + this.isEditMode = !this.isEditMode; + } + + public onCancelChanges(): void { + this.readmeState = this.initialReadmeState; + this.isMarkdownEditView = false; + this.isEditMode = true; + } + + public commitChanges(): void { + if (this.datasetBasics) + this.trackSubscription( + this.createDatasetService + .commitEventToDataset( + this.datasetBasics.owner.name, + this.datasetBasics.name as string, + this.yamlEventService.buildYamlSetAttachmentsEvent( + this.readmeState, + ), + ) + .subscribe(() => (this.isMarkdownEditView = false)), + ); + } +} diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.html b/src/app/dataset-view/additional-components/overview-component/overview-component.html index 9326e9598..e5932e7eb 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.html +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.html @@ -122,89 +122,10 @@

[markup omitted: this hunk, placed right after the "No data" placeholder block, removes the inlined README.md view/edit markup from overview-component.html and replaces it with the new app-readme-section element bound to the parent's dataset basics and current overview state]
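With the readme logic extracted into ReadmeSectionComponent, the unsaved-changes check is the readmeChanged getter (initialReadmeState !== readmeState), which the template presumably uses to disable the Save button. A short illustrative Jasmine sketch, not part of the patch, assuming a ReadmeSectionComponent fixture configured as in readme-section.component.spec.ts above:

it("should report unsaved changes only when the readme text differs", () => {
    component.initialReadmeState = "# Readme";
    component.readmeState = "# Readme";
    expect(component.readmeChanged).toBeFalse();

    component.readmeState = "# Readme (edited)";
    expect(component.readmeChanged).toBeTrue();
});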
diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.spec.ts b/src/app/dataset-view/additional-components/overview-component/overview-component.spec.ts index 14bb24efd..13c8d8ba5 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.spec.ts +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.spec.ts @@ -70,14 +70,6 @@ describe("OverviewComponent", () => { expect(component.metadataFragmentBlock).toEqual(undefined); }); - it("should check toggle readme view", () => { - const emitterSubscription$ = component.toggleReadmeViewEmit - .pipe(first()) - .subscribe(); - component.toggleReadmeView(); - expect(emitterSubscription$.closed).toBeTrue(); - }); - [ { kind: DatasetKind.Derivative, result: "Derivative" }, { kind: DatasetKind.Root, result: "Root" }, diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.ts b/src/app/dataset-view/additional-components/overview-component/overview-component.ts index a6d795dcc..c48bab74e 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.ts +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.ts @@ -25,8 +25,6 @@ import { MaybeNull } from "src/app/common/app.types"; import { NgbModal, NgbModalRef } from "@ng-bootstrap/ng-bootstrap"; import { EditDetailsModalComponent } from "./components/edit-details-modal/edit-details-modal.component"; import { EditWatermarkModalComponent } from "./components/edit-watermark-modal/edit-watermark-modal.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; -import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; @Component({ selector: "app-overview", @@ -36,17 +34,9 @@ import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-even changeDetection: ChangeDetectionStrategy.OnPush, }) export class OverviewComponent extends BaseComponent implements OnInit { - public isMarkdownEditView = false; @Input() public datasetBasics?: DatasetBasicsFragment; @Output() toggleReadmeViewEmit = new EventEmitter(); @Output() selectTopicEmit = new EventEmitter(); - public isEditMode = true; - public initialReadmeState = ""; - public readmeState = ""; - - public get readmeChanged(): boolean { - return this.initialReadmeState !== this.readmeState; - } public currentState?: { schema: MaybeNull; @@ -59,8 +49,6 @@ export class OverviewComponent extends BaseComponent implements OnInit { private appDatasetSubsService: AppDatasetSubscriptionsService, private navigationService: NavigationService, private modalService: NgbModal, - private createDatasetService: AppDatasetCreateService, - private yamlEventService: TemplatesYamlEventsService, ) { super(); } @@ -75,8 +63,6 @@ export class OverviewComponent extends BaseComponent implements OnInit { size: overviewUpdate.size, overview: overviewUpdate.overview, }; - this.initialReadmeState = this.readmeState = - overviewUpdate.overview.metadata.currentReadme ?? 
""; }, ), ); @@ -86,36 +72,6 @@ export class OverviewComponent extends BaseComponent implements OnInit { this.navigationService.navigateToWebsite(url); } - public toggleReadmeView(): void { - this.isMarkdownEditView = !this.isMarkdownEditView; - } - - public toggleEditMode(): void { - this.isEditMode = !this.isEditMode; - } - - public onCancelChanges(): void { - this.readmeState = this.initialReadmeState; - this.isMarkdownEditView = false; - this.isEditMode = true; - } - - public commitChanges(): void { - console.log(this.readmeState); - if (this.datasetBasics) - this.trackSubscription( - this.createDatasetService - .commitEventToDataset( - this.datasetBasics.owner.name, - this.datasetBasics.name as string, - this.yamlEventService.buildYamlSetAttachmentsEvent( - this.readmeState, - ), - ) - .subscribe(() => (this.isMarkdownEditView = false)), - ); - } - public selectTopic(topicName: string): void { this.selectTopicEmit.emit(topicName); } diff --git a/src/app/dataset-view/dataset.module.ts b/src/app/dataset-view/dataset.module.ts index 15ee11d2b..09cf4f891 100644 --- a/src/app/dataset-view/dataset.module.ts +++ b/src/app/dataset-view/dataset.module.ts @@ -61,6 +61,7 @@ import { PageNotFoundComponent } from "../components/page-not-found/page-not-fou import { AddPollingSourceComponent } from "./additional-components/metadata-component/components/add-polling-source/add-polling-source.component"; import { MatStepperModule } from "@angular/material/stepper"; import { EngineSelectComponent } from "./additional-components/metadata-component/components/set-transform/components/engine-section/components/engine-select/engine-select.component"; +import { ReadmeSectionComponent } from "./additional-components/overview-component/components/readme-section/readme-section.component"; @NgModule({ imports: [ CommonModule, @@ -146,6 +147,7 @@ import { EngineSelectComponent } from "./additional-components/metadata-componen PageNotFoundComponent, AddPollingSourceComponent, EngineSelectComponent, + ReadmeSectionComponent, ], }) export class DatasetModule { From 061687b62d506da5a6e8d5ec35d4bf9d2812fc21 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Tue, 1 Aug 2023 16:19:42 +0300 Subject: [PATCH 04/16] Update schema. --- resources/schema.graphql | 1272 +++++++++++++++++++------------------- 1 file changed, 637 insertions(+), 635 deletions(-) diff --git a/resources/schema.graphql b/resources/schema.graphql index c0351059d..2e8f8092b 100644 --- a/resources/schema.graphql +++ b/resources/schema.graphql @@ -1,320 +1,329 @@ type AccessToken { - accessToken: String! - scope: String! - tokenType: String! + accessToken: String! + scope: String! + tokenType: String! } interface Account { - id: AccountID! - name: String! + id: AccountID! + name: String! } scalar AccountID type AccountInfo { - login: String! - name: String! - email: String - avatarUrl: String - gravatarId: String + login: String! + name: String! 
+ email: String + avatarUrl: String + gravatarId: String } scalar AccountName type Accounts { - """ - Returns account by its ID - """ - byId(accountId: AccountID!): Account - """ - Returns account by its name - """ - byName(name: String!): Account + """ + Returns account by its ID + """ + byId(accountId: AccountID!): Account + """ + Returns account by its name + """ + byName(name: String!): Account } type AddData { - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime - sourceState: SourceState + inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime + sourceState: SourceState } type AttachmentEmbedded { - path: String! - content: String! + path: String! + content: String! } union Attachments = AttachmentsEmbedded type AttachmentsEmbedded { - items: [AttachmentEmbedded!]! + items: [AttachmentEmbedded!]! } -type Auth { - githubLogin(code: String!): LoginResponse! - accountInfo(accessToken: String!): AccountInfo! +type AuthMut { + githubLogin(code: String!): LoginResponse! + accountInfo(accessToken: String!): AccountInfo! } type BlockInterval { - start: Multihash! - end: Multihash! + start: Multihash! + end: Multihash! } type BlockRef { - name: String! - blockHash: Multihash! + name: String! + blockHash: Multihash! } + type Checkpoint { - physicalHash: Multihash! - size: Int! + physicalHash: Multihash! + size: Int! } interface CommitResult { - message: String! + message: String! } -type CommitResultAppendError implements CommitResult { - message: String! +type CommitResultAppendError implements CommitResult & UpdateReadmeResult { + message: String! } -type CommitResultSuccess implements CommitResult { - oldHead: Multihash - newHead: Multihash! - message: String! +type CommitResultSuccess implements CommitResult & UpdateReadmeResult { + oldHead: Multihash + newHead: Multihash! + message: String! } enum CompressionFormat { - GZIP - ZIP + GZIP + ZIP } interface CreateDatasetFromSnapshotResult { - message: String! + message: String! } interface CreateDatasetResult { - message: String! + message: String! } type CreateDatasetResultInvalidSnapshot implements CreateDatasetFromSnapshotResult { - message: String! + message: String! } type CreateDatasetResultMissingInputs implements CreateDatasetFromSnapshotResult { - missingInputs: [String!]! - message: String! + missingInputs: [String!]! + message: String! } type CreateDatasetResultNameCollision implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - accountName: AccountName - datasetName: DatasetName! - message: String! + accountName: AccountName + datasetName: DatasetName! + message: String! } type CreateDatasetResultSuccess implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - dataset: Dataset! - message: String! + dataset: Dataset! + message: String! } type DataBatch { - format: DataBatchFormat! - content: String! - numRecords: Int! + format: DataBatchFormat! + content: String! + numRecords: Int! } enum DataBatchFormat { - JSON - JSON_LD - JSON_SOA - CSV + JSON + JSON_LD + JSON_SOA + CSV } type DataQueries { - """ - Executes a specified query and returns its result - """ - query( - query: String! - queryDialect: QueryDialect! - dataFormat: DataBatchFormat - schemaFormat: DataSchemaFormat - limit: Int - ): DataQueryResult! - """ - Lists engines known to the system and recommended for use - """ - knownEngines: [EngineDesc!]! 
+ """ + Executes a specified query and returns its result + """ + query(query: String!, queryDialect: QueryDialect!, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat, limit: Int): DataQueryResult! + """ + Lists engines known to the system and recommended for use + """ + knownEngines: [EngineDesc!]! } union DataQueryResult = DataQueryResultSuccess | DataQueryResultError type DataQueryResultError { - errorMessage: String! - errorKind: DataQueryResultErrorKind! + errorMessage: String! + errorKind: DataQueryResultErrorKind! } enum DataQueryResultErrorKind { - INVALID_SQL - INTERNAL_ERROR + INVALID_SQL + INTERNAL_ERROR } type DataQueryResultSuccess { - schema: DataSchema - data: DataBatch! - limit: Int! + schema: DataSchema + data: DataBatch! + limit: Int! } type DataSchema { - format: DataSchemaFormat! - content: String! + format: DataSchemaFormat! + content: String! } enum DataSchemaFormat { - PARQUET - PARQUET_JSON + PARQUET + PARQUET_JSON } type DataSlice { - logicalHash: Multihash! - physicalHash: Multihash! - interval: OffsetInterval! - size: Int! + logicalHash: Multihash! + physicalHash: Multihash! + interval: OffsetInterval! + size: Int! } type Dataset { - """ - Unique identifier of the dataset - """ - id: DatasetID! - """ - Symbolic name of the dataset. - Name can change over the dataset's lifetime. For unique identifier use - `id()`. - """ - name: DatasetName! - """ - Returns the user or organization that owns this dataset - """ - owner: Account! - """ - Returns the kind of a dataset (Root or Derivative) - """ - kind: DatasetKind! - """ - Access to the data of the dataset - """ - data: DatasetData! - """ - Access to the metadata of the dataset - """ - metadata: DatasetMetadata! - """ - Creation time of the first metadata block in the chain - """ - createdAt: DateTime! - """ - Creation time of the most recent metadata block in the chain - """ - lastUpdatedAt: DateTime! + """ + Unique identifier of the dataset + """ + id: DatasetID! + """ + Symbolic name of the dataset. + Name can change over the dataset's lifetime. For unique identifier use + `id()`. + """ + name: DatasetName! + """ + Returns the user or organization that owns this dataset + """ + owner: Account! + """ + Returns the kind of a dataset (Root or Derivative) + """ + kind: DatasetKind! + """ + Access to the data of the dataset + """ + data: DatasetData! + """ + Access to the metadata of the dataset + """ + metadata: DatasetMetadata! + """ + Creation time of the first metadata block in the chain + """ + createdAt: DateTime! + """ + Creation time of the most recent metadata block in the chain + """ + lastUpdatedAt: DateTime! } type DatasetConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Dataset!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [DatasetEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Dataset!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [DatasetEdge!]! } type DatasetData { - """ - Total number of records in this dataset - """ - numRecordsTotal: Int! - """ - An estimated size of data on disk not accounting for replication or - caching - """ - estimatedSize: Int! 
- """ - Returns the specified number of the latest records in the dataset - This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY - event_time DESC LIMIT N` - """ - tail( - limit: Int - dataFormat: DataBatchFormat - schemaFormat: DataSchemaFormat - ): DataQueryResult! + """ + Total number of records in this dataset + """ + numRecordsTotal: Int! + """ + An estimated size of data on disk not accounting for replication or + caching + """ + estimatedSize: Int! + """ + Returns the specified number of the latest records in the dataset + This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY + event_time DESC LIMIT N` + """ + tail(limit: Int, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat): DataQueryResult! } type DatasetEdge { - node: Dataset! + node: Dataset! } scalar DatasetID enum DatasetKind { - ROOT - DERIVATIVE + ROOT + DERIVATIVE } type DatasetMetadata { - """ - Access to the temporal metadata chain of the dataset - """ - chain: MetadataChain! - """ - Last recorded watermark - """ - currentWatermark: DateTime - """ - Latest data schema - """ - currentSchema(format: DataSchemaFormat): DataSchema - """ - Current upstream dependencies of a dataset - """ - currentUpstreamDependencies: [Dataset!]! - """ - Current downstream dependencies of a dataset - """ - currentDownstreamDependencies: [Dataset!]! - """ - Current source used by the root dataset - """ - currentSource: SetPollingSource - """ - Current transformation used by the derivative dataset - """ - currentTransform: SetTransform - """ - Current descriptive information about the dataset - """ - currentInfo: SetInfo! - """ - Current readme file as discovered from attachments associated with the - dataset - """ - currentReadme: String - """ - Current license associated with the dataset - """ - currentLicense: SetLicense - """ - Current vocabulary associated with the dataset - """ - currentVocab: SetVocab + """ + Access to the temporal metadata chain of the dataset + """ + chain: MetadataChain! + """ + Last recorded watermark + """ + currentWatermark: DateTime + """ + Latest data schema + """ + currentSchema(format: DataSchemaFormat): DataSchema + """ + Current upstream dependencies of a dataset + """ + currentUpstreamDependencies: [Dataset!]! + """ + Current downstream dependencies of a dataset + """ + currentDownstreamDependencies: [Dataset!]! + """ + Current source used by the root dataset + """ + currentSource: SetPollingSource + """ + Current transformation used by the derivative dataset + """ + currentTransform: SetTransform + """ + Current descriptive information about the dataset + """ + currentInfo: SetInfo! + """ + Current readme file as discovered from attachments associated with the + dataset + """ + currentReadme: String + """ + Current license associated with the dataset + """ + currentLicense: SetLicense + """ + Current vocabulary associated with the dataset + """ + currentVocab: SetVocab +} + +type DatasetMetadataMut { + """ + Access to the mutable metadata chain of the dataset + """ + chain: MetadataChainMut! + """ + Updates or clears the dataset readme + """ + updateReadme(content: String): UpdateReadmeResult! +} + +type DatasetMut { + """ + Access to the mutable metadata of the dataset + """ + metadata: DatasetMetadataMut! 
} scalar DatasetName @@ -322,46 +331,37 @@ scalar DatasetName scalar DatasetRefAny type Datasets { - """ - Returns dataset by its ID - """ - byId(datasetId: DatasetID!): Dataset - """ - Returns dataset by its owner and name - """ - byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset - """ - Returns datasets belonging to the specified account - """ - byAccountId( - accountId: AccountID! - page: Int - perPage: Int - ): DatasetConnection! - """ - Returns datasets belonging to the specified account - """ - byAccountName( - accountName: AccountName! - page: Int - perPage: Int - ): DatasetConnection! - """ - Creates a new empty dataset - """ - createEmpty( - accountId: AccountID! - datasetKind: DatasetKind! - datasetName: DatasetName! - ): CreateDatasetResult! - """ - Creates a new dataset from provided DatasetSnapshot manifest - """ - createFromSnapshot( - accountId: AccountID! - snapshot: String! - snapshotFormat: MetadataManifestFormat! - ): CreateDatasetFromSnapshotResult! + """ + Returns dataset by its ID + """ + byId(datasetId: DatasetID!): Dataset + """ + Returns dataset by its owner and name + """ + byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset + """ + Returns datasets belonging to the specified account + """ + byAccountId(accountId: AccountID!, page: Int, perPage: Int): DatasetConnection! + """ + Returns datasets belonging to the specified account + """ + byAccountName(accountName: AccountName!, page: Int, perPage: Int): DatasetConnection! +} + +type DatasetsMut { + """ + Returns a mutable dataset by its ID + """ + byId(datasetId: DatasetID!): DatasetMut + """ + Creates a new empty dataset + """ + createEmpty(accountId: AccountID!, datasetKind: DatasetKind!, datasetName: DatasetName!): CreateDatasetResult! + """ + Creates a new dataset from provided DatasetSnapshot manifest + """ + createFromSnapshot(accountId: AccountID!, snapshot: String!, snapshotFormat: MetadataManifestFormat!): CreateDatasetFromSnapshotResult! } """ @@ -375,485 +375,491 @@ scalar DateTime Describes """ type EngineDesc { - """ - A short name of the engine, e.g. "Spark", "Flink". - Intended for use in UI for quick engine identification and selection. - """ - name: String! - """ - Language and dialect this engine is using for queries - Indended for configuring code highlighting and completions. - """ - dialect: QueryDialect! - """ - OCI image repository and a tag of the latest engine image, e.g. - "ghcr.io/kamu-data/engine-datafusion:0.1.2" - """ - latestImage: String! + """ + A short name of the engine, e.g. "Spark", "Flink". + Intended for use in UI for quick engine identification and selection. + """ + name: String! + """ + Language and dialect this engine is using for queries + Indended for configuring code highlighting and completions. + """ + dialect: QueryDialect! + """ + OCI image repository and a tag of the latest engine image, e.g. + "ghcr.io/kamu-data/engine-datafusion:0.1.2" + """ + latestImage: String! } type EnvVar { - name: String! - value: String + name: String! + value: String } union EventTimeSource = EventTimeSourceFromMetadata | EventTimeSourceFromPath type EventTimeSourceFromMetadata { - dummy: String + dummy: String } type EventTimeSourceFromPath { - pattern: String! - timestampFormat: String + pattern: String! + timestampFormat: String } type ExecuteQuery { - inputSlices: [InputSlice!]! - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime + inputSlices: [InputSlice!]! 
+ inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime } union FetchStep = FetchStepUrl | FetchStepFilesGlob | FetchStepContainer type FetchStepContainer { - image: String! - command: [String!] - args: [String!] - env: [EnvVar!] + image: String! + command: [String!] + args: [String!] + env: [EnvVar!] } type FetchStepFilesGlob { - path: String! - eventTime: EventTimeSource - cache: SourceCaching - order: SourceOrdering + path: String! + eventTime: EventTimeSource + cache: SourceCaching + order: SourceOrdering } type FetchStepUrl { - url: String! - eventTime: EventTimeSource - cache: SourceCaching - headers: [RequestHeader!] + url: String! + eventTime: EventTimeSource + cache: SourceCaching + headers: [RequestHeader!] } + + type InputSlice { - datasetId: DatasetID! - blockInterval: BlockInterval - dataInterval: OffsetInterval + datasetId: DatasetID! + blockInterval: BlockInterval + dataInterval: OffsetInterval } + type LoginResponse { - token: AccessToken! - accountInfo: AccountInfo! + token: AccessToken! + accountInfo: AccountInfo! } -union MergeStrategy = - MergeStrategyAppend - | MergeStrategyLedger - | MergeStrategySnapshot +union MergeStrategy = MergeStrategyAppend | MergeStrategyLedger | MergeStrategySnapshot type MergeStrategyAppend { - dummy: String + dummy: String } type MergeStrategyLedger { - primaryKey: [String!]! + primaryKey: [String!]! } type MergeStrategySnapshot { - primaryKey: [String!]! - compareColumns: [String!] - observationColumn: String - obsvAdded: String - obsvChanged: String - obsvRemoved: String + primaryKey: [String!]! + compareColumns: [String!] + observationColumn: String + obsvAdded: String + obsvChanged: String + obsvRemoved: String } type MetadataBlockConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [MetadataBlockExtended!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [MetadataBlockEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [MetadataBlockExtended!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [MetadataBlockEdge!]! } type MetadataBlockEdge { - node: MetadataBlockExtended! + node: MetadataBlockExtended! } type MetadataBlockExtended { - blockHash: Multihash! - prevBlockHash: Multihash - systemTime: DateTime! - author: Account! - event: MetadataEvent! - sequenceNumber: Int! + blockHash: Multihash! + prevBlockHash: Multihash + systemTime: DateTime! + author: Account! + event: MetadataEvent! + sequenceNumber: Int! } type MetadataChain { - """ - Returns all named metadata block references - """ - refs: [BlockRef!]! - """ - Returns a metadata block corresponding to the specified hash - """ - blockByHash(hash: Multihash!): MetadataBlockExtended - """ - Returns a metadata block corresponding to the specified hash and encoded - in desired format - """ - blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String - """ - Iterates all metadata blocks in the reverse chronological order - """ - blocks(page: Int, perPage: Int): MetadataBlockConnection! - """ - Commits new event to the metadata chain - """ - commitEvent( - event: String! - eventFormat: MetadataManifestFormat! - ): CommitResult! 
-} - -union MetadataEvent = - AddData - | ExecuteQuery - | Seed - | SetPollingSource - | SetTransform - | SetVocab - | SetWatermark - | SetAttachments - | SetInfo - | SetLicense + """ + Returns all named metadata block references + """ + refs: [BlockRef!]! + """ + Returns a metadata block corresponding to the specified hash + """ + blockByHash(hash: Multihash!): MetadataBlockExtended + """ + Returns a metadata block corresponding to the specified hash and encoded + in desired format + """ + blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String + """ + Iterates all metadata blocks in the reverse chronological order + """ + blocks(page: Int, perPage: Int): MetadataBlockConnection! +} + +type MetadataChainMut { + """ + Commits new event to the metadata chain + """ + commitEvent(event: String!, eventFormat: MetadataManifestFormat!): CommitResult! +} + +union MetadataEvent = AddData | ExecuteQuery | Seed | SetPollingSource | SetTransform | SetVocab | SetWatermark | SetAttachments | SetInfo | SetLicense enum MetadataManifestFormat { - YAML + YAML } type MetadataManifestMalformed implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! } type MetadataManifestUnsupportedVersion implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! } scalar Multihash type Mutation { - auth: Auth! - tasks: TasksMutations! + """ + Authentication and authorization-related functionality group + """ + auth: AuthMut! + """ + Dataset-related functionality group. + + Datasets are historical streams of events recorded under a cetrain + schema. + """ + datasets: DatasetsMut! + """ + Tasks-related functionality group. + + Tasks are units of work scheduled and executed by the system to query + and process data. + """ + tasks: TasksMut! +} + +type NoChanges implements CommitResult & UpdateReadmeResult { + message: String! } type OffsetInterval { - start: Int! - end: Int! + start: Int! + end: Int! } type Organization implements Account { - """ - Unique and stable identitfier of this organization account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this organization account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! } type PageBasedInfo { - """ - When paginating backwards, are there more items? - """ - hasPreviousPage: Boolean! - """ - When paginating forwards, are there more items? - """ - hasNextPage: Boolean! - """ - Index of the current page - """ - currentPage: Int! - """ - Approximate number of total pages assuming number of nodes per page - stays the same - """ - totalPages: Int + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + Index of the current page + """ + currentPage: Int! + """ + Approximate number of total pages assuming number of nodes per page + stays the same + """ + totalPages: Int } union PrepStep = PrepStepDecompress | PrepStepPipe type PrepStepDecompress { - format: CompressionFormat! - subPath: String + format: CompressionFormat! + subPath: String } type PrepStepPipe { - command: [String!]! + command: [String!]! } type Query { - """ - Returns the version of the GQL API - """ - apiVersion: String! - """ - Dataset-related functionality group. - - Datasets are historical streams of events recorded under a cetrain - schema. - """ - datasets: Datasets! 
- """ - Account-related functionality group. - - Accounts can be individual users or organizations registered in the - system. This groups deals with their identities and permissions. - """ - accounts: Accounts! - """ - Task-related functionality group. - - Tasks are units of scheduling that can perform many functions like - ingesting new data, running dataset transformations, answering ad-hoc - queries etc. - """ - tasks: Tasks! - """ - Search-related functionality group. - """ - search: Search! - """ - Querying and data manipulations - """ - data: DataQueries! + """ + Returns the version of the GQL API + """ + apiVersion: String! + """ + Dataset-related functionality group. + + Datasets are historical streams of events recorded under a cetrain + schema. + """ + datasets: Datasets! + """ + Account-related functionality group. + + Accounts can be individual users or organizations registered in the + system. This groups deals with their identities and permissions. + """ + accounts: Accounts! + """ + Task-related functionality group. + + Tasks are units of scheduling that can perform many functions like + ingesting new data, running dataset transformations, answering ad-hoc + queries etc. + """ + tasks: Tasks! + """ + Search-related functionality group. + """ + search: Search! + """ + Querying and data manipulations + """ + data: DataQueries! } enum QueryDialect { - SQL_SPARK - SQL_FLINK - SQL_DATA_FUSION + SQL_SPARK + SQL_FLINK + SQL_DATA_FUSION } -union ReadStep = - ReadStepCsv - | ReadStepJsonLines - | ReadStepGeoJson - | ReadStepEsriShapefile - | ReadStepParquet +union ReadStep = ReadStepCsv | ReadStepJsonLines | ReadStepGeoJson | ReadStepEsriShapefile | ReadStepParquet type ReadStepCsv { - schema: [String!] - separator: String - encoding: String - quote: String - escape: String - comment: String - header: Boolean - enforceSchema: Boolean - inferSchema: Boolean - ignoreLeadingWhiteSpace: Boolean - ignoreTrailingWhiteSpace: Boolean - nullValue: String - emptyValue: String - nanValue: String - positiveInf: String - negativeInf: String - dateFormat: String - timestampFormat: String - multiLine: Boolean + schema: [String!] + separator: String + encoding: String + quote: String + escape: String + comment: String + header: Boolean + enforceSchema: Boolean + inferSchema: Boolean + ignoreLeadingWhiteSpace: Boolean + ignoreTrailingWhiteSpace: Boolean + nullValue: String + emptyValue: String + nanValue: String + positiveInf: String + negativeInf: String + dateFormat: String + timestampFormat: String + multiLine: Boolean } type ReadStepEsriShapefile { - schema: [String!] - subPath: String + schema: [String!] + subPath: String } type ReadStepGeoJson { - schema: [String!] + schema: [String!] } type ReadStepJsonLines { - schema: [String!] - dateFormat: String - encoding: String - multiLine: Boolean - primitivesAsString: Boolean - timestampFormat: String + schema: [String!] + dateFormat: String + encoding: String + multiLine: Boolean + primitivesAsString: Boolean + timestampFormat: String } type ReadStepParquet { - schema: [String!] + schema: [String!] } type RequestHeader { - name: String! - value: String! + name: String! + value: String! } type Search { - """ - Perform search across all resources - """ - query(query: String!, page: Int, perPage: Int): SearchResultConnection! + """ + Perform search across all resources + """ + query(query: String!, page: Int, perPage: Int): SearchResultConnection! } union SearchResult = Dataset type SearchResultConnection { - """ - A shorthand for `edges { node { ... 
} }` - """ - nodes: [SearchResult!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [SearchResultEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [SearchResult!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [SearchResultEdge!]! } type SearchResultEdge { - node: SearchResult! + node: SearchResult! } type Seed { - datasetId: DatasetID! - datasetKind: DatasetKind! + datasetId: DatasetID! + datasetKind: DatasetKind! } type SetAttachments { - attachments: Attachments! + attachments: Attachments! } type SetInfo { - description: String - keywords: [String!] + description: String + keywords: [String!] } type SetLicense { - shortName: String! - name: String! - spdxId: String - websiteUrl: String! + shortName: String! + name: String! + spdxId: String + websiteUrl: String! } type SetPollingSource { - fetch: FetchStep! - prepare: [PrepStep!] - read: ReadStep! - preprocess: Transform - merge: MergeStrategy! + fetch: FetchStep! + prepare: [PrepStep!] + read: ReadStep! + preprocess: Transform + merge: MergeStrategy! } type SetTransform { - inputs: [TransformInput!]! - transform: Transform! + inputs: [TransformInput!]! + transform: Transform! } type SetVocab { - systemTimeColumn: String - eventTimeColumn: String - offsetColumn: String + systemTimeColumn: String + eventTimeColumn: String + offsetColumn: String } type SetWatermark { - outputWatermark: DateTime! + outputWatermark: DateTime! } union SourceCaching = SourceCachingForever type SourceCachingForever { - dummy: String + dummy: String } enum SourceOrdering { - BY_EVENT_TIME - BY_NAME + BY_EVENT_TIME + BY_NAME } type SourceState { - kind: String! - source: String! - value: String! + kind: String! + source: String! + value: String! } type SqlQueryStep { - alias: String - query: String! + alias: String + query: String! } + type Task { - """ - Unique and stable identitfier of this task - """ - taskId: TaskID! - """ - Life-cycle status of a task - """ - status: TaskStatus! - """ - Whether the task was ordered to be cancelled - """ - cancellationRequested: Boolean! - """ - Describes a certain final outcome of the task once it reaches the - "finished" status - """ - outcome: TaskOutcome - """ - Time when task was originally created and placed in a queue - """ - createdAt: DateTime! - """ - Time when task transitioned into a running state - """ - ranAt: DateTime - """ - Time when cancellation of task was requested - """ - cancellationRequestedAt: DateTime - """ - Time when task has reached a final outcome - """ - finishedAt: DateTime + """ + Unique and stable identitfier of this task + """ + taskId: TaskID! + """ + Life-cycle status of a task + """ + status: TaskStatus! + """ + Whether the task was ordered to be cancelled + """ + cancellationRequested: Boolean! + """ + Describes a certain final outcome of the task once it reaches the + "finished" status + """ + outcome: TaskOutcome + """ + Time when task was originally created and placed in a queue + """ + createdAt: DateTime! + """ + Time when task transitioned into a running state + """ + ranAt: DateTime + """ + Time when cancellation of task was requested + """ + cancellationRequestedAt: DateTime + """ + Time when task has reached a final outcome + """ + finishedAt: DateTime } type TaskConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Task!]! 
- """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [TaskEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Task!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [TaskEdge!]! } type TaskEdge { - node: Task! + node: Task! } scalar TaskID @@ -862,108 +868,104 @@ scalar TaskID Describes a certain final outcome of the task """ enum TaskOutcome { - """ - Task succeeded - """ - SUCCESS - """ - Task failed to complete - """ - FAILED - """ - Task was cancelled by a user - """ - CANCELLED + """ + Task succeeded + """ + SUCCESS + """ + Task failed to complete + """ + FAILED + """ + Task was cancelled by a user + """ + CANCELLED } """ Life-cycle status of a task """ enum TaskStatus { - """ - Task is waiting for capacity to be allocated to it - """ - QUEUED - """ - Task is being executed - """ - RUNNING - """ - Task has reached a certain final outcome (see [TaskOutcome]) - """ - FINISHED + """ + Task is waiting for capacity to be allocated to it + """ + QUEUED + """ + Task is being executed + """ + RUNNING + """ + Task has reached a certain final outcome (see [TaskOutcome]) + """ + FINISHED } type Tasks { - """ - Returns current state of a given task - """ - getTask(taskId: TaskID!): Task - """ - Returns states of tasks associated with a given dataset ordered by - creation time from newest to oldest - """ - listTasksByDataset( - datasetId: DatasetID! - page: Int - perPage: Int - ): TaskConnection! -} - -type TasksMutations { - """ - Requests cancellation of the specified task - """ - cancelTask(taskId: TaskID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createUpdateDatasetTask(datasetId: DatasetID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createProbeTask( - datasetId: DatasetID - busyTimeMs: Int - endWithOutcome: TaskOutcome - ): Task! + """ + Returns current state of a given task + """ + getTask(taskId: TaskID!): Task + """ + Returns states of tasks associated with a given dataset ordered by + creation time from newest to oldest + """ + listTasksByDataset(datasetId: DatasetID!, page: Int, perPage: Int): TaskConnection! +} + +type TasksMut { + """ + Requests cancellation of the specified task + """ + cancelTask(taskId: TaskID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createUpdateDatasetTask(datasetId: DatasetID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createProbeTask(datasetId: DatasetID, busyTimeMs: Int, endWithOutcome: TaskOutcome): Task! } type TemporalTable { - name: String! - primaryKey: [String!]! + name: String! + primaryKey: [String!]! } union Transform = TransformSql type TransformInput { - id: DatasetID - name: DatasetName! - datasetRef: DatasetRefAny - dataset: Dataset! + id: DatasetID + name: DatasetName! + datasetRef: DatasetRefAny + dataset: Dataset! } type TransformSql { - engine: String! - version: String - queries: [SqlQueryStep!]! - temporalTables: [TemporalTable!] + engine: String! + version: String + queries: [SqlQueryStep!]! + temporalTables: [TemporalTable!] +} + +interface UpdateReadmeResult { + message: String! 
} type User implements Account { - """ - Unique and stable identitfier of this user account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this user account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! } schema { - query: Query - mutation: Mutation + query: Query + mutation: Mutation } From 38524f2a5688a3afac30235b2a8b09a2af0a9622 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Wed, 2 Aug 2023 14:39:17 +0300 Subject: [PATCH 05/16] Change #commitEventToDataset method from DatasetCreateService. --- src/app/api/dataset.api.ts | 61 +++-- .../gql/create-dataset/commit-event.graphql | 8 +- .../create-empty-dataset.graphql | 2 +- .../create-from-snapshot.graphql | 2 +- ...ataset-by-account-and-dataset-name.graphql | 10 + src/app/api/kamu.graphql.interface.ts | 238 ++++++++++++------ src/app/api/mock/auth.mock.ts | 4 +- src/app/app.module.ts | 6 + .../dataset-create/dataset-create.service.ts | 140 +++++++---- 9 files changed, 317 insertions(+), 154 deletions(-) create mode 100644 src/app/api/gql/dataset-by-account-and-dataset-name.graphql diff --git a/src/app/api/dataset.api.ts b/src/app/api/dataset.api.ts index 8e67e7704..32d9a8ad1 100644 --- a/src/app/api/dataset.api.ts +++ b/src/app/api/dataset.api.ts @@ -1,9 +1,11 @@ import { CommitEventToDatasetGQL, - CommitEventToDatasetQuery, + CommitEventToDatasetMutation, CreateDatasetFromSnapshotGQL, - CreateDatasetFromSnapshotQuery, - CreateEmptyDatasetQuery, + CreateDatasetFromSnapshotMutation, + CreateEmptyDatasetMutation, + DatasetByAccountAndDatasetNameGQL, + DatasetByAccountAndDatasetNameQuery, DatasetKind, GetDatasetSchemaGQL, GetDatasetSchemaQuery, @@ -30,6 +32,7 @@ import { DatasetByIdGQL, CreateEmptyDatasetGQL, } from "./kamu.graphql.interface"; +import { MutationResult } from "apollo-angular"; @Injectable({ providedIn: "root" }) export class DatasetApi { @@ -40,6 +43,7 @@ export class DatasetApi { private datasetsByAccountNameGQL: DatasetsByAccountNameGQL, private metadataBlockGQL: GetMetadataBlockGQL, private datasetByIdGQL: DatasetByIdGQL, + private datasetByAccountAndDatasetNameGQL: DatasetByAccountAndDatasetNameGQL, private createEmptyDatasetGQL: CreateEmptyDatasetGQL, private createDatasetFromSnapshotGQL: CreateDatasetFromSnapshotGQL, private commitEventToDataset: CommitEventToDatasetGQL, @@ -162,17 +166,38 @@ export class DatasetApi { ); } + public getDatasetInfoByAccountAndDatasetName( + accountName: string, + datasetName: string, + ): Observable { + return this.datasetByAccountAndDatasetNameGQL + .watch({ + accountName, + datasetName, + }) + .valueChanges.pipe( + first(), + map( + ( + result: ApolloQueryResult, + ) => { + return result.data; + }, + ), + ); + } + public createDatasetFromSnapshot( accountId: string, snapshot: string, - ): Observable { + ): Observable { return this.createDatasetFromSnapshotGQL - .watch({ accountId, snapshot }) - .valueChanges.pipe( + .mutate({ accountId, snapshot }) + .pipe( first(), map( ( - result: ApolloQueryResult, + result: MutationResult, ) => { return result.data; }, @@ -184,31 +209,29 @@ export class DatasetApi { accountId: string, datasetKind: DatasetKind, datasetName: string, - ): Observable { + ): Observable { return this.createEmptyDatasetGQL - .watch({ accountId, datasetKind, datasetName }) - .valueChanges.pipe( + .mutate({ accountId, datasetKind, datasetName }) + .pipe( first(), - map((result: ApolloQueryResult) => { + map((result: MutationResult) => { return result.data; }), ); } public 
commitEvent(params: { - accountName: string; - datasetName: string; + datasetId: string; event: string; - }): Observable { + }): Observable { return this.commitEventToDataset - .watch({ - accountName: params.accountName, - datasetName: params.datasetName, + .mutate({ + datasetId: params.datasetId, event: params.event, }) - .valueChanges.pipe( + .pipe( first(), - map((result: ApolloQueryResult) => { + map((result: MutationResult) => { return result.data; }), ); diff --git a/src/app/api/gql/create-dataset/commit-event.graphql b/src/app/api/gql/create-dataset/commit-event.graphql index 913c70027..ecae479ba 100644 --- a/src/app/api/gql/create-dataset/commit-event.graphql +++ b/src/app/api/gql/create-dataset/commit-event.graphql @@ -1,10 +1,6 @@ -query commitEventToDataset( - $accountName: AccountName! - $datasetName: DatasetName! - $event: String! -) { +mutation commitEventToDataset($datasetId: DatasetID!, $event: String!) { datasets { - byOwnerAndName(accountName: $accountName, datasetName: $datasetName) { + byId(datasetId: $datasetId) { metadata { chain { commitEvent(event: $event, eventFormat: YAML) { diff --git a/src/app/api/gql/create-dataset/create-empty-dataset.graphql b/src/app/api/gql/create-dataset/create-empty-dataset.graphql index ca148b55a..451f26892 100644 --- a/src/app/api/gql/create-dataset/create-empty-dataset.graphql +++ b/src/app/api/gql/create-dataset/create-empty-dataset.graphql @@ -1,4 +1,4 @@ -query createEmptyDataset( +mutation createEmptyDataset( $accountId: AccountID! $datasetKind: DatasetKind! $datasetName: DatasetName! diff --git a/src/app/api/gql/create-dataset/create-from-snapshot.graphql b/src/app/api/gql/create-dataset/create-from-snapshot.graphql index ed2891c68..dfa041cb0 100644 --- a/src/app/api/gql/create-dataset/create-from-snapshot.graphql +++ b/src/app/api/gql/create-dataset/create-from-snapshot.graphql @@ -1,4 +1,4 @@ -query createDatasetFromSnapshot($accountId: AccountID!, $snapshot: String!) { +mutation createDatasetFromSnapshot($accountId: AccountID!, $snapshot: String!) { datasets { createFromSnapshot( accountId: $accountId diff --git a/src/app/api/gql/dataset-by-account-and-dataset-name.graphql b/src/app/api/gql/dataset-by-account-and-dataset-name.graphql new file mode 100644 index 000000000..6b066ab32 --- /dev/null +++ b/src/app/api/gql/dataset-by-account-and-dataset-name.graphql @@ -0,0 +1,10 @@ +query datasetByAccountAndDatasetName( + $accountName: AccountName! + $datasetName: DatasetName! 
+) { + datasets { + byOwnerAndName(accountName: $accountName, datasetName: $datasetName) { + ...DatasetBasics + } + } +} diff --git a/src/app/api/kamu.graphql.interface.ts b/src/app/api/kamu.graphql.interface.ts index 68dbcd762..e9dd4baca 100644 --- a/src/app/api/kamu.graphql.interface.ts +++ b/src/app/api/kamu.graphql.interface.ts @@ -94,17 +94,17 @@ export type AttachmentsEmbedded = { items: Array; }; -export type Auth = { - __typename?: "Auth"; +export type AuthMut = { + __typename?: "AuthMut"; accountInfo: AccountInfo; githubLogin: LoginResponse; }; -export type AuthAccountInfoArgs = { +export type AuthMutAccountInfoArgs = { accessToken: Scalars["String"]; }; -export type AuthGithubLoginArgs = { +export type AuthMutGithubLoginArgs = { code: Scalars["String"]; }; @@ -130,17 +130,19 @@ export type CommitResult = { message: Scalars["String"]; }; -export type CommitResultAppendError = CommitResult & { - __typename?: "CommitResultAppendError"; - message: Scalars["String"]; -}; +export type CommitResultAppendError = CommitResult & + UpdateReadmeResult & { + __typename?: "CommitResultAppendError"; + message: Scalars["String"]; + }; -export type CommitResultSuccess = CommitResult & { - __typename?: "CommitResultSuccess"; - message: Scalars["String"]; - newHead: Scalars["Multihash"]; - oldHead?: Maybe; -}; +export type CommitResultSuccess = CommitResult & + UpdateReadmeResult & { + __typename?: "CommitResultSuccess"; + message: Scalars["String"]; + newHead: Scalars["Multihash"]; + oldHead?: Maybe; + }; export enum CompressionFormat { Gzip = "GZIP", @@ -353,6 +355,24 @@ export type DatasetMetadataCurrentSchemaArgs = { format?: InputMaybe; }; +export type DatasetMetadataMut = { + __typename?: "DatasetMetadataMut"; + /** Access to the mutable metadata chain of the dataset */ + chain: MetadataChainMut; + /** Updates or clears the dataset readme */ + updateReadme: UpdateReadmeResult; +}; + +export type DatasetMetadataMutUpdateReadmeArgs = { + content?: InputMaybe; +}; + +export type DatasetMut = { + __typename?: "DatasetMut"; + /** Access to the mutable metadata of the dataset */ + metadata: DatasetMetadataMut; +}; + export type Datasets = { __typename?: "Datasets"; /** Returns datasets belonging to the specified account */ @@ -363,10 +383,6 @@ export type Datasets = { byId?: Maybe; /** Returns dataset by its owner and name */ byOwnerAndName?: Maybe; - /** Creates a new empty dataset */ - createEmpty: CreateDatasetResult; - /** Creates a new dataset from provided DatasetSnapshot manifest */ - createFromSnapshot: CreateDatasetFromSnapshotResult; }; export type DatasetsByAccountIdArgs = { @@ -390,13 +406,27 @@ export type DatasetsByOwnerAndNameArgs = { datasetName: Scalars["DatasetName"]; }; -export type DatasetsCreateEmptyArgs = { +export type DatasetsMut = { + __typename?: "DatasetsMut"; + /** Returns a mutable dataset by its ID */ + byId?: Maybe; + /** Creates a new empty dataset */ + createEmpty: CreateDatasetResult; + /** Creates a new dataset from provided DatasetSnapshot manifest */ + createFromSnapshot: CreateDatasetFromSnapshotResult; +}; + +export type DatasetsMutByIdArgs = { + datasetId: Scalars["DatasetID"]; +}; + +export type DatasetsMutCreateEmptyArgs = { accountId: Scalars["AccountID"]; datasetKind: DatasetKind; datasetName: Scalars["DatasetName"]; }; -export type DatasetsCreateFromSnapshotArgs = { +export type DatasetsMutCreateFromSnapshotArgs = { accountId: Scalars["AccountID"]; snapshot: Scalars["String"]; snapshotFormat: MetadataManifestFormat; @@ -553,8 +583,6 @@ export type 
MetadataChain = { blockByHashEncoded?: Maybe; /** Iterates all metadata blocks in the reverse chronological order */ blocks: MetadataBlockConnection; - /** Commits new event to the metadata chain */ - commitEvent: CommitResult; /** Returns all named metadata block references */ refs: Array; }; @@ -573,7 +601,13 @@ export type MetadataChainBlocksArgs = { perPage?: InputMaybe; }; -export type MetadataChainCommitEventArgs = { +export type MetadataChainMut = { + __typename?: "MetadataChainMut"; + /** Commits new event to the metadata chain */ + commitEvent: CommitResult; +}; + +export type MetadataChainMutCommitEventArgs = { event: Scalars["String"]; eventFormat: MetadataManifestFormat; }; @@ -608,10 +642,30 @@ export type MetadataManifestUnsupportedVersion = CommitResult & export type Mutation = { __typename?: "Mutation"; - auth: Auth; - tasks: TasksMutations; + /** Authentication and authorization-related functionality group */ + auth: AuthMut; + /** + * Dataset-related functionality group. + * + * Datasets are historical streams of events recorded under a cetrain + * schema. + */ + datasets: DatasetsMut; + /** + * Tasks-related functionality group. + * + * Tasks are units of work scheduled and executed by the system to query + * and process data. + */ + tasks: TasksMut; }; +export type NoChanges = CommitResult & + UpdateReadmeResult & { + __typename?: "NoChanges"; + message: Scalars["String"]; + }; + export type OffsetInterval = { __typename?: "OffsetInterval"; end: Scalars["Int"]; @@ -941,8 +995,8 @@ export type TasksListTasksByDatasetArgs = { perPage?: InputMaybe; }; -export type TasksMutations = { - __typename?: "TasksMutations"; +export type TasksMut = { + __typename?: "TasksMut"; /** Requests cancellation of the specified task */ cancelTask: Task; /** @@ -957,17 +1011,17 @@ export type TasksMutations = { createUpdateDatasetTask: Task; }; -export type TasksMutationsCancelTaskArgs = { +export type TasksMutCancelTaskArgs = { taskId: Scalars["TaskID"]; }; -export type TasksMutationsCreateProbeTaskArgs = { +export type TasksMutCreateProbeTaskArgs = { busyTimeMs?: InputMaybe; datasetId?: InputMaybe; endWithOutcome?: InputMaybe; }; -export type TasksMutationsCreateUpdateDatasetTaskArgs = { +export type TasksMutCreateUpdateDatasetTaskArgs = { datasetId: Scalars["DatasetID"]; }; @@ -995,6 +1049,10 @@ export type TransformSql = { version?: Maybe; }; +export type UpdateReadmeResult = { + message: Scalars["String"]; +}; + export type User = Account & { __typename?: "User"; /** Unique and stable identitfier of this user account */ @@ -1003,22 +1061,21 @@ export type User = Account & { name: Scalars["String"]; }; -export type CommitEventToDatasetQueryVariables = Exact<{ - accountName: Scalars["AccountName"]; - datasetName: Scalars["DatasetName"]; +export type CommitEventToDatasetMutationVariables = Exact<{ + datasetId: Scalars["DatasetID"]; event: Scalars["String"]; }>; -export type CommitEventToDatasetQuery = { - __typename?: "Query"; +export type CommitEventToDatasetMutation = { + __typename?: "Mutation"; datasets: { - __typename?: "Datasets"; - byOwnerAndName?: { - __typename?: "Dataset"; + __typename?: "DatasetsMut"; + byId?: { + __typename?: "DatasetMut"; metadata: { - __typename?: "DatasetMetadata"; + __typename?: "DatasetMetadataMut"; chain: { - __typename?: "MetadataChain"; + __typename?: "MetadataChainMut"; commitEvent: | { __typename: "CommitResultAppendError"; @@ -1034,23 +1091,24 @@ export type CommitEventToDatasetQuery = { __typename: "MetadataManifestMalformed"; message: string; } - | 
{ __typename: "MetadataManifestUnsupportedVersion" }; + | { __typename: "MetadataManifestUnsupportedVersion" } + | { __typename: "NoChanges" }; }; }; } | null; }; }; -export type CreateEmptyDatasetQueryVariables = Exact<{ +export type CreateEmptyDatasetMutationVariables = Exact<{ accountId: Scalars["AccountID"]; datasetKind: DatasetKind; datasetName: Scalars["DatasetName"]; }>; -export type CreateEmptyDatasetQuery = { - __typename?: "Query"; +export type CreateEmptyDatasetMutation = { + __typename?: "Mutation"; datasets: { - __typename?: "Datasets"; + __typename?: "DatasetsMut"; createEmpty: | { __typename?: "CreateDatasetResultNameCollision"; @@ -1060,15 +1118,15 @@ export type CreateEmptyDatasetQuery = { }; }; -export type CreateDatasetFromSnapshotQueryVariables = Exact<{ +export type CreateDatasetFromSnapshotMutationVariables = Exact<{ accountId: Scalars["AccountID"]; snapshot: Scalars["String"]; }>; -export type CreateDatasetFromSnapshotQuery = { - __typename?: "Query"; +export type CreateDatasetFromSnapshotMutation = { + __typename?: "Mutation"; datasets: { - __typename?: "Datasets"; + __typename?: "DatasetsMut"; createFromSnapshot: | { __typename?: "CreateDatasetResultInvalidSnapshot"; @@ -1095,6 +1153,21 @@ export type CreateDatasetFromSnapshotQuery = { }; }; +export type DatasetByAccountAndDatasetNameQueryVariables = Exact<{ + accountName: Scalars["AccountName"]; + datasetName: Scalars["DatasetName"]; +}>; + +export type DatasetByAccountAndDatasetNameQuery = { + __typename?: "Query"; + datasets: { + __typename?: "Datasets"; + byOwnerAndName?: + | ({ __typename?: "Dataset" } & DatasetBasicsFragment) + | null; + }; +}; + export type DatasetByIdQueryVariables = Exact<{ datasetId: Scalars["DatasetID"]; }>; @@ -1790,7 +1863,7 @@ export type GithubLoginMutationVariables = Exact<{ export type GithubLoginMutation = { __typename?: "Mutation"; auth: { - __typename?: "Auth"; + __typename?: "AuthMut"; githubLogin: { __typename?: "LoginResponse"; token: { @@ -1813,7 +1886,7 @@ export type FetchAccountInfoMutationVariables = Exact<{ export type FetchAccountInfoMutation = { __typename?: "Mutation"; auth: { - __typename?: "Auth"; + __typename?: "AuthMut"; accountInfo: { __typename?: "AccountInfo" } & AccountDetailsFragment; }; }; @@ -2441,16 +2514,9 @@ export const DatasetSearchOverviewFragmentDoc = gql` ${LicenseFragmentDoc} `; export const CommitEventToDatasetDocument = gql` - query commitEventToDataset( - $accountName: AccountName! - $datasetName: DatasetName! - $event: String! - ) { + mutation commitEventToDataset($datasetId: DatasetID!, $event: String!) { datasets { - byOwnerAndName( - accountName: $accountName - datasetName: $datasetName - ) { + byId(datasetId: $datasetId) { metadata { chain { commitEvent(event: $event, eventFormat: YAML) { @@ -2477,9 +2543,9 @@ export const CommitEventToDatasetDocument = gql` @Injectable({ providedIn: "root", }) -export class CommitEventToDatasetGQL extends Apollo.Query< - CommitEventToDatasetQuery, - CommitEventToDatasetQueryVariables +export class CommitEventToDatasetGQL extends Apollo.Mutation< + CommitEventToDatasetMutation, + CommitEventToDatasetMutationVariables > { document = CommitEventToDatasetDocument; @@ -2488,7 +2554,7 @@ export class CommitEventToDatasetGQL extends Apollo.Query< } } export const CreateEmptyDatasetDocument = gql` - query createEmptyDataset( + mutation createEmptyDataset( $accountId: AccountID! $datasetKind: DatasetKind! $datasetName: DatasetName! 
@@ -2508,9 +2574,9 @@ export const CreateEmptyDatasetDocument = gql` @Injectable({ providedIn: "root", }) -export class CreateEmptyDatasetGQL extends Apollo.Query< - CreateEmptyDatasetQuery, - CreateEmptyDatasetQueryVariables +export class CreateEmptyDatasetGQL extends Apollo.Mutation< + CreateEmptyDatasetMutation, + CreateEmptyDatasetMutationVariables > { document = CreateEmptyDatasetDocument; @@ -2519,7 +2585,7 @@ export class CreateEmptyDatasetGQL extends Apollo.Query< } } export const CreateDatasetFromSnapshotDocument = gql` - query createDatasetFromSnapshot( + mutation createDatasetFromSnapshot( $accountId: AccountID! $snapshot: String! ) { @@ -2544,9 +2610,9 @@ export const CreateDatasetFromSnapshotDocument = gql` @Injectable({ providedIn: "root", }) -export class CreateDatasetFromSnapshotGQL extends Apollo.Query< - CreateDatasetFromSnapshotQuery, - CreateDatasetFromSnapshotQueryVariables +export class CreateDatasetFromSnapshotGQL extends Apollo.Mutation< + CreateDatasetFromSnapshotMutation, + CreateDatasetFromSnapshotMutationVariables > { document = CreateDatasetFromSnapshotDocument; @@ -2554,6 +2620,36 @@ export class CreateDatasetFromSnapshotGQL extends Apollo.Query< super(apollo); } } +export const DatasetByAccountAndDatasetNameDocument = gql` + query datasetByAccountAndDatasetName( + $accountName: AccountName! + $datasetName: DatasetName! + ) { + datasets { + byOwnerAndName( + accountName: $accountName + datasetName: $datasetName + ) { + ...DatasetBasics + } + } + } + ${DatasetBasicsFragmentDoc} +`; + +@Injectable({ + providedIn: "root", +}) +export class DatasetByAccountAndDatasetNameGQL extends Apollo.Query< + DatasetByAccountAndDatasetNameQuery, + DatasetByAccountAndDatasetNameQueryVariables +> { + document = DatasetByAccountAndDatasetNameDocument; + + constructor(apollo: Apollo.Apollo) { + super(apollo); + } +} export const DatasetByIdDocument = gql` query datasetById($datasetId: DatasetID!) 
{ datasets { diff --git a/src/app/api/mock/auth.mock.ts b/src/app/api/mock/auth.mock.ts index 8a84cad85..5450bf1dd 100644 --- a/src/app/api/mock/auth.mock.ts +++ b/src/app/api/mock/auth.mock.ts @@ -19,14 +19,14 @@ export const mockAccountDetails: AccountDetailsFragment = { export const mockUserInfoFromAccessToken: FetchAccountInfoMutation = { auth: { - __typename: "Auth", + __typename: "AuthMut", accountInfo: mockAccountDetails, }, }; export const mockGithubLoginResponse: GithubLoginMutation = { auth: { - __typename: "Auth", + __typename: "AuthMut", githubLogin: { __typename: "LoginResponse", token: { diff --git a/src/app/app.module.ts b/src/app/app.module.ts index e6ad8736c..3853150ea 100644 --- a/src/app/app.module.ts +++ b/src/app/app.module.ts @@ -96,6 +96,12 @@ const Services = [ link: httpLink.create({ uri: appConfig.apiServerGqlUrl, }), + defaultOptions: { + watchQuery: { + fetchPolicy: "network-only", + errorPolicy: "all", + }, + }, }; }, deps: [HttpLink, AppConfigService], diff --git a/src/app/dataset-create/dataset-create.service.ts b/src/app/dataset-create/dataset-create.service.ts index 9376a6aed..feca38496 100644 --- a/src/app/dataset-create/dataset-create.service.ts +++ b/src/app/dataset-create/dataset-create.service.ts @@ -1,13 +1,14 @@ import { - CommitEventToDatasetQuery, - CreateDatasetFromSnapshotQuery, - CreateEmptyDatasetQuery, + CommitEventToDatasetMutation, + CreateDatasetFromSnapshotMutation, + CreateEmptyDatasetMutation, + DatasetByAccountAndDatasetNameQuery, } from "./../api/kamu.graphql.interface"; import { Observable, Subject } from "rxjs"; import { DatasetApi } from "src/app/api/dataset.api"; import { Injectable } from "@angular/core"; import { DatasetKind } from "../api/kamu.graphql.interface"; -import { map } from "rxjs/operators"; +import { map, switchMap } from "rxjs/operators"; import { NavigationService } from "../services/navigation.service"; import { DatasetViewTypeEnum } from "../dataset-view/dataset-view.interface"; import { DatasetService } from "../dataset-view/dataset.service"; @@ -33,6 +34,7 @@ export class AppDatasetCreateService { public get onErrorCommitEventChanges(): Observable { return this.errorCommitEventChanges$.asObservable(); } + private cache = new Map(); public constructor( private datasetApi: DatasetApi, @@ -48,9 +50,9 @@ export class AppDatasetCreateService { return this.datasetApi .createEmptyDataset(accountId, datasetKind, datasetName) .pipe( - map((data: CreateEmptyDatasetQuery) => { + map((data: CreateEmptyDatasetMutation | null | undefined) => { if ( - data.datasets.createEmpty.__typename === + data?.datasets.createEmpty.__typename === "CreateDatasetResultSuccess" ) { this.navigationService.navigateToDatasetView({ @@ -59,9 +61,10 @@ export class AppDatasetCreateService { tab: DatasetViewTypeEnum.Overview, }); } else { - this.errorMessageChanges( - data.datasets.createEmpty.message, - ); + if (data) + this.errorMessageChanges( + data.datasets.createEmpty.message, + ); } }), ); @@ -74,24 +77,32 @@ export class AppDatasetCreateService { return this.datasetApi .createDatasetFromSnapshot(accountId, snapshot) .pipe( - map((data: CreateDatasetFromSnapshotQuery) => { - if ( - data.datasets.createFromSnapshot.__typename === - "CreateDatasetResultSuccess" - ) { - const datasetName = data.datasets.createFromSnapshot - .dataset.name as string; - this.navigationService.navigateToDatasetView({ - accountName: accountId, - datasetName, - tab: DatasetViewTypeEnum.Overview, - }); - } else { - this.errorMessageChanges( - 
data.datasets.createFromSnapshot.message, - ); - } - }), + map( + ( + data: + | CreateDatasetFromSnapshotMutation + | null + | undefined, + ) => { + if ( + data?.datasets.createFromSnapshot.__typename === + "CreateDatasetResultSuccess" + ) { + const datasetName = data.datasets.createFromSnapshot + .dataset.name as string; + this.navigationService.navigateToDatasetView({ + accountName: accountId, + datasetName, + tab: DatasetViewTypeEnum.Overview, + }); + } else { + if (data) + this.errorMessageChanges( + data.datasets.createFromSnapshot.message, + ); + } + }, + ), ); } @@ -100,34 +111,55 @@ export class AppDatasetCreateService { datasetName: string, event: string, ): Observable { - return this.datasetApi - .commitEvent({ accountName, datasetName, event }) - .pipe( - map((data: CommitEventToDatasetQuery) => { - if ( - data.datasets.byOwnerAndName?.metadata.chain.commitEvent - .__typename === "CommitResultAppendError" || - data.datasets.byOwnerAndName?.metadata.chain.commitEvent - .__typename === "MetadataManifestMalformed" - ) { - this.errorCommitEventChanges( - data.datasets.byOwnerAndName.metadata.chain - .commitEvent.message, - ); - } else { - this.datasetService - .requestDatasetMainData({ - accountName, - datasetName, - }) - .subscribe(); - this.navigationService.navigateToDatasetView({ + const key = `${accountName}${datasetName}`; + let observable: Observable< + CommitEventToDatasetMutation | null | undefined + >; + if (this.cache.has(key)) { + observable = this.datasetApi.commitEvent({ + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + datasetId: this.cache.get(key)!, + event, + }); + } else { + observable = this.datasetApi + .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) + .pipe( + switchMap((x: DatasetByAccountAndDatasetNameQuery) => { + const id = x.datasets.byOwnerAndName?.id as string; + this.cache.set(key, id); + return this.datasetApi.commitEvent({ + datasetId: id, + event, + }); + }), + ); + } + return observable.pipe( + map((data: CommitEventToDatasetMutation | undefined | null) => { + if ( + data?.datasets.byId?.metadata.chain.commitEvent + .__typename === "CommitResultAppendError" || + data?.datasets.byId?.metadata.chain.commitEvent + .__typename === "MetadataManifestMalformed" + ) { + this.errorCommitEventChanges( + data.datasets.byId.metadata.chain.commitEvent.message, + ); + } else { + this.datasetService + .requestDatasetMainData({ accountName, datasetName, - tab: DatasetViewTypeEnum.Overview, - }); - } - }), - ); + }) + .subscribe(); + this.navigationService.navigateToDatasetView({ + accountName, + datasetName, + tab: DatasetViewTypeEnum.Overview, + }); + } + }), + ); } } From 64affb5f5447884ff133701b1f7af45edea7dfb7 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Wed, 2 Aug 2023 16:48:01 +0300 Subject: [PATCH 06/16] Integrate new mutation 'updateReadme'. 
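
This patch routes readme updates through the backend's new 'updateReadme'
mutation: DatasetApi gains an updateReadme(datasetId, content) call backed by
update-readme.graphql, AppDatasetCreateService resolves the dataset ID by
owner and name (with a small cache) before invoking it, and the readme
section component no longer builds a 'setAttachments' YAML event by hand.

A minimal caller-side sketch in TypeScript; the wrapper method name below is
illustrative, while updateReadme and the component fields it touches come
from this patch:

    // Hypothetical helper inside a component that injects
    // AppDatasetCreateService as 'createDatasetService'.
    saveReadme(ownerName: string, datasetName: string, content: string): void {
        this.createDatasetService
            .updateReadme(ownerName, datasetName, content)
            // On success the service reloads the dataset main data and
            // navigates back to the Overview tab, so the caller only has
            // to leave markdown edit mode.
            .subscribe(() => (this.isMarkdownEditView = false));
    }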
--- src/app/api/dataset.api.spec.ts | 19 +++--- src/app/api/dataset.api.ts | 24 ++++++- .../gql/create-dataset/update-readme.graphql | 15 +++++ src/app/api/kamu.graphql.interface.ts | 57 +++++++++++++++++ .../dataset-create.service.spec.ts | 18 +++--- .../dataset-create/dataset-create.service.ts | 63 +++++++++++++++---- .../engine-select/engine-select.component.ts | 5 +- .../readme-section.component.ts | 9 +-- src/app/search/mock.data.ts | 10 +-- .../services/templates-yaml-events.service.ts | 16 ----- 10 files changed, 174 insertions(+), 62 deletions(-) create mode 100644 src/app/api/gql/create-dataset/update-readme.graphql diff --git a/src/app/api/dataset.api.spec.ts b/src/app/api/dataset.api.spec.ts index 60a926428..734f237bf 100644 --- a/src/app/api/dataset.api.spec.ts +++ b/src/app/api/dataset.api.spec.ts @@ -19,7 +19,7 @@ import { import { DatasetApi } from "./dataset.api"; import { CommitEventToDatasetDocument, - CommitEventToDatasetQuery, + CommitEventToDatasetMutation, DatasetsByAccountNameDocument, DatasetsByAccountNameQuery, GetDatasetDataSqlRunDocument, @@ -190,16 +190,17 @@ describe("DatasetApi", () => { it("should commit event", () => { service .commitEvent({ - accountName: TEST_USER_NAME, - datasetName: TEST_DATASET_NAME, + datasetId: "mockId", event: "mock event", }) - .subscribe((res: CommitEventToDatasetQuery) => { - expect( - res.datasets.byOwnerAndName?.metadata.chain.commitEvent - .__typename, - ).toEqual("CommitResultSuccess"); - }); + .subscribe( + (res: CommitEventToDatasetMutation | null | undefined) => { + expect( + res?.datasets.byId?.metadata.chain.commitEvent + .__typename, + ).toEqual("CommitResultSuccess"); + }, + ); const op = controller.expectOne(CommitEventToDatasetDocument); expect(op.operation.variables.accountName).toEqual(TEST_USER_NAME); diff --git a/src/app/api/dataset.api.ts b/src/app/api/dataset.api.ts index 32d9a8ad1..93228f8e7 100644 --- a/src/app/api/dataset.api.ts +++ b/src/app/api/dataset.api.ts @@ -9,6 +9,8 @@ import { DatasetKind, GetDatasetSchemaGQL, GetDatasetSchemaQuery, + UpdateReadmeGQL, + UpdateReadmeMutation, } from "src/app/api/kamu.graphql.interface"; import AppValues from "src/app/common/app.values"; import { ApolloQueryResult } from "@apollo/client/core"; @@ -46,8 +48,9 @@ export class DatasetApi { private datasetByAccountAndDatasetNameGQL: DatasetByAccountAndDatasetNameGQL, private createEmptyDatasetGQL: CreateEmptyDatasetGQL, private createDatasetFromSnapshotGQL: CreateDatasetFromSnapshotGQL, - private commitEventToDataset: CommitEventToDatasetGQL, + private commitEventToDatasetGQL: CommitEventToDatasetGQL, private datasetSchemaGQL: GetDatasetSchemaGQL, + private updateReadmeGQL: UpdateReadmeGQL, ) {} public getDatasetMainData(params: { @@ -224,7 +227,7 @@ export class DatasetApi { datasetId: string; event: string; }): Observable { - return this.commitEventToDataset + return this.commitEventToDatasetGQL .mutate({ datasetId: params.datasetId, event: params.event, @@ -236,4 +239,21 @@ export class DatasetApi { }), ); } + + public updateReadme( + datasetId: string, + content: string, + ): Observable { + return this.updateReadmeGQL + .mutate({ + datasetId, + content, + }) + .pipe( + first(), + map((result: MutationResult) => { + return result.data; + }), + ); + } } diff --git a/src/app/api/gql/create-dataset/update-readme.graphql b/src/app/api/gql/create-dataset/update-readme.graphql new file mode 100644 index 000000000..4e4d9b685 --- /dev/null +++ b/src/app/api/gql/create-dataset/update-readme.graphql @@ -0,0 +1,15 @@ +mutation 
updateReadme($datasetId: DatasetID!, $content: String!) { + datasets { + byId(datasetId: $datasetId) { + metadata { + updateReadme(content: $content) { + __typename + message + ... on CommitResultSuccess { + oldHead + } + } + } + } + } +} diff --git a/src/app/api/kamu.graphql.interface.ts b/src/app/api/kamu.graphql.interface.ts index e9dd4baca..45057e2bc 100644 --- a/src/app/api/kamu.graphql.interface.ts +++ b/src/app/api/kamu.graphql.interface.ts @@ -1153,6 +1153,32 @@ export type CreateDatasetFromSnapshotMutation = { }; }; +export type UpdateReadmeMutationVariables = Exact<{ + datasetId: Scalars["DatasetID"]; + content: Scalars["String"]; +}>; + +export type UpdateReadmeMutation = { + __typename?: "Mutation"; + datasets: { + __typename?: "DatasetsMut"; + byId?: { + __typename?: "DatasetMut"; + metadata: { + __typename?: "DatasetMetadataMut"; + updateReadme: + | { __typename: "CommitResultAppendError"; message: string } + | { + __typename: "CommitResultSuccess"; + oldHead?: any | null; + message: string; + } + | { __typename: "NoChanges"; message: string }; + }; + } | null; + }; +}; + export type DatasetByAccountAndDatasetNameQueryVariables = Exact<{ accountName: Scalars["AccountName"]; datasetName: Scalars["DatasetName"]; @@ -2620,6 +2646,37 @@ export class CreateDatasetFromSnapshotGQL extends Apollo.Mutation< super(apollo); } } +export const UpdateReadmeDocument = gql` + mutation updateReadme($datasetId: DatasetID!, $content: String!) { + datasets { + byId(datasetId: $datasetId) { + metadata { + updateReadme(content: $content) { + __typename + message + ... on CommitResultSuccess { + oldHead + } + } + } + } + } + } +`; + +@Injectable({ + providedIn: "root", +}) +export class UpdateReadmeGQL extends Apollo.Mutation< + UpdateReadmeMutation, + UpdateReadmeMutationVariables +> { + document = UpdateReadmeDocument; + + constructor(apollo: Apollo.Apollo) { + super(apollo); + } +} export const DatasetByAccountAndDatasetNameDocument = gql` query datasetByAccountAndDatasetName( $accountName: AccountName! 
diff --git a/src/app/dataset-create/dataset-create.service.spec.ts b/src/app/dataset-create/dataset-create.service.spec.ts index a0e5a964f..cbc3071c5 100644 --- a/src/app/dataset-create/dataset-create.service.spec.ts +++ b/src/app/dataset-create/dataset-create.service.spec.ts @@ -1,4 +1,7 @@ -import { CreateDatasetFromSnapshotQuery } from "./../api/kamu.graphql.interface"; +import { + CreateDatasetFromSnapshotMutation, + CreateEmptyDatasetMutation, +} from "./../api/kamu.graphql.interface"; import { mockDatasetBasicsFragment, mockDatasetInfo, @@ -7,10 +10,7 @@ import { TestBed } from "@angular/core/testing"; import { Apollo } from "apollo-angular"; import { of } from "rxjs"; import { DatasetApi } from "../api/dataset.api"; -import { - CreateEmptyDatasetQuery, - DatasetKind, -} from "../api/kamu.graphql.interface"; +import { DatasetKind } from "../api/kamu.graphql.interface"; import { DatasetViewTypeEnum } from "../dataset-view/dataset-view.interface"; import { NavigationService } from "../services/navigation.service"; import { AppDatasetCreateService } from "./dataset-create.service"; @@ -34,7 +34,7 @@ describe("AppDatasetCreateService", () => { }); it("should be create empty dataset with success", () => { - const mockResponseSuccess: CreateEmptyDatasetQuery = { + const mockResponseSuccess: CreateEmptyDatasetMutation = { datasets: { createEmpty: { message: "Success", @@ -66,7 +66,7 @@ describe("AppDatasetCreateService", () => { }); it("should be create empty dataset with error", () => { - const mockResponseError: CreateEmptyDatasetQuery = { + const mockResponseError: CreateEmptyDatasetMutation = { datasets: { createEmpty: { message: "Fail", @@ -101,7 +101,7 @@ describe("AppDatasetCreateService", () => { }); it("should be create dataset using shapshot with success", () => { - const mockResponseSuccess: CreateDatasetFromSnapshotQuery = { + const mockResponseSuccess: CreateDatasetFromSnapshotMutation = { datasets: { createFromSnapshot: { message: "Success", @@ -135,7 +135,7 @@ describe("AppDatasetCreateService", () => { }); it("should be create dataset using shapshot with error", () => { - const mockResponseError: CreateDatasetFromSnapshotQuery = { + const mockResponseError: CreateDatasetFromSnapshotMutation = { datasets: { createFromSnapshot: { message: "Fail", diff --git a/src/app/dataset-create/dataset-create.service.ts b/src/app/dataset-create/dataset-create.service.ts index feca38496..bfca94f21 100644 --- a/src/app/dataset-create/dataset-create.service.ts +++ b/src/app/dataset-create/dataset-create.service.ts @@ -3,6 +3,7 @@ import { CreateDatasetFromSnapshotMutation, CreateEmptyDatasetMutation, DatasetByAccountAndDatasetNameQuery, + UpdateReadmeMutation, } from "./../api/kamu.graphql.interface"; import { Observable, Subject } from "rxjs"; import { DatasetApi } from "src/app/api/dataset.api"; @@ -147,19 +148,59 @@ export class AppDatasetCreateService { data.datasets.byId.metadata.chain.commitEvent.message, ); } else { - this.datasetService - .requestDatasetMainData({ - accountName, - datasetName, - }) - .subscribe(); - this.navigationService.navigateToDatasetView({ - accountName, - datasetName, - tab: DatasetViewTypeEnum.Overview, - }); + this.successActions(accountName, datasetName); + } + }), + ); + } + + public updateReadme( + accountName: string, + datasetName: string, + content: string, + ): Observable { + const key = `${accountName}${datasetName}`; + let observable: Observable; + if (this.cache.has(key)) { + observable = this.datasetApi.updateReadme( + // eslint-disable-next-line 
@typescript-eslint/no-non-null-assertion + this.cache.get(key)!, + content, + ); + } else { + observable = this.datasetApi + .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) + .pipe( + switchMap((x: DatasetByAccountAndDatasetNameQuery) => { + const id = x.datasets.byOwnerAndName?.id as string; + this.cache.set(key, id); + return this.datasetApi.updateReadme(id, content); + }), + ); + } + return observable.pipe( + map((data: UpdateReadmeMutation | null | undefined) => { + if ( + data?.datasets.byId?.metadata.updateReadme.__typename === + "CommitResultSuccess" + ) { + this.successActions(accountName, datasetName); } }), ); } + + private successActions(accountName: string, datasetName: string): void { + this.datasetService + .requestDatasetMainData({ + accountName, + datasetName, + }) + .subscribe(); + this.navigationService.navigateToDatasetView({ + accountName, + datasetName, + tab: DatasetViewTypeEnum.Overview, + }); + } } diff --git a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/components/engine-section/components/engine-select/engine-select.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/components/engine-section/components/engine-select/engine-select.component.ts index d10aad548..11ab9a94c 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/components/engine-section/components/engine-select/engine-select.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/components/engine-section/components/engine-select/engine-select.component.ts @@ -49,7 +49,10 @@ export class EngineSelectComponent implements OnInit { } } public get value(): string { - return DataHelpers.descriptionForEngine(this.engine).label ?? ""; + return ( + DataHelpers.descriptionForEngine(this.engine.toLowerCase()).label ?? 
+ "" + ); } public getLogo(name: string): EventPropertyLogo { diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts index b2652b73b..5b7855192 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts @@ -15,7 +15,6 @@ import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.s import { OverviewDataUpdate } from "src/app/dataset-view/dataset.subscriptions.interface"; import { AppDatasetSubscriptionsService } from "src/app/dataset-view/dataset.subscriptions.service"; import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; -import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; @Component({ selector: "app-readme-section", @@ -42,9 +41,7 @@ export class ReadmeSectionComponent extends BaseComponent implements OnInit { constructor( private appDatasetSubsService: AppDatasetSubscriptionsService, - private createDatasetService: AppDatasetCreateService, - private yamlEventService: TemplatesYamlEventsService, ) { super(); } @@ -78,12 +75,10 @@ export class ReadmeSectionComponent extends BaseComponent implements OnInit { if (this.datasetBasics) this.trackSubscription( this.createDatasetService - .commitEventToDataset( + .updateReadme( this.datasetBasics.owner.name, this.datasetBasics.name as string, - this.yamlEventService.buildYamlSetAttachmentsEvent( - this.readmeState, - ), + this.readmeState, ) .subscribe(() => (this.isMarkdownEditView = false)), ); diff --git a/src/app/search/mock.data.ts b/src/app/search/mock.data.ts index 4dc46775e..9f318feba 100644 --- a/src/app/search/mock.data.ts +++ b/src/app/search/mock.data.ts @@ -1,5 +1,5 @@ import { - CommitEventToDatasetQuery, + CommitEventToDatasetMutation, PageBasedInfo, } from "./../api/kamu.graphql.interface"; import { mockSetVocab } from "./../dataset-block/metadata-block/components/event-details/mock.events"; @@ -763,9 +763,9 @@ export const mockDatasetDataSqlRunInternalErrorResponse: GetDatasetDataSqlRunQue }, }; -export const mockCommitEventResponse: CommitEventToDatasetQuery = { +export const mockCommitEventResponse: CommitEventToDatasetMutation = { datasets: { - byOwnerAndName: { + byId: { metadata: { chain: { commitEvent: { @@ -776,13 +776,9 @@ export const mockCommitEventResponse: CommitEventToDatasetQuery = { newHead: "zW1hgfvGgmdsbrDMhVoBE5TRX2RX4DV2mhh4QgEAeA3fx4Q", }, - __typename: "MetadataChain", }, - __typename: "DatasetMetadata", }, - __typename: "Dataset", }, - __typename: "Datasets", }, }; diff --git a/src/app/services/templates-yaml-events.service.ts b/src/app/services/templates-yaml-events.service.ts index 9cbecb6bc..3f19f27e2 100644 --- a/src/app/services/templates-yaml-events.service.ts +++ b/src/app/services/templates-yaml-events.service.ts @@ -54,22 +54,6 @@ export class TemplatesYamlEventsService { return result; } - public buildYamlSetAttachmentsEvent(data: string): string { - this.initialTemplate.content = { - kind: "setAttachments", - attachments: { - kind: "embedded", - items: [ - { - path: "README.md", - content: data, - }, - ], - }, - }; - return stringify(this.initialTemplate); - } - public buildYamlSetPollingSourceEvent( params: Omit, preprocessStepValue: 
MaybeNull, From 2414cd810ee88217a2422f75591e6e4bc5b4b131 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Wed, 2 Aug 2023 20:24:36 +0300 Subject: [PATCH 07/16] Moved the finding of the ID to a separate method. --- .../dataset-create/dataset-create.service.ts | 73 +++++++++---------- 1 file changed, 33 insertions(+), 40 deletions(-) diff --git a/src/app/dataset-create/dataset-create.service.ts b/src/app/dataset-create/dataset-create.service.ts index bfca94f21..36f005900 100644 --- a/src/app/dataset-create/dataset-create.service.ts +++ b/src/app/dataset-create/dataset-create.service.ts @@ -5,7 +5,7 @@ import { DatasetByAccountAndDatasetNameQuery, UpdateReadmeMutation, } from "./../api/kamu.graphql.interface"; -import { Observable, Subject } from "rxjs"; +import { Observable, Subject, of } from "rxjs"; import { DatasetApi } from "src/app/api/dataset.api"; import { Injectable } from "@angular/core"; import { DatasetKind } from "../api/kamu.graphql.interface"; @@ -112,31 +112,16 @@ export class AppDatasetCreateService { datasetName: string, event: string, ): Observable { - const key = `${accountName}${datasetName}`; - let observable: Observable< - CommitEventToDatasetMutation | null | undefined - >; - if (this.cache.has(key)) { - observable = this.datasetApi.commitEvent({ - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - datasetId: this.cache.get(key)!, - event, - }); - } else { - observable = this.datasetApi - .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) - .pipe( - switchMap((x: DatasetByAccountAndDatasetNameQuery) => { - const id = x.datasets.byOwnerAndName?.id as string; - this.cache.set(key, id); - return this.datasetApi.commitEvent({ - datasetId: id, - event, - }); - }), - ); - } - return observable.pipe( + return this.getIdByAccountNameAndDatasetName( + accountName, + datasetName, + ).pipe( + switchMap((id: string) => + this.datasetApi.commitEvent({ + datasetId: id, + event, + }), + ), map((data: CommitEventToDatasetMutation | undefined | null) => { if ( data?.datasets.byId?.metadata.chain.commitEvent @@ -154,31 +139,39 @@ export class AppDatasetCreateService { ); } - public updateReadme( + public getIdByAccountNameAndDatasetName( accountName: string, datasetName: string, - content: string, - ): Observable { + ): Observable { const key = `${accountName}${datasetName}`; - let observable: Observable; if (this.cache.has(key)) { - observable = this.datasetApi.updateReadme( - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - this.cache.get(key)!, - content, - ); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return of(this.cache.get(key)!); } else { - observable = this.datasetApi + return this.datasetApi .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) .pipe( - switchMap((x: DatasetByAccountAndDatasetNameQuery) => { - const id = x.datasets.byOwnerAndName?.id as string; + map((data: DatasetByAccountAndDatasetNameQuery) => { + const id = data.datasets.byOwnerAndName?.id as string; this.cache.set(key, id); - return this.datasetApi.updateReadme(id, content); + return id; }), ); } - return observable.pipe( + } + + public updateReadme( + accountName: string, + datasetName: string, + content: string, + ): Observable { + return this.getIdByAccountNameAndDatasetName( + accountName, + datasetName, + ).pipe( + switchMap((id: string) => + this.datasetApi.updateReadme(id, content), + ), map((data: UpdateReadmeMutation | null | undefined) => { if ( 
data?.datasets.byId?.metadata.updateReadme.__typename === From 5e759865947030ba6fb0cc3f0635111769e61d0c Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Wed, 2 Aug 2023 20:41:46 +0300 Subject: [PATCH 08/16] Fix existing unit tests. --- src/app/api/dataset.api.spec.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/app/api/dataset.api.spec.ts b/src/app/api/dataset.api.spec.ts index 734f237bf..b08c8c281 100644 --- a/src/app/api/dataset.api.spec.ts +++ b/src/app/api/dataset.api.spec.ts @@ -188,10 +188,12 @@ describe("DatasetApi", () => { }); it("should commit event", () => { + const mockDatasetId = "mockId"; + const mockEvent = "mock event"; service .commitEvent({ - datasetId: "mockId", - event: "mock event", + datasetId: mockDatasetId, + event: mockEvent, }) .subscribe( (res: CommitEventToDatasetMutation | null | undefined) => { @@ -203,8 +205,8 @@ describe("DatasetApi", () => { ); const op = controller.expectOne(CommitEventToDatasetDocument); - expect(op.operation.variables.accountName).toEqual(TEST_USER_NAME); - expect(op.operation.variables.datasetName).toEqual(TEST_DATASET_NAME); + expect(op.operation.variables.datasetId).toEqual(mockDatasetId); + expect(op.operation.variables.event).toEqual(mockEvent); op.flush({ data: mockCommitEventResponse, }); From 1aa7e36cc44ba34b388eff488cf76cafff6405cb Mon Sep 17 00:00:00 2001 From: Sergei Zaychenko Date: Thu, 3 Aug 2023 02:03:02 -0700 Subject: [PATCH 09/16] Formatted GraphQL schema file --- resources/schema.graphql | 1276 ++++++++++++++++++++------------------ 1 file changed, 663 insertions(+), 613 deletions(-) diff --git a/resources/schema.graphql b/resources/schema.graphql index 2e8f8092b..c494c5eef 100644 --- a/resources/schema.graphql +++ b/resources/schema.graphql @@ -1,329 +1,338 @@ type AccessToken { - accessToken: String! - scope: String! - tokenType: String! + accessToken: String! + scope: String! + tokenType: String! } interface Account { - id: AccountID! - name: String! + id: AccountID! + name: String! } scalar AccountID type AccountInfo { - login: String! - name: String! - email: String - avatarUrl: String - gravatarId: String + login: String! + name: String! + email: String + avatarUrl: String + gravatarId: String } scalar AccountName type Accounts { - """ - Returns account by its ID - """ - byId(accountId: AccountID!): Account - """ - Returns account by its name - """ - byName(name: String!): Account + """ + Returns account by its ID + """ + byId(accountId: AccountID!): Account + """ + Returns account by its name + """ + byName(name: String!): Account } type AddData { - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime - sourceState: SourceState + inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime + sourceState: SourceState } type AttachmentEmbedded { - path: String! - content: String! + path: String! + content: String! } union Attachments = AttachmentsEmbedded type AttachmentsEmbedded { - items: [AttachmentEmbedded!]! + items: [AttachmentEmbedded!]! } type AuthMut { - githubLogin(code: String!): LoginResponse! - accountInfo(accessToken: String!): AccountInfo! + githubLogin(code: String!): LoginResponse! + accountInfo(accessToken: String!): AccountInfo! } type BlockInterval { - start: Multihash! - end: Multihash! + start: Multihash! + end: Multihash! } type BlockRef { - name: String! - blockHash: Multihash! + name: String! + blockHash: Multihash! 
} - type Checkpoint { - physicalHash: Multihash! - size: Int! + physicalHash: Multihash! + size: Int! } interface CommitResult { - message: String! + message: String! } type CommitResultAppendError implements CommitResult & UpdateReadmeResult { - message: String! + message: String! } type CommitResultSuccess implements CommitResult & UpdateReadmeResult { - oldHead: Multihash - newHead: Multihash! - message: String! + oldHead: Multihash + newHead: Multihash! + message: String! } enum CompressionFormat { - GZIP - ZIP + GZIP + ZIP } interface CreateDatasetFromSnapshotResult { - message: String! + message: String! } interface CreateDatasetResult { - message: String! + message: String! } type CreateDatasetResultInvalidSnapshot implements CreateDatasetFromSnapshotResult { - message: String! + message: String! } type CreateDatasetResultMissingInputs implements CreateDatasetFromSnapshotResult { - missingInputs: [String!]! - message: String! + missingInputs: [String!]! + message: String! } type CreateDatasetResultNameCollision implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - accountName: AccountName - datasetName: DatasetName! - message: String! + accountName: AccountName + datasetName: DatasetName! + message: String! } type CreateDatasetResultSuccess implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - dataset: Dataset! - message: String! + dataset: Dataset! + message: String! } type DataBatch { - format: DataBatchFormat! - content: String! - numRecords: Int! + format: DataBatchFormat! + content: String! + numRecords: Int! } enum DataBatchFormat { - JSON - JSON_LD - JSON_SOA - CSV + JSON + JSON_LD + JSON_SOA + CSV } type DataQueries { - """ - Executes a specified query and returns its result - """ - query(query: String!, queryDialect: QueryDialect!, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat, limit: Int): DataQueryResult! - """ - Lists engines known to the system and recommended for use - """ - knownEngines: [EngineDesc!]! + """ + Executes a specified query and returns its result + """ + query( + query: String! + queryDialect: QueryDialect! + dataFormat: DataBatchFormat + schemaFormat: DataSchemaFormat + limit: Int + ): DataQueryResult! + """ + Lists engines known to the system and recommended for use + """ + knownEngines: [EngineDesc!]! } union DataQueryResult = DataQueryResultSuccess | DataQueryResultError type DataQueryResultError { - errorMessage: String! - errorKind: DataQueryResultErrorKind! + errorMessage: String! + errorKind: DataQueryResultErrorKind! } enum DataQueryResultErrorKind { - INVALID_SQL - INTERNAL_ERROR + INVALID_SQL + INTERNAL_ERROR } type DataQueryResultSuccess { - schema: DataSchema - data: DataBatch! - limit: Int! + schema: DataSchema + data: DataBatch! + limit: Int! } type DataSchema { - format: DataSchemaFormat! - content: String! + format: DataSchemaFormat! + content: String! } enum DataSchemaFormat { - PARQUET - PARQUET_JSON + PARQUET + PARQUET_JSON } type DataSlice { - logicalHash: Multihash! - physicalHash: Multihash! - interval: OffsetInterval! - size: Int! + logicalHash: Multihash! + physicalHash: Multihash! + interval: OffsetInterval! + size: Int! } type Dataset { - """ - Unique identifier of the dataset - """ - id: DatasetID! - """ - Symbolic name of the dataset. - Name can change over the dataset's lifetime. For unique identifier use - `id()`. - """ - name: DatasetName! - """ - Returns the user or organization that owns this dataset - """ - owner: Account! 
- """ - Returns the kind of a dataset (Root or Derivative) - """ - kind: DatasetKind! - """ - Access to the data of the dataset - """ - data: DatasetData! - """ - Access to the metadata of the dataset - """ - metadata: DatasetMetadata! - """ - Creation time of the first metadata block in the chain - """ - createdAt: DateTime! - """ - Creation time of the most recent metadata block in the chain - """ - lastUpdatedAt: DateTime! + """ + Unique identifier of the dataset + """ + id: DatasetID! + """ + Symbolic name of the dataset. + Name can change over the dataset's lifetime. For unique identifier use + `id()`. + """ + name: DatasetName! + """ + Returns the user or organization that owns this dataset + """ + owner: Account! + """ + Returns the kind of a dataset (Root or Derivative) + """ + kind: DatasetKind! + """ + Access to the data of the dataset + """ + data: DatasetData! + """ + Access to the metadata of the dataset + """ + metadata: DatasetMetadata! + """ + Creation time of the first metadata block in the chain + """ + createdAt: DateTime! + """ + Creation time of the most recent metadata block in the chain + """ + lastUpdatedAt: DateTime! } type DatasetConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Dataset!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [DatasetEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Dataset!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [DatasetEdge!]! } type DatasetData { - """ - Total number of records in this dataset - """ - numRecordsTotal: Int! - """ - An estimated size of data on disk not accounting for replication or - caching - """ - estimatedSize: Int! - """ - Returns the specified number of the latest records in the dataset - This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY - event_time DESC LIMIT N` - """ - tail(limit: Int, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat): DataQueryResult! + """ + Total number of records in this dataset + """ + numRecordsTotal: Int! + """ + An estimated size of data on disk not accounting for replication or + caching + """ + estimatedSize: Int! + """ + Returns the specified number of the latest records in the dataset + This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY + event_time DESC LIMIT N` + """ + tail( + limit: Int + dataFormat: DataBatchFormat + schemaFormat: DataSchemaFormat + ): DataQueryResult! } type DatasetEdge { - node: Dataset! + node: Dataset! } scalar DatasetID enum DatasetKind { - ROOT - DERIVATIVE + ROOT + DERIVATIVE } type DatasetMetadata { - """ - Access to the temporal metadata chain of the dataset - """ - chain: MetadataChain! - """ - Last recorded watermark - """ - currentWatermark: DateTime - """ - Latest data schema - """ - currentSchema(format: DataSchemaFormat): DataSchema - """ - Current upstream dependencies of a dataset - """ - currentUpstreamDependencies: [Dataset!]! - """ - Current downstream dependencies of a dataset - """ - currentDownstreamDependencies: [Dataset!]! - """ - Current source used by the root dataset - """ - currentSource: SetPollingSource - """ - Current transformation used by the derivative dataset - """ - currentTransform: SetTransform - """ - Current descriptive information about the dataset - """ - currentInfo: SetInfo! 
- """ - Current readme file as discovered from attachments associated with the - dataset - """ - currentReadme: String - """ - Current license associated with the dataset - """ - currentLicense: SetLicense - """ - Current vocabulary associated with the dataset - """ - currentVocab: SetVocab + """ + Access to the temporal metadata chain of the dataset + """ + chain: MetadataChain! + """ + Last recorded watermark + """ + currentWatermark: DateTime + """ + Latest data schema + """ + currentSchema(format: DataSchemaFormat): DataSchema + """ + Current upstream dependencies of a dataset + """ + currentUpstreamDependencies: [Dataset!]! + """ + Current downstream dependencies of a dataset + """ + currentDownstreamDependencies: [Dataset!]! + """ + Current source used by the root dataset + """ + currentSource: SetPollingSource + """ + Current transformation used by the derivative dataset + """ + currentTransform: SetTransform + """ + Current descriptive information about the dataset + """ + currentInfo: SetInfo! + """ + Current readme file as discovered from attachments associated with the + dataset + """ + currentReadme: String + """ + Current license associated with the dataset + """ + currentLicense: SetLicense + """ + Current vocabulary associated with the dataset + """ + currentVocab: SetVocab } type DatasetMetadataMut { - """ - Access to the mutable metadata chain of the dataset - """ - chain: MetadataChainMut! - """ - Updates or clears the dataset readme - """ - updateReadme(content: String): UpdateReadmeResult! + """ + Access to the mutable metadata chain of the dataset + """ + chain: MetadataChainMut! + """ + Updates or clears the dataset readme + """ + updateReadme(content: String): UpdateReadmeResult! } type DatasetMut { - """ - Access to the mutable metadata of the dataset - """ - metadata: DatasetMetadataMut! + """ + Access to the mutable metadata of the dataset + """ + metadata: DatasetMetadataMut! } scalar DatasetName @@ -331,37 +340,53 @@ scalar DatasetName scalar DatasetRefAny type Datasets { - """ - Returns dataset by its ID - """ - byId(datasetId: DatasetID!): Dataset - """ - Returns dataset by its owner and name - """ - byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset - """ - Returns datasets belonging to the specified account - """ - byAccountId(accountId: AccountID!, page: Int, perPage: Int): DatasetConnection! - """ - Returns datasets belonging to the specified account - """ - byAccountName(accountName: AccountName!, page: Int, perPage: Int): DatasetConnection! + """ + Returns dataset by its ID + """ + byId(datasetId: DatasetID!): Dataset + """ + Returns dataset by its owner and name + """ + byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset + """ + Returns datasets belonging to the specified account + """ + byAccountId( + accountId: AccountID! + page: Int + perPage: Int + ): DatasetConnection! + """ + Returns datasets belonging to the specified account + """ + byAccountName( + accountName: AccountName! + page: Int + perPage: Int + ): DatasetConnection! } type DatasetsMut { - """ - Returns a mutable dataset by its ID - """ - byId(datasetId: DatasetID!): DatasetMut - """ - Creates a new empty dataset - """ - createEmpty(accountId: AccountID!, datasetKind: DatasetKind!, datasetName: DatasetName!): CreateDatasetResult! 
- """ - Creates a new dataset from provided DatasetSnapshot manifest - """ - createFromSnapshot(accountId: AccountID!, snapshot: String!, snapshotFormat: MetadataManifestFormat!): CreateDatasetFromSnapshotResult! + """ + Returns a mutable dataset by its ID + """ + byId(datasetId: DatasetID!): DatasetMut + """ + Creates a new empty dataset + """ + createEmpty( + accountId: AccountID! + datasetKind: DatasetKind! + datasetName: DatasetName! + ): CreateDatasetResult! + """ + Creates a new dataset from provided DatasetSnapshot manifest + """ + createFromSnapshot( + accountId: AccountID! + snapshot: String! + snapshotFormat: MetadataManifestFormat! + ): CreateDatasetFromSnapshotResult! } """ @@ -375,491 +400,508 @@ scalar DateTime Describes """ type EngineDesc { - """ - A short name of the engine, e.g. "Spark", "Flink". - Intended for use in UI for quick engine identification and selection. - """ - name: String! - """ - Language and dialect this engine is using for queries - Indended for configuring code highlighting and completions. - """ - dialect: QueryDialect! - """ - OCI image repository and a tag of the latest engine image, e.g. - "ghcr.io/kamu-data/engine-datafusion:0.1.2" - """ - latestImage: String! + """ + A short name of the engine, e.g. "Spark", "Flink". + Intended for use in UI for quick engine identification and selection. + """ + name: String! + """ + Language and dialect this engine is using for queries + Indended for configuring code highlighting and completions. + """ + dialect: QueryDialect! + """ + OCI image repository and a tag of the latest engine image, e.g. + "ghcr.io/kamu-data/engine-datafusion:0.1.2" + """ + latestImage: String! } type EnvVar { - name: String! - value: String + name: String! + value: String } union EventTimeSource = EventTimeSourceFromMetadata | EventTimeSourceFromPath type EventTimeSourceFromMetadata { - dummy: String + dummy: String } type EventTimeSourceFromPath { - pattern: String! - timestampFormat: String + pattern: String! + timestampFormat: String } type ExecuteQuery { - inputSlices: [InputSlice!]! - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime + inputSlices: [InputSlice!]! + inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime } union FetchStep = FetchStepUrl | FetchStepFilesGlob | FetchStepContainer type FetchStepContainer { - image: String! - command: [String!] - args: [String!] - env: [EnvVar!] + image: String! + command: [String!] + args: [String!] + env: [EnvVar!] } type FetchStepFilesGlob { - path: String! - eventTime: EventTimeSource - cache: SourceCaching - order: SourceOrdering + path: String! + eventTime: EventTimeSource + cache: SourceCaching + order: SourceOrdering } type FetchStepUrl { - url: String! - eventTime: EventTimeSource - cache: SourceCaching - headers: [RequestHeader!] + url: String! + eventTime: EventTimeSource + cache: SourceCaching + headers: [RequestHeader!] } - - type InputSlice { - datasetId: DatasetID! - blockInterval: BlockInterval - dataInterval: OffsetInterval + datasetId: DatasetID! + blockInterval: BlockInterval + dataInterval: OffsetInterval } - type LoginResponse { - token: AccessToken! - accountInfo: AccountInfo! + token: AccessToken! + accountInfo: AccountInfo! 
} -union MergeStrategy = MergeStrategyAppend | MergeStrategyLedger | MergeStrategySnapshot +union MergeStrategy = + MergeStrategyAppend + | MergeStrategyLedger + | MergeStrategySnapshot type MergeStrategyAppend { - dummy: String + dummy: String } type MergeStrategyLedger { - primaryKey: [String!]! + primaryKey: [String!]! } type MergeStrategySnapshot { - primaryKey: [String!]! - compareColumns: [String!] - observationColumn: String - obsvAdded: String - obsvChanged: String - obsvRemoved: String + primaryKey: [String!]! + compareColumns: [String!] + observationColumn: String + obsvAdded: String + obsvChanged: String + obsvRemoved: String } type MetadataBlockConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [MetadataBlockExtended!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [MetadataBlockEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [MetadataBlockExtended!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [MetadataBlockEdge!]! } type MetadataBlockEdge { - node: MetadataBlockExtended! + node: MetadataBlockExtended! } type MetadataBlockExtended { - blockHash: Multihash! - prevBlockHash: Multihash - systemTime: DateTime! - author: Account! - event: MetadataEvent! - sequenceNumber: Int! + blockHash: Multihash! + prevBlockHash: Multihash + systemTime: DateTime! + author: Account! + event: MetadataEvent! + sequenceNumber: Int! } type MetadataChain { - """ - Returns all named metadata block references - """ - refs: [BlockRef!]! - """ - Returns a metadata block corresponding to the specified hash - """ - blockByHash(hash: Multihash!): MetadataBlockExtended - """ - Returns a metadata block corresponding to the specified hash and encoded - in desired format - """ - blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String - """ - Iterates all metadata blocks in the reverse chronological order - """ - blocks(page: Int, perPage: Int): MetadataBlockConnection! + """ + Returns all named metadata block references + """ + refs: [BlockRef!]! + """ + Returns a metadata block corresponding to the specified hash + """ + blockByHash(hash: Multihash!): MetadataBlockExtended + """ + Returns a metadata block corresponding to the specified hash and encoded + in desired format + """ + blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String + """ + Iterates all metadata blocks in the reverse chronological order + """ + blocks(page: Int, perPage: Int): MetadataBlockConnection! } type MetadataChainMut { - """ - Commits new event to the metadata chain - """ - commitEvent(event: String!, eventFormat: MetadataManifestFormat!): CommitResult! -} - -union MetadataEvent = AddData | ExecuteQuery | Seed | SetPollingSource | SetTransform | SetVocab | SetWatermark | SetAttachments | SetInfo | SetLicense + """ + Commits new event to the metadata chain + """ + commitEvent( + event: String! + eventFormat: MetadataManifestFormat! + ): CommitResult! +} + +union MetadataEvent = + AddData + | ExecuteQuery + | Seed + | SetPollingSource + | SetTransform + | SetVocab + | SetWatermark + | SetAttachments + | SetInfo + | SetLicense enum MetadataManifestFormat { - YAML + YAML } type MetadataManifestMalformed implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! 
} type MetadataManifestUnsupportedVersion implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! } scalar Multihash type Mutation { - """ - Authentication and authorization-related functionality group - """ - auth: AuthMut! - """ - Dataset-related functionality group. - - Datasets are historical streams of events recorded under a cetrain - schema. - """ - datasets: DatasetsMut! - """ - Tasks-related functionality group. - - Tasks are units of work scheduled and executed by the system to query - and process data. - """ - tasks: TasksMut! + """ + Authentication and authorization-related functionality group + """ + auth: AuthMut! + """ + Dataset-related functionality group. + + Datasets are historical streams of events recorded under a cetrain + schema. + """ + datasets: DatasetsMut! + """ + Tasks-related functionality group. + + Tasks are units of work scheduled and executed by the system to query + and process data. + """ + tasks: TasksMut! } type NoChanges implements CommitResult & UpdateReadmeResult { - message: String! + message: String! } type OffsetInterval { - start: Int! - end: Int! + start: Int! + end: Int! } type Organization implements Account { - """ - Unique and stable identitfier of this organization account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this organization account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! } type PageBasedInfo { - """ - When paginating backwards, are there more items? - """ - hasPreviousPage: Boolean! - """ - When paginating forwards, are there more items? - """ - hasNextPage: Boolean! - """ - Index of the current page - """ - currentPage: Int! - """ - Approximate number of total pages assuming number of nodes per page - stays the same - """ - totalPages: Int + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + Index of the current page + """ + currentPage: Int! + """ + Approximate number of total pages assuming number of nodes per page + stays the same + """ + totalPages: Int } union PrepStep = PrepStepDecompress | PrepStepPipe type PrepStepDecompress { - format: CompressionFormat! - subPath: String + format: CompressionFormat! + subPath: String } type PrepStepPipe { - command: [String!]! + command: [String!]! } type Query { - """ - Returns the version of the GQL API - """ - apiVersion: String! - """ - Dataset-related functionality group. - - Datasets are historical streams of events recorded under a cetrain - schema. - """ - datasets: Datasets! - """ - Account-related functionality group. - - Accounts can be individual users or organizations registered in the - system. This groups deals with their identities and permissions. - """ - accounts: Accounts! - """ - Task-related functionality group. - - Tasks are units of scheduling that can perform many functions like - ingesting new data, running dataset transformations, answering ad-hoc - queries etc. - """ - tasks: Tasks! - """ - Search-related functionality group. - """ - search: Search! - """ - Querying and data manipulations - """ - data: DataQueries! + """ + Returns the version of the GQL API + """ + apiVersion: String! + """ + Dataset-related functionality group. + + Datasets are historical streams of events recorded under a cetrain + schema. + """ + datasets: Datasets! + """ + Account-related functionality group. 
+ + Accounts can be individual users or organizations registered in the + system. This groups deals with their identities and permissions. + """ + accounts: Accounts! + """ + Task-related functionality group. + + Tasks are units of scheduling that can perform many functions like + ingesting new data, running dataset transformations, answering ad-hoc + queries etc. + """ + tasks: Tasks! + """ + Search-related functionality group. + """ + search: Search! + """ + Querying and data manipulations + """ + data: DataQueries! } enum QueryDialect { - SQL_SPARK - SQL_FLINK - SQL_DATA_FUSION + SQL_SPARK + SQL_FLINK + SQL_DATA_FUSION } -union ReadStep = ReadStepCsv | ReadStepJsonLines | ReadStepGeoJson | ReadStepEsriShapefile | ReadStepParquet +union ReadStep = + ReadStepCsv + | ReadStepJsonLines + | ReadStepGeoJson + | ReadStepEsriShapefile + | ReadStepParquet type ReadStepCsv { - schema: [String!] - separator: String - encoding: String - quote: String - escape: String - comment: String - header: Boolean - enforceSchema: Boolean - inferSchema: Boolean - ignoreLeadingWhiteSpace: Boolean - ignoreTrailingWhiteSpace: Boolean - nullValue: String - emptyValue: String - nanValue: String - positiveInf: String - negativeInf: String - dateFormat: String - timestampFormat: String - multiLine: Boolean + schema: [String!] + separator: String + encoding: String + quote: String + escape: String + comment: String + header: Boolean + enforceSchema: Boolean + inferSchema: Boolean + ignoreLeadingWhiteSpace: Boolean + ignoreTrailingWhiteSpace: Boolean + nullValue: String + emptyValue: String + nanValue: String + positiveInf: String + negativeInf: String + dateFormat: String + timestampFormat: String + multiLine: Boolean } type ReadStepEsriShapefile { - schema: [String!] - subPath: String + schema: [String!] + subPath: String } type ReadStepGeoJson { - schema: [String!] + schema: [String!] } type ReadStepJsonLines { - schema: [String!] - dateFormat: String - encoding: String - multiLine: Boolean - primitivesAsString: Boolean - timestampFormat: String + schema: [String!] + dateFormat: String + encoding: String + multiLine: Boolean + primitivesAsString: Boolean + timestampFormat: String } type ReadStepParquet { - schema: [String!] + schema: [String!] } type RequestHeader { - name: String! - value: String! + name: String! + value: String! } type Search { - """ - Perform search across all resources - """ - query(query: String!, page: Int, perPage: Int): SearchResultConnection! + """ + Perform search across all resources + """ + query(query: String!, page: Int, perPage: Int): SearchResultConnection! } union SearchResult = Dataset type SearchResultConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [SearchResult!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [SearchResultEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [SearchResult!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [SearchResultEdge!]! } type SearchResultEdge { - node: SearchResult! + node: SearchResult! } type Seed { - datasetId: DatasetID! - datasetKind: DatasetKind! + datasetId: DatasetID! + datasetKind: DatasetKind! } type SetAttachments { - attachments: Attachments! + attachments: Attachments! } type SetInfo { - description: String - keywords: [String!] + description: String + keywords: [String!] 
} type SetLicense { - shortName: String! - name: String! - spdxId: String - websiteUrl: String! + shortName: String! + name: String! + spdxId: String + websiteUrl: String! } type SetPollingSource { - fetch: FetchStep! - prepare: [PrepStep!] - read: ReadStep! - preprocess: Transform - merge: MergeStrategy! + fetch: FetchStep! + prepare: [PrepStep!] + read: ReadStep! + preprocess: Transform + merge: MergeStrategy! } type SetTransform { - inputs: [TransformInput!]! - transform: Transform! + inputs: [TransformInput!]! + transform: Transform! } type SetVocab { - systemTimeColumn: String - eventTimeColumn: String - offsetColumn: String + systemTimeColumn: String + eventTimeColumn: String + offsetColumn: String } type SetWatermark { - outputWatermark: DateTime! + outputWatermark: DateTime! } union SourceCaching = SourceCachingForever type SourceCachingForever { - dummy: String + dummy: String } enum SourceOrdering { - BY_EVENT_TIME - BY_NAME + BY_EVENT_TIME + BY_NAME } type SourceState { - kind: String! - source: String! - value: String! + kind: String! + source: String! + value: String! } type SqlQueryStep { - alias: String - query: String! + alias: String + query: String! } - type Task { - """ - Unique and stable identitfier of this task - """ - taskId: TaskID! - """ - Life-cycle status of a task - """ - status: TaskStatus! - """ - Whether the task was ordered to be cancelled - """ - cancellationRequested: Boolean! - """ - Describes a certain final outcome of the task once it reaches the - "finished" status - """ - outcome: TaskOutcome - """ - Time when task was originally created and placed in a queue - """ - createdAt: DateTime! - """ - Time when task transitioned into a running state - """ - ranAt: DateTime - """ - Time when cancellation of task was requested - """ - cancellationRequestedAt: DateTime - """ - Time when task has reached a final outcome - """ - finishedAt: DateTime + """ + Unique and stable identitfier of this task + """ + taskId: TaskID! + """ + Life-cycle status of a task + """ + status: TaskStatus! + """ + Whether the task was ordered to be cancelled + """ + cancellationRequested: Boolean! + """ + Describes a certain final outcome of the task once it reaches the + "finished" status + """ + outcome: TaskOutcome + """ + Time when task was originally created and placed in a queue + """ + createdAt: DateTime! + """ + Time when task transitioned into a running state + """ + ranAt: DateTime + """ + Time when cancellation of task was requested + """ + cancellationRequestedAt: DateTime + """ + Time when task has reached a final outcome + """ + finishedAt: DateTime } type TaskConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Task!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [TaskEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Task!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [TaskEdge!]! } type TaskEdge { - node: Task! + node: Task! 
} scalar TaskID @@ -868,104 +910,112 @@ scalar TaskID Describes a certain final outcome of the task """ enum TaskOutcome { - """ - Task succeeded - """ - SUCCESS - """ - Task failed to complete - """ - FAILED - """ - Task was cancelled by a user - """ - CANCELLED + """ + Task succeeded + """ + SUCCESS + """ + Task failed to complete + """ + FAILED + """ + Task was cancelled by a user + """ + CANCELLED } """ Life-cycle status of a task """ enum TaskStatus { - """ - Task is waiting for capacity to be allocated to it - """ - QUEUED - """ - Task is being executed - """ - RUNNING - """ - Task has reached a certain final outcome (see [TaskOutcome]) - """ - FINISHED + """ + Task is waiting for capacity to be allocated to it + """ + QUEUED + """ + Task is being executed + """ + RUNNING + """ + Task has reached a certain final outcome (see [TaskOutcome]) + """ + FINISHED } type Tasks { - """ - Returns current state of a given task - """ - getTask(taskId: TaskID!): Task - """ - Returns states of tasks associated with a given dataset ordered by - creation time from newest to oldest - """ - listTasksByDataset(datasetId: DatasetID!, page: Int, perPage: Int): TaskConnection! + """ + Returns current state of a given task + """ + getTask(taskId: TaskID!): Task + """ + Returns states of tasks associated with a given dataset ordered by + creation time from newest to oldest + """ + listTasksByDataset( + datasetId: DatasetID! + page: Int + perPage: Int + ): TaskConnection! } type TasksMut { - """ - Requests cancellation of the specified task - """ - cancelTask(taskId: TaskID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createUpdateDatasetTask(datasetId: DatasetID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createProbeTask(datasetId: DatasetID, busyTimeMs: Int, endWithOutcome: TaskOutcome): Task! + """ + Requests cancellation of the specified task + """ + cancelTask(taskId: TaskID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createUpdateDatasetTask(datasetId: DatasetID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createProbeTask( + datasetId: DatasetID + busyTimeMs: Int + endWithOutcome: TaskOutcome + ): Task! } type TemporalTable { - name: String! - primaryKey: [String!]! + name: String! + primaryKey: [String!]! } union Transform = TransformSql type TransformInput { - id: DatasetID - name: DatasetName! - datasetRef: DatasetRefAny - dataset: Dataset! + id: DatasetID + name: DatasetName! + datasetRef: DatasetRefAny + dataset: Dataset! } type TransformSql { - engine: String! - version: String - queries: [SqlQueryStep!]! - temporalTables: [TemporalTable!] + engine: String! + version: String + queries: [SqlQueryStep!]! + temporalTables: [TemporalTable!] } interface UpdateReadmeResult { - message: String! + message: String! } type User implements Account { - """ - Unique and stable identitfier of this user account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this user account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! 
} schema { - query: Query - mutation: Mutation + query: Query + mutation: Mutation } From d8127f37028fdca476387bc75e2dcee6cd6c2e4d Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Thu, 3 Aug 2023 15:02:33 +0300 Subject: [PATCH 10/16] Fixed PR comments. --- src/app/api/dataset.api.ts | 16 +- src/app/app.module.ts | 6 - .../dataset-create/dataset-create.service.ts | 190 +++++++++--------- .../add-polling-source.component.ts | 2 +- .../components/base-main-event.component.ts | 6 +- .../final-yaml-modal.component.ts | 8 +- .../set-transform/set-transform.component.ts | 2 +- .../edit-details-modal.component.ts | 6 +- .../edit-license-modal.component.ts | 6 +- .../edit-watermark-modal.component.ts | 6 +- .../readme-section.component.html | 20 +- .../readme-section.component.ts | 73 +++---- .../readme-section/readme-section.types.ts | 4 + .../overview-component.html | 2 +- .../services/dataset-commit.service.spec.ts | 16 ++ .../services/dataset-commit.service.ts | 125 ++++++++++++ 16 files changed, 311 insertions(+), 177 deletions(-) create mode 100644 src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.types.ts create mode 100644 src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts create mode 100644 src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts diff --git a/src/app/api/dataset.api.ts b/src/app/api/dataset.api.ts index 93228f8e7..e4f99d2e3 100644 --- a/src/app/api/dataset.api.ts +++ b/src/app/api/dataset.api.ts @@ -59,11 +59,17 @@ export class DatasetApi { numRecords?: number; }): Observable { return this.datasetMainDataGQL - .watch({ - accountName: params.accountName, - datasetName: params.datasetName, - limit: params.numRecords ?? AppValues.SQL_QUERY_LIMIT, - }) + .watch( + { + accountName: params.accountName, + datasetName: params.datasetName, + limit: params.numRecords ?? 
AppValues.SQL_QUERY_LIMIT, + }, + { + fetchPolicy: "network-only", + errorPolicy: "all", + }, + ) .valueChanges.pipe( first(), map((result: ApolloQueryResult) => { diff --git a/src/app/app.module.ts b/src/app/app.module.ts index 3853150ea..e6ad8736c 100644 --- a/src/app/app.module.ts +++ b/src/app/app.module.ts @@ -96,12 +96,6 @@ const Services = [ link: httpLink.create({ uri: appConfig.apiServerGqlUrl, }), - defaultOptions: { - watchQuery: { - fetchPolicy: "network-only", - errorPolicy: "all", - }, - }, }; }, deps: [HttpLink, AppConfigService], diff --git a/src/app/dataset-create/dataset-create.service.ts b/src/app/dataset-create/dataset-create.service.ts index 36f005900..0b7a94d40 100644 --- a/src/app/dataset-create/dataset-create.service.ts +++ b/src/app/dataset-create/dataset-create.service.ts @@ -26,21 +26,21 @@ export class AppDatasetCreateService { return this.errorMessageChanges$.asObservable(); } - private errorCommitEventChanges$: Subject = new Subject(); + // private errorCommitEventChanges$: Subject = new Subject(); - public errorCommitEventChanges(message: string): void { - this.errorCommitEventChanges$.next(message); - } + // public errorCommitEventChanges(message: string): void { + // this.errorCommitEventChanges$.next(message); + // } - public get onErrorCommitEventChanges(): Observable { - return this.errorCommitEventChanges$.asObservable(); - } - private cache = new Map(); + // public get onErrorCommitEventChanges(): Observable { + // return this.errorCommitEventChanges$.asObservable(); + // } + + // private datasetIdsByAccountDatasetName = new Map(); public constructor( private datasetApi: DatasetApi, private navigationService: NavigationService, - private datasetService: DatasetService, ) {} public createEmptyDataset( @@ -107,93 +107,93 @@ export class AppDatasetCreateService { ); } - public commitEventToDataset( - accountName: string, - datasetName: string, - event: string, - ): Observable { - return this.getIdByAccountNameAndDatasetName( - accountName, - datasetName, - ).pipe( - switchMap((id: string) => - this.datasetApi.commitEvent({ - datasetId: id, - event, - }), - ), - map((data: CommitEventToDatasetMutation | undefined | null) => { - if ( - data?.datasets.byId?.metadata.chain.commitEvent - .__typename === "CommitResultAppendError" || - data?.datasets.byId?.metadata.chain.commitEvent - .__typename === "MetadataManifestMalformed" - ) { - this.errorCommitEventChanges( - data.datasets.byId.metadata.chain.commitEvent.message, - ); - } else { - this.successActions(accountName, datasetName); - } - }), - ); - } + // public commitEventToDataset( + // accountName: string, + // datasetName: string, + // event: string, + // ): Observable { + // return this.getIdByAccountNameAndDatasetName( + // accountName, + // datasetName, + // ).pipe( + // switchMap((id: string) => + // this.datasetApi.commitEvent({ + // datasetId: id, + // event, + // }), + // ), + // map((data: CommitEventToDatasetMutation | undefined | null) => { + // if ( + // data?.datasets.byId?.metadata.chain.commitEvent + // .__typename === "CommitResultAppendError" || + // data?.datasets.byId?.metadata.chain.commitEvent + // .__typename === "MetadataManifestMalformed" + // ) { + // this.errorCommitEventChanges( + // data.datasets.byId.metadata.chain.commitEvent.message, + // ); + // } else { + // this.updatePage(accountName, datasetName); + // } + // }), + // ); + // } - public getIdByAccountNameAndDatasetName( - accountName: string, - datasetName: string, - ): Observable { - const key = 
`${accountName}${datasetName}`; - if (this.cache.has(key)) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - return of(this.cache.get(key)!); - } else { - return this.datasetApi - .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) - .pipe( - map((data: DatasetByAccountAndDatasetNameQuery) => { - const id = data.datasets.byOwnerAndName?.id as string; - this.cache.set(key, id); - return id; - }), - ); - } - } + // public getIdByAccountNameAndDatasetName( + // accountName: string, + // datasetName: string, + // ): Observable { + // const key = `${accountName}${datasetName}`; + // if (this.datasetIdsByAccountDatasetName.has(key)) { + // // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + // return of(this.datasetIdsByAccountDatasetName.get(key)!); + // } else { + // return this.datasetApi + // .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) + // .pipe( + // map((data: DatasetByAccountAndDatasetNameQuery) => { + // const id = data.datasets.byOwnerAndName?.id as string; + // this.datasetIdsByAccountDatasetName.set(key, id); + // return id; + // }), + // ); + // } + // } - public updateReadme( - accountName: string, - datasetName: string, - content: string, - ): Observable { - return this.getIdByAccountNameAndDatasetName( - accountName, - datasetName, - ).pipe( - switchMap((id: string) => - this.datasetApi.updateReadme(id, content), - ), - map((data: UpdateReadmeMutation | null | undefined) => { - if ( - data?.datasets.byId?.metadata.updateReadme.__typename === - "CommitResultSuccess" - ) { - this.successActions(accountName, datasetName); - } - }), - ); - } + // public updateReadme( + // accountName: string, + // datasetName: string, + // content: string, + // ): Observable { + // return this.getIdByAccountNameAndDatasetName( + // accountName, + // datasetName, + // ).pipe( + // switchMap((id: string) => + // this.datasetApi.updateReadme(id, content), + // ), + // map((data: UpdateReadmeMutation | null | undefined) => { + // if ( + // data?.datasets.byId?.metadata.updateReadme.__typename === + // "CommitResultSuccess" + // ) { + // this.updatePage(accountName, datasetName); + // } + // }), + // ); + // } - private successActions(accountName: string, datasetName: string): void { - this.datasetService - .requestDatasetMainData({ - accountName, - datasetName, - }) - .subscribe(); - this.navigationService.navigateToDatasetView({ - accountName, - datasetName, - tab: DatasetViewTypeEnum.Overview, - }); - } + // private updatePage(accountName: string, datasetName: string): void { + // this.datasetService + // .requestDatasetMainData({ + // accountName, + // datasetName, + // }) + // .subscribe(); + // this.navigationService.navigateToDatasetView({ + // accountName, + // datasetName, + // tab: DatasetViewTypeEnum.Overview, + // }); + // } } diff --git a/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.ts index e73ae82d8..a3a2c0ecd 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.ts @@ -137,7 +137,7 @@ export class AddPollingSourceComponent public onSaveEvent(): void { 
this.processFormService.transformForm(this.pollingSourceForm); this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( this.getDatasetInfoFromUrl().accountName, this.getDatasetInfoFromUrl().datasetName, diff --git a/src/app/dataset-view/additional-components/metadata-component/components/base-main-event.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/base-main-event.component.ts index 00d7f59be..b2d2773f3 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/base-main-event.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/base-main-event.component.ts @@ -3,13 +3,13 @@ import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; import { DatasetKind } from "src/app/api/kamu.graphql.interface"; import { MaybeNull } from "src/app/common/app.types"; import { BaseComponent } from "src/app/common/base.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { DatasetHistoryUpdate } from "src/app/dataset-view/dataset.subscriptions.interface"; import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; +import { DatasetCommitService } from "../../overview-component/services/dataset-commit.service"; export abstract class BaseMainEventComponent extends BaseComponent { public modalService = inject(NgbModal); - public createDatasetService = inject(AppDatasetCreateService); + public datasetCommitService = inject(DatasetCommitService); public cdr = inject(ChangeDetectorRef); public yamlEventService = inject(TemplatesYamlEventsService); @@ -21,7 +21,7 @@ export abstract class BaseMainEventComponent extends BaseComponent { protected subsribeErrorMessage(): void { this.trackSubscription( - this.createDatasetService.onErrorCommitEventChanges.subscribe( + this.datasetCommitService.onErrorCommitEventChanges.subscribe( (message: string) => { this.errorMessage = message; this.cdr.detectChanges(); diff --git a/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.ts index cb6dbf8b9..b21122a00 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.ts @@ -2,10 +2,8 @@ import { BaseComponent } from "src/app/common/base.component"; import { ChangeDetectionStrategy, Component, Input } from "@angular/core"; import { NgbActiveModal } from "@ng-bootstrap/ng-bootstrap"; import * as monaco from "monaco-editor"; - -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; - import { DatasetInfo } from "src/app/interface/navigation.interface"; +import { DatasetCommitService } from "../../../overview-component/services/dataset-commit.service"; @Component({ selector: "app-final-yaml-modal", @@ -30,14 +28,14 @@ export class FinalYamlModalComponent extends BaseComponent { constructor( public activeModal: NgbActiveModal, - private createDatasetService: AppDatasetCreateService, + private datasetCommitService: DatasetCommitService, ) { super(); } public saveEvent(): void { this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( 
this.datasetInfo.accountName, this.datasetInfo.datasetName, diff --git a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts index d920dcdb8..6ee769f0b 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts @@ -162,7 +162,7 @@ export class SetTransformComponent public onSaveEvent(): void { this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( this.getDatasetInfoFromUrl().accountName, this.getDatasetInfoFromUrl().datasetName, diff --git a/src/app/dataset-view/additional-components/overview-component/components/edit-details-modal/edit-details-modal.component.ts b/src/app/dataset-view/additional-components/overview-component/components/edit-details-modal/edit-details-modal.component.ts index 2a6dfa33d..031e2fae1 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/edit-details-modal/edit-details-modal.component.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/edit-details-modal/edit-details-modal.component.ts @@ -9,9 +9,9 @@ import { } from "src/app/api/kamu.graphql.interface"; import { MaybeNull } from "src/app/common/app.types"; import { BaseComponent } from "src/app/common/base.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; +import { DatasetCommitService } from "../../services/dataset-commit.service"; @Component({ selector: "app-details-modal", @@ -31,7 +31,7 @@ export class EditDetailsModalComponent extends BaseComponent implements OnInit { public initialDescription = ""; public initialKeywords: string[] = []; constructor( - private createDatasetService: AppDatasetCreateService, + private datasetCommitService: DatasetCommitService, private yamlEventService: TemplatesYamlEventsService, public activeModal: NgbActiveModal, ) { @@ -74,7 +74,7 @@ export class EditDetailsModalComponent extends BaseComponent implements OnInit { public commitSetInfoEvent(): void { if (this.datasetBasics) this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( this.datasetBasics.owner.name, this.datasetBasics.name as string, diff --git a/src/app/dataset-view/additional-components/overview-component/components/edit-license-modal/edit-license-modal.component.ts b/src/app/dataset-view/additional-components/overview-component/components/edit-license-modal/edit-license-modal.component.ts index 39995200a..483cdad0e 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/edit-license-modal/edit-license-modal.component.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/edit-license-modal/edit-license-modal.component.ts @@ -12,8 +12,8 @@ import { } from "src/app/api/kamu.graphql.interface"; import { MaybeNull } from "src/app/common/app.types"; import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { 
TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; +import { DatasetCommitService } from "../../services/dataset-commit.service"; @Component({ selector: "app-edit-license-modal", @@ -41,7 +41,7 @@ export class EditLicenseModalComponent extends BaseComponent implements OnInit { constructor( public activeModal: NgbActiveModal, private fb: FormBuilder, - private createDatasetService: AppDatasetCreateService, + private datasetCommitService: DatasetCommitService, private yamlEventService: TemplatesYamlEventsService, ) { super(); @@ -63,7 +63,7 @@ export class EditLicenseModalComponent extends BaseComponent implements OnInit { public onEditLicense(): void { if (this.datasetBasics) this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( this.datasetBasics.owner.name, this.datasetBasics.name as string, diff --git a/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.ts b/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.ts index c89dc3c3b..1c61c2e7c 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.ts @@ -11,8 +11,8 @@ import moment from "moment-timezone"; import { DatasetBasicsFragment } from "src/app/api/kamu.graphql.interface"; import { BaseComponent } from "src/app/common/base.component"; import { MY_MOMENT_FORMATS } from "src/app/common/data.helpers"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { TemplatesYamlEventsService } from "src/app/services/templates-yaml-events.service"; +import { DatasetCommitService } from "../../services/dataset-commit.service"; @Component({ selector: "app-edit-watermark-modal", @@ -35,7 +35,7 @@ export class EditWatermarkModalComponent constructor( public activeModal: NgbActiveModal, private yamlEventService: TemplatesYamlEventsService, - private createDatasetService: AppDatasetCreateService, + private datasetCommitService: DatasetCommitService, ) { super(); } @@ -73,7 +73,7 @@ export class EditWatermarkModalComponent const date = moment.utc(this.date).tz(this.timeZone).format(); if (this.datasetBasics) { this.trackSubscription( - this.createDatasetService + this.datasetCommitService .commitEventToDataset( this.datasetBasics.owner.name, this.datasetBasics.name as string, diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html index 06fbb11cb..fe5def22d 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html @@ -1,7 +1,7 @@
@@ -57,20 +57,20 @@

- + - +
diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts index 5b7855192..49ba739ee 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.ts @@ -4,17 +4,11 @@ import { Input, OnInit, } from "@angular/core"; -import { - DatasetBasicsFragment, - DatasetDataSizeFragment, - DatasetOverviewFragment, -} from "src/app/api/kamu.graphql.interface"; +import { DatasetBasicsFragment } from "src/app/api/kamu.graphql.interface"; import { MaybeNull } from "src/app/common/app.types"; import { BaseComponent } from "src/app/common/base.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; -import { OverviewDataUpdate } from "src/app/dataset-view/dataset.subscriptions.interface"; -import { AppDatasetSubscriptionsService } from "src/app/dataset-view/dataset.subscriptions.service"; -import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; +import { EditMode } from "./readme-section.types"; +import { DatasetCommitService } from "../../services/dataset-commit.service"; @Component({ selector: "app-readme-section", @@ -24,63 +18,60 @@ import { DatasetSchema, DataRow } from "src/app/interface/dataset.interface"; }) export class ReadmeSectionComponent extends BaseComponent implements OnInit { @Input() public datasetBasics?: DatasetBasicsFragment; - @Input() public currentState?: { - schema: MaybeNull; - data: DataRow[]; - overview: DatasetOverviewFragment; - size: DatasetDataSizeFragment; - }; - public isEditMode = true; - public initialReadmeState = ""; + @Input() public currentReadme?: MaybeNull; + public editMode: typeof EditMode = EditMode; + public viewMode = EditMode.Edit; public readmeState = ""; - public isMarkdownEditView = false; + public editViewShow = false; public get readmeChanged(): boolean { - return this.initialReadmeState !== this.readmeState; + return this.currentReadme !== this.readmeState; } - constructor( - private appDatasetSubsService: AppDatasetSubscriptionsService, - private createDatasetService: AppDatasetCreateService, - ) { + constructor(private datasetCommitService: DatasetCommitService) { super(); } ngOnInit(): void { - this.trackSubscription( - this.appDatasetSubsService.onDatasetOverviewDataChanges.subscribe( - (overviewUpdate: OverviewDataUpdate) => { - this.initialReadmeState = this.readmeState = - overviewUpdate.overview.metadata.currentReadme ?? 
""; - }, - ), - ); + if (this.currentReadme) { + this.readmeState = this.currentReadme; + } } - public toggleReadmeView(): void { - this.isMarkdownEditView = !this.isMarkdownEditView; + public get isEditView(): boolean { + return this.viewMode === EditMode.Edit; + } + + public get isPreviewView(): boolean { + return this.viewMode === EditMode.Preview; } - public toggleEditMode(): void { - this.isEditMode = !this.isEditMode; + public selectMode(mode: EditMode): void { + this.viewMode = mode; + } + + public toggleReadmeView(): void { + this.editViewShow = !this.editViewShow; } public onCancelChanges(): void { - this.readmeState = this.initialReadmeState; - this.isMarkdownEditView = false; - this.isEditMode = true; + if (this.currentReadme) { + this.readmeState = this.currentReadme; + this.editViewShow = false; + this.viewMode = EditMode.Edit; + } } - public commitChanges(): void { + public saveChanges(): void { if (this.datasetBasics) this.trackSubscription( - this.createDatasetService + this.datasetCommitService .updateReadme( this.datasetBasics.owner.name, this.datasetBasics.name as string, this.readmeState, ) - .subscribe(() => (this.isMarkdownEditView = false)), + .subscribe(() => (this.editViewShow = false)), ); } } diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.types.ts b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.types.ts new file mode 100644 index 000000000..8b2e8472a --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.types.ts @@ -0,0 +1,4 @@ +export enum EditMode { + Edit = "edit", + Preview = "preview", +} diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.html b/src/app/dataset-view/additional-components/overview-component/overview-component.html index e5932e7eb..a96a2193d 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.html +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.html @@ -124,7 +124,7 @@

diff --git a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts new file mode 100644 index 000000000..748d260a5 --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts @@ -0,0 +1,16 @@ +import { TestBed } from '@angular/core/testing'; + +import { DatasetCommitService } from './dataset-commit.service'; + +describe('DatasetCommitService', () => { + let service: DatasetCommitService; + + beforeEach(() => { + TestBed.configureTestingModule({}); + service = TestBed.inject(DatasetCommitService); + }); + + it('should be created', () => { + expect(service).toBeTruthy(); + }); +}); diff --git a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts new file mode 100644 index 000000000..7e548506c --- /dev/null +++ b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts @@ -0,0 +1,125 @@ +import { Injectable } from "@angular/core"; +import { Observable, Subject, of } from "rxjs"; +import { switchMap, map } from "rxjs/operators"; +import { DatasetApi } from "src/app/api/dataset.api"; +import { + CommitEventToDatasetMutation, + DatasetByAccountAndDatasetNameQuery, + UpdateReadmeMutation, +} from "src/app/api/kamu.graphql.interface"; +import { DatasetViewTypeEnum } from "src/app/dataset-view/dataset-view.interface"; +import { DatasetService } from "src/app/dataset-view/dataset.service"; +import { NavigationService } from "src/app/services/navigation.service"; + +@Injectable({ + providedIn: "root", +}) +export class DatasetCommitService { + private errorCommitEventChanges$: Subject = new Subject(); + + public errorCommitEventChanges(message: string): void { + this.errorCommitEventChanges$.next(message); + } + + public get onErrorCommitEventChanges(): Observable { + return this.errorCommitEventChanges$.asObservable(); + } + + private datasetIdsByAccountDatasetName = new Map(); + + constructor( + private datasetApi: DatasetApi, + private navigationService: NavigationService, + private datasetService: DatasetService, + ) {} + + public commitEventToDataset( + accountName: string, + datasetName: string, + event: string, + ): Observable { + return this.getIdByAccountNameAndDatasetName( + accountName, + datasetName, + ).pipe( + switchMap((id: string) => + this.datasetApi.commitEvent({ + datasetId: id, + event, + }), + ), + map((data: CommitEventToDatasetMutation | undefined | null) => { + if ( + data?.datasets.byId?.metadata.chain.commitEvent + .__typename === "CommitResultAppendError" || + data?.datasets.byId?.metadata.chain.commitEvent + .__typename === "MetadataManifestMalformed" + ) { + this.errorCommitEventChanges( + data.datasets.byId.metadata.chain.commitEvent.message, + ); + } else { + this.updatePage(accountName, datasetName); + } + }), + ); + } + + public getIdByAccountNameAndDatasetName( + accountName: string, + datasetName: string, + ): Observable { + const key = `${accountName}${datasetName}`; + if (this.datasetIdsByAccountDatasetName.has(key)) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return of(this.datasetIdsByAccountDatasetName.get(key)!); + } else { + return this.datasetApi + .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) + .pipe( + map((data: 
DatasetByAccountAndDatasetNameQuery) => { + const id = data.datasets.byOwnerAndName?.id as string; + this.datasetIdsByAccountDatasetName.set(key, id); + return id; + }), + ); + } + } + + public updateReadme( + accountName: string, + datasetName: string, + content: string, + ): Observable { + return this.getIdByAccountNameAndDatasetName( + accountName, + datasetName, + ).pipe( + switchMap((id: string) => + this.datasetApi.updateReadme(id, content), + ), + map((data: UpdateReadmeMutation | null | undefined) => { + if ( + data?.datasets.byId?.metadata.updateReadme.__typename === + "CommitResultSuccess" + ) { + this.updatePage(accountName, datasetName); + } + }), + ); + } + + private updatePage(accountName: string, datasetName: string): void { + this.datasetService + .requestDatasetMainData({ + accountName, + datasetName, + }) + .subscribe(); + this.navigationService.navigateToDatasetView({ + accountName, + datasetName, + tab: DatasetViewTypeEnum.Overview, + }); + } +} From 3d16b435e9ff0407e38ed97a6b07da013e8fae1b Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Thu, 3 Aug 2023 19:03:59 +0300 Subject: [PATCH 11/16] Add ability to create README.md --- .../dataset-create/dataset-create.service.ts | 110 +----------------- .../add-polling-source.component.spec.ts | 12 +- .../readme-section.component.html | 2 +- .../readme-section.component.ts | 23 ++-- .../overview-component.html | 17 +++ .../overview-component/overview-component.ts | 7 ++ 6 files changed, 49 insertions(+), 122 deletions(-) diff --git a/src/app/dataset-create/dataset-create.service.ts b/src/app/dataset-create/dataset-create.service.ts index 0b7a94d40..74cf9290d 100644 --- a/src/app/dataset-create/dataset-create.service.ts +++ b/src/app/dataset-create/dataset-create.service.ts @@ -1,18 +1,14 @@ import { - CommitEventToDatasetMutation, CreateDatasetFromSnapshotMutation, CreateEmptyDatasetMutation, - DatasetByAccountAndDatasetNameQuery, - UpdateReadmeMutation, } from "./../api/kamu.graphql.interface"; -import { Observable, Subject, of } from "rxjs"; +import { Observable, Subject } from "rxjs"; import { DatasetApi } from "src/app/api/dataset.api"; import { Injectable } from "@angular/core"; import { DatasetKind } from "../api/kamu.graphql.interface"; -import { map, switchMap } from "rxjs/operators"; +import { map } from "rxjs/operators"; import { NavigationService } from "../services/navigation.service"; import { DatasetViewTypeEnum } from "../dataset-view/dataset-view.interface"; -import { DatasetService } from "../dataset-view/dataset.service"; @Injectable({ providedIn: "root" }) export class AppDatasetCreateService { @@ -26,18 +22,6 @@ export class AppDatasetCreateService { return this.errorMessageChanges$.asObservable(); } - // private errorCommitEventChanges$: Subject = new Subject(); - - // public errorCommitEventChanges(message: string): void { - // this.errorCommitEventChanges$.next(message); - // } - - // public get onErrorCommitEventChanges(): Observable { - // return this.errorCommitEventChanges$.asObservable(); - // } - - // private datasetIdsByAccountDatasetName = new Map(); - public constructor( private datasetApi: DatasetApi, private navigationService: NavigationService, @@ -106,94 +90,4 @@ export class AppDatasetCreateService { ), ); } - - // public commitEventToDataset( - // accountName: string, - // datasetName: string, - // event: string, - // ): Observable { - // return this.getIdByAccountNameAndDatasetName( - // accountName, - // datasetName, - // ).pipe( - // switchMap((id: string) => - // 
this.datasetApi.commitEvent({ - // datasetId: id, - // event, - // }), - // ), - // map((data: CommitEventToDatasetMutation | undefined | null) => { - // if ( - // data?.datasets.byId?.metadata.chain.commitEvent - // .__typename === "CommitResultAppendError" || - // data?.datasets.byId?.metadata.chain.commitEvent - // .__typename === "MetadataManifestMalformed" - // ) { - // this.errorCommitEventChanges( - // data.datasets.byId.metadata.chain.commitEvent.message, - // ); - // } else { - // this.updatePage(accountName, datasetName); - // } - // }), - // ); - // } - - // public getIdByAccountNameAndDatasetName( - // accountName: string, - // datasetName: string, - // ): Observable { - // const key = `${accountName}${datasetName}`; - // if (this.datasetIdsByAccountDatasetName.has(key)) { - // // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - // return of(this.datasetIdsByAccountDatasetName.get(key)!); - // } else { - // return this.datasetApi - // .getDatasetInfoByAccountAndDatasetName(accountName, datasetName) - // .pipe( - // map((data: DatasetByAccountAndDatasetNameQuery) => { - // const id = data.datasets.byOwnerAndName?.id as string; - // this.datasetIdsByAccountDatasetName.set(key, id); - // return id; - // }), - // ); - // } - // } - - // public updateReadme( - // accountName: string, - // datasetName: string, - // content: string, - // ): Observable { - // return this.getIdByAccountNameAndDatasetName( - // accountName, - // datasetName, - // ).pipe( - // switchMap((id: string) => - // this.datasetApi.updateReadme(id, content), - // ), - // map((data: UpdateReadmeMutation | null | undefined) => { - // if ( - // data?.datasets.byId?.metadata.updateReadme.__typename === - // "CommitResultSuccess" - // ) { - // this.updatePage(accountName, datasetName); - // } - // }), - // ); - // } - - // private updatePage(accountName: string, datasetName: string): void { - // this.datasetService - // .requestDatasetMainData({ - // accountName, - // datasetName, - // }) - // .subscribe(); - // this.navigationService.navigateToDatasetView({ - // accountName, - // datasetName, - // tab: DatasetViewTypeEnum.Overview, - // }); - // } } diff --git a/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.spec.ts b/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.spec.ts index 23b9a433b..2e322878c 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.spec.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/add-polling-source/add-polling-source.component.spec.ts @@ -13,7 +13,6 @@ import { ComponentFixture, TestBed } from "@angular/core/testing"; import { AddPollingSourceComponent } from "./add-polling-source.component"; import { NgbModal, NgbModalRef, NgbModule } from "@ng-bootstrap/ng-bootstrap"; import { FinalYamlModalComponent } from "../final-yaml-modal/final-yaml-modal.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { SetPollingSourceSection } from "src/app/shared/shared.types"; import { MonacoEditorModule } from "ngx-monaco-editor"; import { StepperNavigationComponent } from "../stepper-navigation/stepper-navigation.component"; @@ -28,13 +27,14 @@ import { } from "src/app/api/kamu.graphql.interface"; import { EditPollingSourceService } from "./edit-polling-source.service"; import 
{ SharedTestModule } from "src/app/common/shared-test.module"; +import { DatasetCommitService } from "../../../overview-component/services/dataset-commit.service"; describe("AddPollingSourceComponent", () => { let component: AddPollingSourceComponent; let fixture: ComponentFixture; let modalService: NgbModal; let modalRef: NgbModalRef; - let createDatasetService: AppDatasetCreateService; + let datasetCommitService: DatasetCommitService; let editService: EditPollingSourceService; beforeEach(async () => { @@ -64,7 +64,7 @@ describe("AddPollingSourceComponent", () => { modalService = TestBed.inject(NgbModal); editService = TestBed.inject(EditPollingSourceService); - createDatasetService = TestBed.inject(AppDatasetCreateService); + datasetCommitService = TestBed.inject(DatasetCommitService); modalRef = modalService.open(FinalYamlModalComponent); component = fixture.componentInstance; component.showPreprocessStep = false; @@ -120,7 +120,7 @@ describe("AddPollingSourceComponent", () => { const mockError = "Some error"; expect(component.errorMessage).toBe(""); expect(component.changedEventYamlByHash).toBeUndefined(); - createDatasetService.errorCommitEventChanges(mockError); + datasetCommitService.errorCommitEventChanges(mockError); expect(component.errorMessage).toBe(mockError); component.onEditYaml(); @@ -145,7 +145,7 @@ describe("AddPollingSourceComponent", () => { it("should check submit yaml", () => { component.ngOnInit(); const submitYamlSpy = spyOn( - createDatasetService, + datasetCommitService, "commitEventToDataset", ).and.returnValue(of()); component.onSaveEvent(); @@ -166,7 +166,7 @@ describe("AddPollingSourceComponent", () => { const errorMessage = "test error message"; expect(component.errorMessage).toBe(""); - createDatasetService.errorCommitEventChanges(errorMessage); + datasetCommitService.errorCommitEventChanges(errorMessage); expect(component.errorMessage).toBe(errorMessage); }); diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html index fe5def22d..34293e1b5 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html @@ -1,7 +1,7 @@
(); public get readmeChanged(): boolean { return this.currentReadme !== this.readmeState; @@ -52,14 +55,12 @@ export class ReadmeSectionComponent extends BaseComponent implements OnInit { public toggleReadmeView(): void { this.editViewShow = !this.editViewShow; + this.editViewShowEmmiter.emit(this.editViewShow); } public onCancelChanges(): void { - if (this.currentReadme) { - this.readmeState = this.currentReadme; - this.editViewShow = false; - this.viewMode = EditMode.Edit; - } + this.readmeState = this.currentReadme ?? ""; + this.reset(); } public saveChanges(): void { @@ -71,7 +72,15 @@ export class ReadmeSectionComponent extends BaseComponent implements OnInit { this.datasetBasics.name as string, this.readmeState, ) - .subscribe(() => (this.editViewShow = false)), + .subscribe(() => { + this.reset(); + }), ); } + + private reset(): void { + this.viewMode = EditMode.Edit; + this.editViewShow = false; + this.editViewShowEmmiter.emit(this.editViewShow); + } } diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.html b/src/app/dataset-view/additional-components/overview-component/overview-component.html index a96a2193d..ff7620575 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.html +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.html @@ -122,9 +122,26 @@

No data

+
+

+ You can + + Add README.md +

+
diff --git a/src/app/dataset-view/additional-components/overview-component/overview-component.ts b/src/app/dataset-view/additional-components/overview-component/overview-component.ts index c48bab74e..9744d0c9b 100644 --- a/src/app/dataset-view/additional-components/overview-component/overview-component.ts +++ b/src/app/dataset-view/additional-components/overview-component/overview-component.ts @@ -3,6 +3,7 @@ import { OverviewDataUpdate } from "src/app/dataset-view/dataset.subscriptions.i import { DatasetKind } from "./../../../api/kamu.graphql.interface"; import { ChangeDetectionStrategy, + ChangeDetectorRef, Component, EventEmitter, Input, @@ -37,6 +38,7 @@ export class OverviewComponent extends BaseComponent implements OnInit { @Input() public datasetBasics?: DatasetBasicsFragment; @Output() toggleReadmeViewEmit = new EventEmitter(); @Output() selectTopicEmit = new EventEmitter(); + public addReadme = false; public currentState?: { schema: MaybeNull; @@ -49,6 +51,7 @@ export class OverviewComponent extends BaseComponent implements OnInit { private appDatasetSubsService: AppDatasetSubscriptionsService, private navigationService: NavigationService, private modalService: NgbModal, + private cdr: ChangeDetectorRef, ) { super(); } @@ -132,4 +135,8 @@ export class OverviewComponent extends BaseComponent implements OnInit { datasetName: this.datasetBasics.name as string, }); } + + public onAddReadme(): void { + this.addReadme = true; + } } From e469abc9a00cbed82b5b5ee7786bced1edf7bdd0 Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Fri, 4 Aug 2023 08:13:16 +0300 Subject: [PATCH 12/16] Improve test coverage. --- .../additional-components/data-tabs.mock.ts | 66 +++++++- .../final-yaml-modal.component.spec.ts | 8 +- .../set-transform.component.spec.ts | 8 +- .../edit-watermark-modal.component.spec.ts | 8 +- .../readme-section.component.html | 15 +- .../readme-section.component.spec.ts | 80 +++++++++- .../readme-section.component.ts | 2 +- .../services/dataset-commit.service.spec.ts | 144 ++++++++++++++++-- .../services/dataset-commit.service.ts | 2 +- 9 files changed, 300 insertions(+), 33 deletions(-) diff --git a/src/app/dataset-view/additional-components/data-tabs.mock.ts b/src/app/dataset-view/additional-components/data-tabs.mock.ts index 175f56070..365c4511b 100644 --- a/src/app/dataset-view/additional-components/data-tabs.mock.ts +++ b/src/app/dataset-view/additional-components/data-tabs.mock.ts @@ -4,7 +4,11 @@ import { DataSqlErrorUpdate, DataUpdate, } from "../dataset.subscriptions.interface"; -import { DatasetKind } from "src/app/api/kamu.graphql.interface"; +import { + CommitEventToDatasetMutation, + DatasetKind, + UpdateReadmeMutation, +} from "src/app/api/kamu.graphql.interface"; export const mockDataUpdate: DataUpdate = { schema: { @@ -814,3 +818,63 @@ export const mockOverviewWithSetLicense = { createdAt: "2022-08-05T21:10:57.332924745+00:00", lastUpdatedAt: "2022-08-05T21:15:03.947245004+00:00", }; + +export const mockCommitEventToDatasetMutationError: CommitEventToDatasetMutation = + { + datasets: { + byId: { + metadata: { + chain: { + commitEvent: { + __typename: "CommitResultAppendError", + message: "Fail", + }, + __typename: "MetadataChainMut", + }, + __typename: "DatasetMetadataMut", + }, + __typename: "DatasetMut", + }, + __typename: "DatasetsMut", + }, + }; + +export const mockCommitEventToDatasetMutation: CommitEventToDatasetMutation = { + datasets: { + byId: { + metadata: { + chain: { + commitEvent: { + __typename: "CommitResultSuccess", + message: "Success", + 
oldHead: + "zW1cMmaF6PdmJPM6LNyy2RyyWFWkF3EojZ54ezAvT2dVgKB", + newHead: + "zW1eXXAXqgtfNDFt7oXpMfLfDy5Tzg3dMDLWQTz2YJE6ooX", + }, + __typename: "MetadataChainMut", + }, + __typename: "DatasetMetadataMut", + }, + __typename: "DatasetMut", + }, + __typename: "DatasetsMut", + }, +}; + +export const mockUpdateReadmeMutation: UpdateReadmeMutation = { + datasets: { + byId: { + metadata: { + updateReadme: { + __typename: "CommitResultSuccess", + message: "Success", + oldHead: "zW1oSh19bxPZqLhY9awS7cnFrmQUueZ5MF21wVf8poHDnaX", + }, + __typename: "DatasetMetadataMut", + }, + __typename: "DatasetMut", + }, + __typename: "DatasetsMut", + }, +}; diff --git a/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.spec.ts b/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.spec.ts index 5300e2fec..eaf797b0c 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.spec.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/final-yaml-modal/final-yaml-modal.component.spec.ts @@ -2,13 +2,13 @@ import { ComponentFixture, TestBed } from "@angular/core/testing"; import { NgbActiveModal } from "@ng-bootstrap/ng-bootstrap"; import { ApolloModule } from "apollo-angular"; import { FinalYamlModalComponent } from "./final-yaml-modal.component"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { emitClickOnElementByDataTestId } from "src/app/common/base-test.helpers.spec"; import { DatasetInfo } from "src/app/interface/navigation.interface"; import { of } from "rxjs"; import { FormsModule } from "@angular/forms"; import { MonacoEditorModule } from "ngx-monaco-editor"; import { SharedTestModule } from "src/app/common/shared-test.module"; +import { DatasetCommitService } from "../../../overview-component/services/dataset-commit.service"; const testDatasetInfo: DatasetInfo = { accountName: "testAccountName", @@ -18,7 +18,7 @@ const testDatasetInfo: DatasetInfo = { describe("FinalYamlModalComponent", () => { let component: FinalYamlModalComponent; let fixture: ComponentFixture; - let createDatasetService: AppDatasetCreateService; + let datasetCommitService: DatasetCommitService; let activeModal: NgbActiveModal; beforeEach(async () => { @@ -34,7 +34,7 @@ describe("FinalYamlModalComponent", () => { }).compileComponents(); fixture = TestBed.createComponent(FinalYamlModalComponent); - createDatasetService = TestBed.inject(AppDatasetCreateService); + datasetCommitService = TestBed.inject(DatasetCommitService); activeModal = TestBed.inject(NgbActiveModal); component = fixture.componentInstance; component.yamlTemplate = "test yaml"; @@ -48,7 +48,7 @@ describe("FinalYamlModalComponent", () => { it("should save event", () => { const commitEventToDatasetSpy = spyOn( - createDatasetService, + datasetCommitService, "commitEventToDataset", ).and.returnValue(of()); const closeModalSpy = spyOn(activeModal, "close"); diff --git a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.spec.ts b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.spec.ts index 052439354..8a6a1b0a5 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.spec.ts +++ 
b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.spec.ts @@ -5,7 +5,6 @@ import { ActivatedRoute } from "@angular/router"; import { ApolloTestingModule } from "apollo-angular/testing"; import { EditSetTransformService } from "./edit-set-transform..service"; import { DatasetKind } from "src/app/api/kamu.graphql.interface"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { of } from "rxjs"; import { NgbModal, NgbModalRef } from "@ng-bootstrap/ng-bootstrap"; import { FinalYamlModalComponent } from "../final-yaml-modal/final-yaml-modal.component"; @@ -19,12 +18,13 @@ import { import { DatasetService } from "src/app/dataset-view/dataset.service"; import { MatTreeNestedDataSource } from "@angular/material/tree"; import { DatasetNode } from "./set-transform.types"; +import { DatasetCommitService } from "../../../overview-component/services/dataset-commit.service"; describe("SetTransformComponent", () => { let component: SetTransformComponent; let fixture: ComponentFixture; let editService: EditSetTransformService; - let createDatasetService: AppDatasetCreateService; + let datasetCommitService: DatasetCommitService; let modalService: NgbModal; let modalRef: NgbModalRef; let datasetService: DatasetService; @@ -70,7 +70,7 @@ describe("SetTransformComponent", () => { modalService = TestBed.inject(NgbModal); modalRef = modalService.open(FinalYamlModalComponent); editService = TestBed.inject(EditSetTransformService); - createDatasetService = TestBed.inject(AppDatasetCreateService); + datasetCommitService = TestBed.inject(DatasetCommitService); datasetService = TestBed.inject(DatasetService); fixture.detectChanges(); }); @@ -88,7 +88,7 @@ describe("SetTransformComponent", () => { it("should check save event", () => { const commitEventToDatasetSpy = spyOn( - createDatasetService, + datasetCommitService, "commitEventToDataset", ).and.returnValue(of()); component.onSaveEvent(); diff --git a/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.spec.ts b/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.spec.ts index b66c59ca0..23d3f9772 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.spec.ts +++ b/src/app/dataset-view/additional-components/overview-component/components/edit-watermark-modal/edit-watermark-modal.component.spec.ts @@ -13,14 +13,14 @@ import { FormsModule } from "@angular/forms"; import timekeeper from "timekeeper"; import { mockDatasetBasicsFragment } from "src/app/search/mock.data"; import { emitClickOnElementByDataTestId } from "src/app/common/base-test.helpers.spec"; -import { AppDatasetCreateService } from "src/app/dataset-create/dataset-create.service"; import { of } from "rxjs"; import { SharedTestModule } from "src/app/common/shared-test.module"; +import { DatasetCommitService } from "../../services/dataset-commit.service"; describe("EditWatermarkModalComponent", () => { let component: EditWatermarkModalComponent; let fixture: ComponentFixture; - let appDatasetCreateService: AppDatasetCreateService; + let datasetCommitService: DatasetCommitService; const FROZEN_TIME = new Date("2022-10-01 12:00:00"); beforeEach(async () => { @@ -40,7 +40,7 @@ describe("EditWatermarkModalComponent", () => { fixture = 
TestBed.createComponent(EditWatermarkModalComponent); component = fixture.componentInstance; - appDatasetCreateService = TestBed.inject(AppDatasetCreateService); + datasetCommitService = TestBed.inject(DatasetCommitService); component.datasetBasics = mockDatasetBasicsFragment; }); @@ -71,7 +71,7 @@ describe("EditWatermarkModalComponent", () => { component, "commitSetWatermarkEvent", ).and.callThrough(); - spyOn(appDatasetCreateService, "commitEventToDataset").and.returnValue( + spyOn(datasetCommitService, "commitEventToDataset").and.returnValue( of(), ); emitClickOnElementByDataTestId(fixture, "commit-setWatermark-event"); diff --git a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html index 34293e1b5..a850750b4 100644 --- a/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html +++ b/src/app/dataset-view/additional-components/overview-component/components/readme-section/readme-section.component.html @@ -22,10 +22,8 @@

@@ -38,6 +36,7 @@

diff --git a/src/app/dataset-view/dataset-view-header/dataset-view-header.html b/src/app/dataset-view/dataset-view-header/dataset-view-header.html index 132cd92d0..467e118e1 100644 --- a/src/app/dataset-view/dataset-view-header/dataset-view-header.html +++ b/src/app/dataset-view/dataset-view-header/dataset-view-header.html @@ -10,7 +10,7 @@ rel="author" data-hovercard-type="organization" (click)="showOwnerPage()" - data-test-id='show-owner-link' + data-test-id="show-owner-link" >{{ datasetInfo.accountName}} @@ -19,7 +19,7 @@ {{ datasetInfo.datasetName }} From c824cdebaa74760bf1f8f536a131ad901e9d3ea2 Mon Sep 17 00:00:00 2001 From: Sergei Zaychenko Date: Fri, 4 Aug 2023 04:01:10 -0700 Subject: [PATCH 15/16] Improved unit tests for dataset commit service --- .../additional-components/data-tabs.mock.ts | 5 +- .../services/dataset-commit.service.spec.ts | 211 ++++++++++++------ .../services/dataset-commit.service.ts | 1 + 3 files changed, 146 insertions(+), 71 deletions(-) diff --git a/src/app/dataset-view/additional-components/data-tabs.mock.ts b/src/app/dataset-view/additional-components/data-tabs.mock.ts index 365c4511b..632a7244f 100644 --- a/src/app/dataset-view/additional-components/data-tabs.mock.ts +++ b/src/app/dataset-view/additional-components/data-tabs.mock.ts @@ -819,6 +819,8 @@ export const mockOverviewWithSetLicense = { lastUpdatedAt: "2022-08-05T21:15:03.947245004+00:00", }; +export const mockCommitEventToDatasetMutationErrorMessage = "Fail"; + export const mockCommitEventToDatasetMutationError: CommitEventToDatasetMutation = { datasets: { @@ -827,7 +829,8 @@ export const mockCommitEventToDatasetMutationError: CommitEventToDatasetMutation chain: { commitEvent: { __typename: "CommitResultAppendError", - message: "Fail", + message: + mockCommitEventToDatasetMutationErrorMessage, }, __typename: "MetadataChainMut", }, diff --git a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts index af681ae9b..5ee0d02fc 100644 --- a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts +++ b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.spec.ts @@ -3,23 +3,35 @@ import { DatasetCommitService } from "./dataset-commit.service"; import { Apollo, ApolloModule } from "apollo-angular"; import { ApolloTestingModule } from "apollo-angular/testing"; import { DatasetApi } from "src/app/api/dataset.api"; -import { of } from "rxjs"; +import { Observable, Subscription, of } from "rxjs"; import { mockDatasetMainDataResponse } from "src/app/search/mock.data"; import { DatasetByAccountAndDatasetNameQuery } from "src/app/api/kamu.graphql.interface"; import { NavigationService } from "src/app/services/navigation.service"; import { mockCommitEventToDatasetMutation, mockCommitEventToDatasetMutationError, + mockCommitEventToDatasetMutationErrorMessage, mockUpdateReadmeMutation, } from "../../data-tabs.mock"; +import { DatasetViewTypeEnum } from "src/app/dataset-view/dataset-view.interface"; +import { DatasetService } from "src/app/dataset-view/dataset.service"; +import { first } from "rxjs/operators"; describe("DatasetCommitService", () => { - let service: DatasetCommitService; + let commitService: DatasetCommitService; + let datasetService: DatasetService; let datasetApi: DatasetApi; let navigationService: NavigationService; + let getDatasetInfoSpy: jasmine.Spy; + let 
navigationServiceSpy: jasmine.Spy; + let requestDatasetMainDataSpy: jasmine.Spy; + const TEST_ACCOUNT_NAME = "accountName"; const TEST_DATASET_NAME = "datasetName"; + const TEST_DATASET_ID: string = mockDatasetMainDataResponse.datasets + .byOwnerAndName?.id as string; + const TEST_EVENT_CONTENT = "event content"; beforeEach(() => { TestBed.configureTestingModule({ @@ -28,8 +40,10 @@ describe("DatasetCommitService", () => { }); datasetApi = TestBed.inject(DatasetApi); navigationService = TestBed.inject(NavigationService); - service = TestBed.inject(DatasetCommitService); - spyOn( + commitService = TestBed.inject(DatasetCommitService); + datasetService = TestBed.inject(DatasetService); + + getDatasetInfoSpy = spyOn( datasetApi, "getDatasetInfoByAccountAndDatasetName", ).and.returnValue( @@ -37,102 +51,159 @@ describe("DatasetCommitService", () => { mockDatasetMainDataResponse as DatasetByAccountAndDatasetNameQuery, ), ); + + requestDatasetMainDataSpy = spyOn( + datasetService, + "requestDatasetMainData", + ).and.returnValue(of()); + + navigationServiceSpy = spyOn( + navigationService, + "navigateToDatasetView", + ); }); it("should be created", () => { - expect(service).toBeTruthy(); + expect(commitService).toBeTruthy(); }); + function requestDatasetId(): Observable { + return commitService.getIdByAccountNameAndDatasetName( + TEST_ACCOUNT_NAME, + TEST_DATASET_NAME, + ); + } + + function requestCommitEvent(): Observable { + return commitService.commitEventToDataset( + TEST_ACCOUNT_NAME, + TEST_DATASET_NAME, + TEST_EVENT_CONTENT, + ); + } + + function expectNavigatedToDatasetOverview() { + expect(navigationServiceSpy).toHaveBeenCalledOnceWith({ + accountName: TEST_ACCOUNT_NAME, + datasetName: TEST_DATASET_NAME, + tab: DatasetViewTypeEnum.Overview, + }); + } + + function expectRequestedDatasetMainData() { + expect(requestDatasetMainDataSpy).toHaveBeenCalledOnceWith({ + accountName: TEST_ACCOUNT_NAME, + datasetName: TEST_DATASET_NAME, + }); + } + it("should check getIdByAccountNameAndDatasetName() method with cache", fakeAsync(() => { - const key = TEST_ACCOUNT_NAME + TEST_DATASET_NAME; - service.datasetIdsByAccountDatasetName.set(key, "testId"); - service - .getIdByAccountNameAndDatasetName( - TEST_ACCOUNT_NAME, - TEST_DATASET_NAME, - ) - .subscribe((data) => { - expect(data).toEqual("testId"); + // 1st requests queries the API + let eventsCount = 0; + requestDatasetId().subscribe((data) => { + ++eventsCount; + expect(data).toEqual(TEST_DATASET_ID); + tick(); + + // Issue 2nd request after first to ensure previous value was cached + requestDatasetId().subscribe((data) => { + ++eventsCount; + expect(data).toEqual(TEST_DATASET_ID); tick(); }); + }); flush(); + + expect(eventsCount).toEqual(2); + expect(getDatasetInfoSpy).toHaveBeenCalledOnceWith( + TEST_ACCOUNT_NAME, + TEST_DATASET_NAME, + ); })); it("should check getIdByAccountNameAndDatasetName() method without cache", fakeAsync(() => { - service - .getIdByAccountNameAndDatasetName( - TEST_ACCOUNT_NAME, - TEST_DATASET_NAME, - ) - .subscribe((data) => { - expect(data).toEqual( - mockDatasetMainDataResponse.datasets.byOwnerAndName - ?.id as string, - ); - tick(); - }); + let eventsCount = 0; + requestDatasetId().subscribe((data) => { + ++eventsCount; + expect(data).toEqual(TEST_DATASET_ID); + tick(); + }); flush(); + + expect(eventsCount).toEqual(1); + expect(getDatasetInfoSpy).toHaveBeenCalledOnceWith( + TEST_ACCOUNT_NAME, + TEST_DATASET_NAME, + ); })); it("should check commitEventToDataset() method", fakeAsync(() => { - spyOn(datasetApi, 
"commitEvent").and.returnValue( + const commitEventSpy = spyOn(datasetApi, "commitEvent").and.returnValue( of(mockCommitEventToDatasetMutation), ); - const navigationServiceSpy = spyOn( - navigationService, - "navigateToDatasetView", - ); - service - .commitEventToDataset( - TEST_ACCOUNT_NAME, - TEST_DATASET_NAME, - "contentEvent", - ) - .subscribe(() => { - expect(navigationServiceSpy).toHaveBeenCalledTimes(1); - tick(); - }); + + requestCommitEvent().subscribe(() => { + tick(); + }); + flush(); + + expect(commitEventSpy).toHaveBeenCalledOnceWith({ + datasetId: TEST_DATASET_ID, + event: TEST_EVENT_CONTENT, + }); + expectNavigatedToDatasetOverview(); + expectRequestedDatasetMainData(); })); it("should check commitEventToDataset() method with error", fakeAsync(() => { - const errorCommitEventChangesSpy = spyOn( - service, - "errorCommitEventChanges", - ); - spyOn(datasetApi, "commitEvent").and.returnValue( + const commitEventSpy = spyOn(datasetApi, "commitEvent").and.returnValue( of(mockCommitEventToDatasetMutationError), ); - service - .commitEventToDataset( - TEST_ACCOUNT_NAME, - TEST_DATASET_NAME, - "contentEvent", - ) - .subscribe(() => { - expect(errorCommitEventChangesSpy).toHaveBeenCalledTimes(1); - tick(); - }); + + const errorSubscription$: Subscription = + commitService.onErrorCommitEventChanges + .pipe(first()) + .subscribe((message) => { + expect(message).toEqual( + mockCommitEventToDatasetMutationErrorMessage, + ); + }); + + requestCommitEvent().subscribe(() => { + tick(); + }); flush(); + + expect(commitEventSpy).toHaveBeenCalledOnceWith({ + datasetId: TEST_DATASET_ID, + event: TEST_EVENT_CONTENT, + }); + expect(navigationServiceSpy).not.toHaveBeenCalled(); + expect(requestDatasetMainDataSpy).not.toHaveBeenCalled(); + + // If error triggered, our subscription will be closed + expect(errorSubscription$.closed).toBeTrue(); })); it("should check updateReadme() method ", fakeAsync(() => { - const navigationServiceSpy = spyOn( - navigationService, - "navigateToDatasetView", - ); - spyOn(datasetApi, "updateReadme").and.returnValue( - of(mockUpdateReadmeMutation), - ); - service - .updateReadme( - TEST_ACCOUNT_NAME, - TEST_DATASET_NAME, - "readme content", - ) + const updateReadmeSpy = spyOn( + datasetApi, + "updateReadme", + ).and.returnValue(of(mockUpdateReadmeMutation)); + const README_CONTENT = "readme content"; + + commitService + .updateReadme(TEST_ACCOUNT_NAME, TEST_DATASET_NAME, README_CONTENT) .subscribe(() => { - expect(navigationServiceSpy).toHaveBeenCalledTimes(1); tick(); }); flush(); + + expect(updateReadmeSpy).toHaveBeenCalledOnceWith( + TEST_DATASET_ID, + README_CONTENT, + ); + expectNavigatedToDatasetOverview(); + expectRequestedDatasetMainData(); })); }); diff --git a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts index 8663482e0..53472aaa7 100644 --- a/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts +++ b/src/app/dataset-view/additional-components/overview-component/services/dataset-commit.service.ts @@ -117,6 +117,7 @@ export class DatasetCommitService { datasetName, }) .subscribe(); + this.navigationService.navigateToDatasetView({ accountName, datasetName, From 0627f5b5431a443ae5f097135cdc3ec5f905cef8 Mon Sep 17 00:00:00 2001 From: Sergei Zaychenko Date: Fri, 4 Aug 2023 04:04:40 -0700 Subject: [PATCH 16/16] Changelog updated --- CHANGELOG.md | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index bc344c05c..300ef7ebf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] +### Added - Added pictures in the dropdown list for engines +- Editing dataset readme file on Overview tab ## [0.7.0] - 2023-07-27 ### Added
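
For reference only (not part of the patch series): a minimal sketch of how a component might consume the DatasetCommitService introduced above. It assumes only what is visible in the diffs — updateReadme(accountName, datasetName, content), which on success re-requests the dataset main data and navigates to the Overview tab, and the onErrorCommitEventChanges stream used for commit failures. The component name, import path, and field names are illustrative, not taken from the repository.

    // Hypothetical consumer of DatasetCommitService; a sketch under the assumptions stated above.
    import { Component } from "@angular/core";
    import { Subscription } from "rxjs";
    import { DatasetCommitService } from "./services/dataset-commit.service";

    @Component({
        selector: "app-readme-editor-example",
        template: "",
    })
    export class ReadmeEditorExampleComponent {
        private subscriptions = new Subscription();

        constructor(private datasetCommitService: DatasetCommitService) {}

        public saveReadme(accountName: string, datasetName: string, content: string): void {
            // updateReadme() commits the new readme and, on success, refreshes the
            // dataset main data and navigates back to the dataset Overview tab.
            this.subscriptions.add(
                this.datasetCommitService
                    .updateReadme(accountName, datasetName, content)
                    .subscribe(),
            );

            // Commit failures are surfaced via onErrorCommitEventChanges rather than
            // through the returned observable.
            this.subscriptions.add(
                this.datasetCommitService.onErrorCommitEventChanges.subscribe(
                    (message: string) => console.warn(message),
                ),
            );
        }
    }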