Kamu-178: Set attachments from UI, saving and editing attachments (#114)
* Add manage buttons.
* Add ability to save readme.md
* Add new component ReadmeSectionComponent.
* Updated GraphQL schema.
* Integrate new mutation 'updateReadme' (a usage sketch follows the change summary below).
* Moved the finding of the ID to a separate method.
dmitriy-borzenko authored Aug 4, 2023
1 parent de8f692 commit feb4044
Showing 45 changed files with 1,241 additions and 329 deletions.
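
To set the stage for the diff below, here is a minimal, hypothetical sketch of the saving path this commit wires up: a readme section component hands the edited markdown to DatasetApi.updateReadme, which issues the new updateReadme GraphQL mutation. Only DatasetApi.updateReadme and the ReadmeSectionComponent name come from this commit; the component shape, selector, and template below are illustrative assumptions.

import { Component, Input } from "@angular/core";
import { DatasetApi } from "src/app/api/dataset.api";

// Hypothetical, simplified stand-in for the new ReadmeSectionComponent.
@Component({
    selector: "app-readme-section-sketch",
    template: `<button (click)="save()">Save</button>`,
})
export class ReadmeSectionSketchComponent {
    @Input() datasetId = "";
    @Input() content = "";

    constructor(private datasetApi: DatasetApi) {}

    public save(): void {
        // Delegates to the API layer added in this commit; the result union
        // (CommitResultSuccess / CommitResultAppendError / NoChanges) is
        // ignored here for brevity.
        this.datasetApi.updateReadme(this.datasetId, this.content).subscribe();
    }
}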
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -6,7 +6,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
### Added
- Added pictures in the dropdown list for engines
- Editing dataset readme file on Overview tab

## [0.7.0] - 2023-07-27
### Added
64 changes: 58 additions & 6 deletions resources/schema.graphql
@@ -51,7 +51,7 @@ type AttachmentsEmbedded {
items: [AttachmentEmbedded!]!
}

type Auth {
type AuthMut {
githubLogin(code: String!): LoginResponse!
accountInfo(accessToken: String!): AccountInfo!
}
@@ -75,11 +75,11 @@ interface CommitResult {
message: String!
}

type CommitResultAppendError implements CommitResult {
type CommitResultAppendError implements CommitResult & UpdateReadmeResult {
message: String!
}

type CommitResultSuccess implements CommitResult {
type CommitResultSuccess implements CommitResult & UpdateReadmeResult {
oldHead: Multihash
newHead: Multihash!
message: String!
@@ -317,6 +317,24 @@ type DatasetMetadata {
currentVocab: SetVocab
}

type DatasetMetadataMut {
"""
Access to the mutable metadata chain of the dataset
"""
chain: MetadataChainMut!
"""
Updates or clears the dataset readme
"""
updateReadme(content: String): UpdateReadmeResult!
}

type DatasetMut {
"""
Access to the mutable metadata of the dataset
"""
metadata: DatasetMetadataMut!
}

scalar DatasetName

scalar DatasetRefAny
@@ -346,6 +364,13 @@ type Datasets {
page: Int
perPage: Int
): DatasetConnection!
}

type DatasetsMut {
"""
Returns a mutable dataset by its ID
"""
byId(datasetId: DatasetID!): DatasetMut
"""
Creates a new empty dataset
"""
@@ -519,6 +544,9 @@ type MetadataChain {
Iterates all metadata blocks in the reverse chronological order
"""
blocks(page: Int, perPage: Int): MetadataBlockConnection!
}

type MetadataChainMut {
"""
Commits new event to the metadata chain
"""
@@ -555,8 +583,28 @@ type MetadataManifestUnsupportedVersion implements CommitResult & CreateDatasetF
scalar Multihash

type Mutation {
auth: Auth!
tasks: TasksMutations!
"""
Authentication and authorization-related functionality group
"""
auth: AuthMut!
"""
Dataset-related functionality group.
  Datasets are historical streams of events recorded under a certain
  schema.
"""
datasets: DatasetsMut!
"""
Tasks-related functionality group.
Tasks are units of work scheduled and executed by the system to query
and process data.
"""
tasks: TasksMut!
}

type NoChanges implements CommitResult & UpdateReadmeResult {
message: String!
}

type OffsetInterval {
@@ -910,7 +958,7 @@ type Tasks {
): TaskConnection!
}

type TasksMutations {
type TasksMut {
"""
Requests cancellation of the specified task
"""
Expand Down Expand Up @@ -952,6 +1000,10 @@ type TransformSql {
temporalTables: [TemporalTable!]
}

interface UpdateReadmeResult {
message: String!
}

type User implements Account {
"""
  Unique and stable identifier of this user account
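
The schema above introduces the UpdateReadmeResult interface with three implementers (CommitResultSuccess, CommitResultAppendError, NoChanges). A minimal TypeScript sketch of how a consumer might discriminate on __typename follows; the helper name and the local result type are assumptions, and only the field names come from the schema.

// Hypothetical local mirror of the UpdateReadmeResult variants; field names
// follow the schema in this diff, everything else is illustrative.
interface UpdateReadmeResultLike {
    __typename: "CommitResultSuccess" | "CommitResultAppendError" | "NoChanges";
    message: string;
    oldHead?: string | null; // only present on CommitResultSuccess
}

function describeUpdateReadmeResult(result: UpdateReadmeResultLike): string {
    switch (result.__typename) {
        case "CommitResultSuccess":
            // A new metadata block was committed; oldHead is null for a first commit.
            return `Readme updated (previous head: ${result.oldHead ?? "none"})`;
        case "NoChanges":
            // Submitted content matched the current readme, nothing was committed.
            return `Nothing to update: ${result.message}`;
        case "CommitResultAppendError":
            // The metadata chain rejected the append.
            return `Update failed: ${result.message}`;
    }
}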
27 changes: 15 additions & 12 deletions src/app/api/dataset.api.spec.ts
@@ -19,7 +19,7 @@ import {
import { DatasetApi } from "./dataset.api";
import {
CommitEventToDatasetDocument,
CommitEventToDatasetQuery,
CommitEventToDatasetMutation,
DatasetsByAccountNameDocument,
DatasetsByAccountNameQuery,
GetDatasetDataSqlRunDocument,
@@ -188,22 +188,25 @@ describe("DatasetApi", () => {
});

it("should commit event", () => {
const mockDatasetId = "mockId";
const mockEvent = "mock event";
service
.commitEvent({
accountName: TEST_USER_NAME,
datasetName: TEST_DATASET_NAME,
event: "mock event",
datasetId: mockDatasetId,
event: mockEvent,
})
.subscribe((res: CommitEventToDatasetQuery) => {
expect(
res.datasets.byOwnerAndName?.metadata.chain.commitEvent
.__typename,
).toEqual("CommitResultSuccess");
});
.subscribe(
(res: CommitEventToDatasetMutation | null | undefined) => {
expect(
res?.datasets.byId?.metadata.chain.commitEvent
.__typename,
).toEqual("CommitResultSuccess");
},
);

const op = controller.expectOne(CommitEventToDatasetDocument);
expect(op.operation.variables.accountName).toEqual(TEST_USER_NAME);
expect(op.operation.variables.datasetName).toEqual(TEST_DATASET_NAME);
expect(op.operation.variables.datasetId).toEqual(mockDatasetId);
expect(op.operation.variables.event).toEqual(mockEvent);
op.flush({
data: mockCommitEventResponse,
});
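
A companion spec for the new updateReadme() method could mirror the commitEvent test above. This is a hedged sketch: UpdateReadmeDocument and the mockUpdateReadmeResponse fixture are assumed to exist alongside the generated artifacts already imported in this file.

it("should update readme", () => {
    const mockDatasetId = "mockId";
    const mockContent = "# New readme";

    service
        .updateReadme(mockDatasetId, mockContent)
        .subscribe((res: UpdateReadmeMutation | null | undefined) => {
            expect(
                res?.datasets.byId?.metadata.updateReadme.__typename,
            ).toEqual("CommitResultSuccess");
        });

    // UpdateReadmeDocument is assumed to be generated the same way as
    // CommitEventToDatasetDocument above.
    const op = controller.expectOne(UpdateReadmeDocument);
    expect(op.operation.variables.datasetId).toEqual(mockDatasetId);
    expect(op.operation.variables.content).toEqual(mockContent);
    op.flush({
        data: mockUpdateReadmeResponse, // hypothetical fixture
    });
});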
101 changes: 75 additions & 26 deletions src/app/api/dataset.api.ts
@@ -1,12 +1,16 @@
import {
CommitEventToDatasetGQL,
CommitEventToDatasetQuery,
CommitEventToDatasetMutation,
CreateDatasetFromSnapshotGQL,
CreateDatasetFromSnapshotQuery,
CreateEmptyDatasetQuery,
CreateDatasetFromSnapshotMutation,
CreateEmptyDatasetMutation,
DatasetByAccountAndDatasetNameGQL,
DatasetByAccountAndDatasetNameQuery,
DatasetKind,
GetDatasetSchemaGQL,
GetDatasetSchemaQuery,
UpdateReadmeGQL,
UpdateReadmeMutation,
} from "src/app/api/kamu.graphql.interface";
import AppValues from "src/app/common/app.values";
import { ApolloQueryResult } from "@apollo/client/core";
@@ -30,6 +34,7 @@ import {
DatasetByIdGQL,
CreateEmptyDatasetGQL,
} from "./kamu.graphql.interface";
import { MutationResult } from "apollo-angular";

@Injectable({ providedIn: "root" })
export class DatasetApi {
@@ -40,10 +45,12 @@ export class DatasetApi {
private datasetsByAccountNameGQL: DatasetsByAccountNameGQL,
private metadataBlockGQL: GetMetadataBlockGQL,
private datasetByIdGQL: DatasetByIdGQL,
private datasetByAccountAndDatasetNameGQL: DatasetByAccountAndDatasetNameGQL,
private createEmptyDatasetGQL: CreateEmptyDatasetGQL,
private createDatasetFromSnapshotGQL: CreateDatasetFromSnapshotGQL,
private commitEventToDataset: CommitEventToDatasetGQL,
private commitEventToDatasetGQL: CommitEventToDatasetGQL,
private datasetSchemaGQL: GetDatasetSchemaGQL,
private updateReadmeGQL: UpdateReadmeGQL,
) {}

public getDatasetMainData(params: {
@@ -52,11 +59,17 @@ export class DatasetApi {
numRecords?: number;
}): Observable<GetDatasetMainDataQuery> {
return this.datasetMainDataGQL
.watch({
accountName: params.accountName,
datasetName: params.datasetName,
limit: params.numRecords ?? AppValues.SQL_QUERY_LIMIT,
})
.watch(
{
accountName: params.accountName,
datasetName: params.datasetName,
limit: params.numRecords ?? AppValues.SQL_QUERY_LIMIT,
},
{
fetchPolicy: "network-only",
errorPolicy: "all",
},
)
.valueChanges.pipe(
first(),
map((result: ApolloQueryResult<GetDatasetMainDataQuery>) => {
@@ -162,17 +175,38 @@ export class DatasetApi {
);
}

public getDatasetInfoByAccountAndDatasetName(
accountName: string,
datasetName: string,
): Observable<DatasetByAccountAndDatasetNameQuery> {
return this.datasetByAccountAndDatasetNameGQL
.watch({
accountName,
datasetName,
})
.valueChanges.pipe(
first(),
map(
(
result: ApolloQueryResult<DatasetByAccountAndDatasetNameQuery>,
) => {
return result.data;
},
),
);
}

public createDatasetFromSnapshot(
accountId: string,
snapshot: string,
): Observable<CreateDatasetFromSnapshotQuery> {
): Observable<CreateDatasetFromSnapshotMutation | undefined | null> {
return this.createDatasetFromSnapshotGQL
.watch({ accountId, snapshot })
.valueChanges.pipe(
.mutate({ accountId, snapshot })
.pipe(
first(),
map(
(
result: ApolloQueryResult<CreateDatasetFromSnapshotQuery>,
result: MutationResult<CreateDatasetFromSnapshotMutation>,
) => {
return result.data;
},
@@ -184,31 +218,46 @@ export class DatasetApi {
accountId: string,
datasetKind: DatasetKind,
datasetName: string,
): Observable<CreateEmptyDatasetQuery> {
): Observable<CreateEmptyDatasetMutation | null | undefined> {
return this.createEmptyDatasetGQL
.watch({ accountId, datasetKind, datasetName })
.valueChanges.pipe(
.mutate({ accountId, datasetKind, datasetName })
.pipe(
first(),
map((result: ApolloQueryResult<CreateEmptyDatasetQuery>) => {
map((result: MutationResult<CreateEmptyDatasetMutation>) => {
return result.data;
}),
);
}

public commitEvent(params: {
accountName: string;
datasetName: string;
datasetId: string;
event: string;
}): Observable<CommitEventToDatasetQuery> {
return this.commitEventToDataset
.watch({
accountName: params.accountName,
datasetName: params.datasetName,
}): Observable<CommitEventToDatasetMutation | null | undefined> {
return this.commitEventToDatasetGQL
.mutate({
datasetId: params.datasetId,
event: params.event,
})
.valueChanges.pipe(
.pipe(
first(),
map((result: MutationResult<CommitEventToDatasetMutation>) => {
return result.data;
}),
);
}

public updateReadme(
datasetId: string,
content: string,
): Observable<UpdateReadmeMutation | null | undefined> {
return this.updateReadmeGQL
.mutate({
datasetId,
content,
})
.pipe(
first(),
map((result: ApolloQueryResult<CommitEventToDatasetQuery>) => {
map((result: MutationResult<UpdateReadmeMutation>) => {
return result.data;
}),
);
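
Taken together, the two new API methods suggest a two-step saving flow: resolve the dataset by owner and name, then commit the readme update by ID. A hedged sketch follows; the switchMap chaining and the id field on the DatasetBasics fragment are assumptions, and only the DatasetApi methods appear in this commit.

import { Observable } from "rxjs";
import { switchMap } from "rxjs/operators";
import { DatasetApi } from "src/app/api/dataset.api";
import {
    DatasetByAccountAndDatasetNameQuery,
    UpdateReadmeMutation,
} from "src/app/api/kamu.graphql.interface";

// Hypothetical helper combining the two new DatasetApi methods.
export function saveReadme(
    api: DatasetApi,
    accountName: string,
    datasetName: string,
    content: string,
): Observable<UpdateReadmeMutation | null | undefined> {
    return api
        .getDatasetInfoByAccountAndDatasetName(accountName, datasetName)
        .pipe(
            switchMap((info: DatasetByAccountAndDatasetNameQuery) => {
                // Assumes the DatasetBasics fragment exposes the dataset ID.
                const datasetId = info.datasets.byOwnerAndName?.id as string;
                if (!datasetId) {
                    throw new Error(
                        `Dataset ${accountName}/${datasetName} not found`,
                    );
                }
                return api.updateReadme(datasetId, content);
            }),
        );
}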
8 changes: 2 additions & 6 deletions src/app/api/gql/create-dataset/commit-event.graphql
@@ -1,10 +1,6 @@
query commitEventToDataset(
$accountName: AccountName!
$datasetName: DatasetName!
$event: String!
) {
mutation commitEventToDataset($datasetId: DatasetID!, $event: String!) {
datasets {
byOwnerAndName(accountName: $accountName, datasetName: $datasetName) {
byId(datasetId: $datasetId) {
metadata {
chain {
commitEvent(event: $event, eventFormat: YAML) {
@@ -1,4 +1,4 @@
query createEmptyDataset(
mutation createEmptyDataset(
$accountId: AccountID!
$datasetKind: DatasetKind!
$datasetName: DatasetName!
@@ -1,4 +1,4 @@
query createDatasetFromSnapshot($accountId: AccountID!, $snapshot: String!) {
mutation createDatasetFromSnapshot($accountId: AccountID!, $snapshot: String!) {
datasets {
createFromSnapshot(
accountId: $accountId
15 changes: 15 additions & 0 deletions src/app/api/gql/create-dataset/update-readme.graphql
@@ -0,0 +1,15 @@
mutation updateReadme($datasetId: DatasetID!, $content: String!) {
datasets {
byId(datasetId: $datasetId) {
metadata {
updateReadme(content: $content) {
__typename
message
... on CommitResultSuccess {
oldHead
}
}
}
}
}
}
10 changes: 10 additions & 0 deletions src/app/api/gql/dataset-by-account-and-dataset-name.graphql
@@ -0,0 +1,10 @@
query datasetByAccountAndDatasetName(
$accountName: AccountName!
$datasetName: DatasetName!
) {
datasets {
byOwnerAndName(accountName: $accountName, datasetName: $datasetName) {
...DatasetBasics
}
}
}