From b10f3af7f64866dc9faa2408bc724180e5ba227f Mon Sep 17 00:00:00 2001 From: Dmitriy Borzenko Date: Tue, 25 Jul 2023 12:14:38 +0300 Subject: [PATCH] Add datasetRef in detailed block view for SetTransform event. --- resources/schema.graphql | 1208 +++++++++-------- .../fragment-dataset-transform.graphql | 1 + src/app/api/kamu.graphql.interface.ts | 5 + src/app/common/tooltips/set-transform.text.ts | 2 + .../set-transform-event.source.ts | 8 + .../builders/set-transform-section.builder.ts | 16 +- .../add-polling-source.component.html | 3 +- .../edit-set-transform..service.ts | 5 - .../set-transform/set-transform.component.ts | 2 - .../stepper-navigation.component.html | 2 +- .../stepper-navigation.component.spec.ts | 1 - .../stepper-navigation.component.ts | 1 - 12 files changed, 661 insertions(+), 593 deletions(-) diff --git a/resources/schema.graphql b/resources/schema.graphql index fe6d304d0..c0351059d 100644 --- a/resources/schema.graphql +++ b/resources/schema.graphql @@ -1,339 +1,367 @@ type AccessToken { - accessToken: String! - scope: String! - tokenType: String! + accessToken: String! + scope: String! + tokenType: String! } interface Account { - id: AccountID! - name: String! + id: AccountID! + name: String! } scalar AccountID type AccountInfo { - login: String! - name: String! - email: String - avatarUrl: String - gravatarId: String + login: String! + name: String! + email: String + avatarUrl: String + gravatarId: String } scalar AccountName type Accounts { - """ - Returns account by its ID - """ - byId(accountId: AccountID!): Account - """ - Returns account by its name - """ - byName(name: String!): Account + """ + Returns account by its ID + """ + byId(accountId: AccountID!): Account + """ + Returns account by its name + """ + byName(name: String!): Account } type AddData { - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime - sourceState: SourceState + inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime + sourceState: SourceState } type AttachmentEmbedded { - path: String! - content: String! + path: String! + content: String! } union Attachments = AttachmentsEmbedded type AttachmentsEmbedded { - items: [AttachmentEmbedded!]! + items: [AttachmentEmbedded!]! } type Auth { - githubLogin(code: String!): LoginResponse! - accountInfo(accessToken: String!): AccountInfo! + githubLogin(code: String!): LoginResponse! + accountInfo(accessToken: String!): AccountInfo! } type BlockInterval { - start: Multihash! - end: Multihash! + start: Multihash! + end: Multihash! } type BlockRef { - name: String! - blockHash: Multihash! + name: String! + blockHash: Multihash! } - type Checkpoint { - physicalHash: Multihash! - size: Int! + physicalHash: Multihash! + size: Int! } interface CommitResult { - message: String! + message: String! } type CommitResultAppendError implements CommitResult { - message: String! + message: String! } type CommitResultSuccess implements CommitResult { - oldHead: Multihash - newHead: Multihash! - message: String! + oldHead: Multihash + newHead: Multihash! + message: String! } enum CompressionFormat { - GZIP - ZIP + GZIP + ZIP } interface CreateDatasetFromSnapshotResult { - message: String! + message: String! } interface CreateDatasetResult { - message: String! + message: String! } type CreateDatasetResultInvalidSnapshot implements CreateDatasetFromSnapshotResult { - message: String! + message: String! 
} type CreateDatasetResultMissingInputs implements CreateDatasetFromSnapshotResult { - missingInputs: [String!]! - message: String! + missingInputs: [String!]! + message: String! } type CreateDatasetResultNameCollision implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - datasetName: DatasetName! - message: String! + accountName: AccountName + datasetName: DatasetName! + message: String! } type CreateDatasetResultSuccess implements CreateDatasetResult & CreateDatasetFromSnapshotResult { - dataset: Dataset! - message: String! + dataset: Dataset! + message: String! } type DataBatch { - format: DataBatchFormat! - content: String! - numRecords: Int! + format: DataBatchFormat! + content: String! + numRecords: Int! } enum DataBatchFormat { - JSON - JSON_LD - JSON_SOA - CSV + JSON + JSON_LD + JSON_SOA + CSV } type DataQueries { - """ - Executes a specified query and returns its result - """ - query(query: String!, queryDialect: QueryDialect!, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat, limit: Int): DataQueryResult! - """ - Lists engines known to the system and recommended for use - """ - knownEngines: [EngineDesc!]! + """ + Executes a specified query and returns its result + """ + query( + query: String! + queryDialect: QueryDialect! + dataFormat: DataBatchFormat + schemaFormat: DataSchemaFormat + limit: Int + ): DataQueryResult! + """ + Lists engines known to the system and recommended for use + """ + knownEngines: [EngineDesc!]! } union DataQueryResult = DataQueryResultSuccess | DataQueryResultError type DataQueryResultError { - errorMessage: String! - errorKind: DataQueryResultErrorKind! + errorMessage: String! + errorKind: DataQueryResultErrorKind! } enum DataQueryResultErrorKind { - INVALID_SQL - INTERNAL_ERROR + INVALID_SQL + INTERNAL_ERROR } type DataQueryResultSuccess { - schema: DataSchema - data: DataBatch! - limit: Int! + schema: DataSchema + data: DataBatch! + limit: Int! } type DataSchema { - format: DataSchemaFormat! - content: String! + format: DataSchemaFormat! + content: String! } enum DataSchemaFormat { - PARQUET - PARQUET_JSON + PARQUET + PARQUET_JSON } type DataSlice { - logicalHash: Multihash! - physicalHash: Multihash! - interval: OffsetInterval! - size: Int! + logicalHash: Multihash! + physicalHash: Multihash! + interval: OffsetInterval! + size: Int! } type Dataset { - """ - Unique identifier of the dataset - """ - id: DatasetID! - """ - Symbolic name of the dataset. - Name can change over the dataset's lifetime. For unique identifier use - `id()`. - """ - name: DatasetName! - """ - Returns the user or organization that owns this dataset - """ - owner: Account! - """ - Returns the kind of a dataset (Root or Derivative) - """ - kind: DatasetKind! - """ - Access to the data of the dataset - """ - data: DatasetData! - """ - Access to the metadata of the dataset - """ - metadata: DatasetMetadata! - """ - Creation time of the first metadata block in the chain - """ - createdAt: DateTime! - """ - Creation time of the most recent metadata block in the chain - """ - lastUpdatedAt: DateTime! + """ + Unique identifier of the dataset + """ + id: DatasetID! + """ + Symbolic name of the dataset. + Name can change over the dataset's lifetime. For unique identifier use + `id()`. + """ + name: DatasetName! + """ + Returns the user or organization that owns this dataset + """ + owner: Account! + """ + Returns the kind of a dataset (Root or Derivative) + """ + kind: DatasetKind! + """ + Access to the data of the dataset + """ + data: DatasetData! 
+ """ + Access to the metadata of the dataset + """ + metadata: DatasetMetadata! + """ + Creation time of the first metadata block in the chain + """ + createdAt: DateTime! + """ + Creation time of the most recent metadata block in the chain + """ + lastUpdatedAt: DateTime! } type DatasetConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Dataset!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [DatasetEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Dataset!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [DatasetEdge!]! } type DatasetData { - """ - Total number of records in this dataset - """ - numRecordsTotal: Int! - """ - An estimated size of data on disk not accounting for replication or - caching - """ - estimatedSize: Int! - """ - Returns the specified number of the latest records in the dataset - This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY - event_time DESC LIMIT N` - """ - tail(limit: Int, dataFormat: DataBatchFormat, schemaFormat: DataSchemaFormat): DataQueryResult! + """ + Total number of records in this dataset + """ + numRecordsTotal: Int! + """ + An estimated size of data on disk not accounting for replication or + caching + """ + estimatedSize: Int! + """ + Returns the specified number of the latest records in the dataset + This is equivalent to the SQL query: `SELECT * FROM dataset ORDER BY + event_time DESC LIMIT N` + """ + tail( + limit: Int + dataFormat: DataBatchFormat + schemaFormat: DataSchemaFormat + ): DataQueryResult! } type DatasetEdge { - node: Dataset! + node: Dataset! } scalar DatasetID enum DatasetKind { - ROOT - DERIVATIVE + ROOT + DERIVATIVE } type DatasetMetadata { - """ - Access to the temporal metadata chain of the dataset - """ - chain: MetadataChain! - """ - Last recorded watermark - """ - currentWatermark: DateTime - """ - Latest data schema - """ - currentSchema(format: DataSchemaFormat): DataSchema - """ - Current upstream dependencies of a dataset - """ - currentUpstreamDependencies: [Dataset!]! - """ - Current downstream dependencies of a dataset - """ - currentDownstreamDependencies: [Dataset!]! - """ - Current source used by the root dataset - """ - currentSource: SetPollingSource - """ - Current transformation used by the derivative dataset - """ - currentTransform: SetTransform - """ - Current descriptive information about the dataset - """ - currentInfo: SetInfo! - """ - Current readme file as discovered from attachments associated with the - dataset - """ - currentReadme: String - """ - Current license associated with the dataset - """ - currentLicense: SetLicense - """ - Current vocabulary associated with the dataset - """ - currentVocab: SetVocab + """ + Access to the temporal metadata chain of the dataset + """ + chain: MetadataChain! + """ + Last recorded watermark + """ + currentWatermark: DateTime + """ + Latest data schema + """ + currentSchema(format: DataSchemaFormat): DataSchema + """ + Current upstream dependencies of a dataset + """ + currentUpstreamDependencies: [Dataset!]! + """ + Current downstream dependencies of a dataset + """ + currentDownstreamDependencies: [Dataset!]! 
+ """ + Current source used by the root dataset + """ + currentSource: SetPollingSource + """ + Current transformation used by the derivative dataset + """ + currentTransform: SetTransform + """ + Current descriptive information about the dataset + """ + currentInfo: SetInfo! + """ + Current readme file as discovered from attachments associated with the + dataset + """ + currentReadme: String + """ + Current license associated with the dataset + """ + currentLicense: SetLicense + """ + Current vocabulary associated with the dataset + """ + currentVocab: SetVocab } scalar DatasetName +scalar DatasetRefAny + type Datasets { - """ - Returns dataset by its ID - """ - byId(datasetId: DatasetID!): Dataset - """ - Returns dataset by its owner and name - """ - byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset - """ - Returns datasets belonging to the specified account - """ - byAccountId(accountId: AccountID!, page: Int, perPage: Int): DatasetConnection! - """ - Returns datasets belonging to the specified account - """ - byAccountName(accountName: AccountName!, page: Int, perPage: Int): DatasetConnection! - """ - Creates a new empty dataset - """ - createEmpty(accountId: AccountID!, datasetKind: DatasetKind!, datasetName: DatasetName!): CreateDatasetResult! - """ - Creates a new dataset from provided DatasetSnapshot manifest - """ - createFromSnapshot(accountId: AccountID!, snapshot: String!, snapshotFormat: MetadataManifestFormat!): CreateDatasetFromSnapshotResult! + """ + Returns dataset by its ID + """ + byId(datasetId: DatasetID!): Dataset + """ + Returns dataset by its owner and name + """ + byOwnerAndName(accountName: AccountName!, datasetName: DatasetName!): Dataset + """ + Returns datasets belonging to the specified account + """ + byAccountId( + accountId: AccountID! + page: Int + perPage: Int + ): DatasetConnection! + """ + Returns datasets belonging to the specified account + """ + byAccountName( + accountName: AccountName! + page: Int + perPage: Int + ): DatasetConnection! + """ + Creates a new empty dataset + """ + createEmpty( + accountId: AccountID! + datasetKind: DatasetKind! + datasetName: DatasetName! + ): CreateDatasetResult! + """ + Creates a new dataset from provided DatasetSnapshot manifest + """ + createFromSnapshot( + accountId: AccountID! + snapshot: String! + snapshotFormat: MetadataManifestFormat! + ): CreateDatasetFromSnapshotResult! } """ @@ -347,468 +375,485 @@ scalar DateTime Describes """ type EngineDesc { - """ - A short name of the engine, e.g. "Spark", "Flink". - Intended for use in UI for quick engine identification and selection. - """ - name: String! - """ - Language and dialect this engine is using for queries - Indended for configuring code highlighting and completions. - """ - dialect: QueryDialect! - """ - OCI image repository and a tag of the latest engine image, e.g. - "ghcr.io/kamu-data/engine-datafusion:0.1.2" - """ - latestImage: String! + """ + A short name of the engine, e.g. "Spark", "Flink". + Intended for use in UI for quick engine identification and selection. + """ + name: String! + """ + Language and dialect this engine is using for queries + Indended for configuring code highlighting and completions. + """ + dialect: QueryDialect! + """ + OCI image repository and a tag of the latest engine image, e.g. + "ghcr.io/kamu-data/engine-datafusion:0.1.2" + """ + latestImage: String! } type EnvVar { - name: String! - value: String + name: String! 
+ value: String } union EventTimeSource = EventTimeSourceFromMetadata | EventTimeSourceFromPath type EventTimeSourceFromMetadata { - dummy: String + dummy: String } type EventTimeSourceFromPath { - pattern: String! - timestampFormat: String + pattern: String! + timestampFormat: String } type ExecuteQuery { - inputSlices: [InputSlice!]! - inputCheckpoint: Multihash - outputData: DataSlice - outputCheckpoint: Checkpoint - outputWatermark: DateTime + inputSlices: [InputSlice!]! + inputCheckpoint: Multihash + outputData: DataSlice + outputCheckpoint: Checkpoint + outputWatermark: DateTime } union FetchStep = FetchStepUrl | FetchStepFilesGlob | FetchStepContainer type FetchStepContainer { - image: String! - command: [String!] - args: [String!] - env: [EnvVar!] + image: String! + command: [String!] + args: [String!] + env: [EnvVar!] } type FetchStepFilesGlob { - path: String! - eventTime: EventTimeSource - cache: SourceCaching - order: SourceOrdering + path: String! + eventTime: EventTimeSource + cache: SourceCaching + order: SourceOrdering } type FetchStepUrl { - url: String! - eventTime: EventTimeSource - cache: SourceCaching - headers: [RequestHeader!] + url: String! + eventTime: EventTimeSource + cache: SourceCaching + headers: [RequestHeader!] } - - type InputSlice { - datasetId: DatasetID! - blockInterval: BlockInterval - dataInterval: OffsetInterval + datasetId: DatasetID! + blockInterval: BlockInterval + dataInterval: OffsetInterval } - type LoginResponse { - token: AccessToken! - accountInfo: AccountInfo! + token: AccessToken! + accountInfo: AccountInfo! } -union MergeStrategy = MergeStrategyAppend | MergeStrategyLedger | MergeStrategySnapshot +union MergeStrategy = + MergeStrategyAppend + | MergeStrategyLedger + | MergeStrategySnapshot type MergeStrategyAppend { - dummy: String + dummy: String } type MergeStrategyLedger { - primaryKey: [String!]! + primaryKey: [String!]! } type MergeStrategySnapshot { - primaryKey: [String!]! - compareColumns: [String!] - observationColumn: String - obsvAdded: String - obsvChanged: String - obsvRemoved: String + primaryKey: [String!]! + compareColumns: [String!] + observationColumn: String + obsvAdded: String + obsvChanged: String + obsvRemoved: String } type MetadataBlockConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [MetadataBlockExtended!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [MetadataBlockEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [MetadataBlockExtended!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [MetadataBlockEdge!]! } type MetadataBlockEdge { - node: MetadataBlockExtended! + node: MetadataBlockExtended! } type MetadataBlockExtended { - blockHash: Multihash! - prevBlockHash: Multihash - systemTime: DateTime! - author: Account! - event: MetadataEvent! - sequenceNumber: Int! + blockHash: Multihash! + prevBlockHash: Multihash + systemTime: DateTime! + author: Account! + event: MetadataEvent! + sequenceNumber: Int! } type MetadataChain { - """ - Returns all named metadata block references - """ - refs: [BlockRef!]! 
- """ - Returns a metadata block corresponding to the specified hash - """ - blockByHash(hash: Multihash!): MetadataBlockExtended - """ - Returns a metadata block corresponding to the specified hash and encoded - in desired format - """ - blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String - """ - Iterates all metadata blocks in the reverse chronological order - """ - blocks(page: Int, perPage: Int): MetadataBlockConnection! - """ - Commits new event to the metadata chain - """ - commitEvent(event: String!, eventFormat: MetadataManifestFormat!): CommitResult! -} - -union MetadataEvent = AddData | ExecuteQuery | Seed | SetPollingSource | SetTransform | SetVocab | SetWatermark | SetAttachments | SetInfo | SetLicense + """ + Returns all named metadata block references + """ + refs: [BlockRef!]! + """ + Returns a metadata block corresponding to the specified hash + """ + blockByHash(hash: Multihash!): MetadataBlockExtended + """ + Returns a metadata block corresponding to the specified hash and encoded + in desired format + """ + blockByHashEncoded(hash: Multihash!, format: MetadataManifestFormat!): String + """ + Iterates all metadata blocks in the reverse chronological order + """ + blocks(page: Int, perPage: Int): MetadataBlockConnection! + """ + Commits new event to the metadata chain + """ + commitEvent( + event: String! + eventFormat: MetadataManifestFormat! + ): CommitResult! +} + +union MetadataEvent = + AddData + | ExecuteQuery + | Seed + | SetPollingSource + | SetTransform + | SetVocab + | SetWatermark + | SetAttachments + | SetInfo + | SetLicense enum MetadataManifestFormat { - YAML + YAML } type MetadataManifestMalformed implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! } type MetadataManifestUnsupportedVersion implements CommitResult & CreateDatasetFromSnapshotResult { - message: String! + message: String! } scalar Multihash type Mutation { - auth: Auth! - tasks: TasksMutations! + auth: Auth! + tasks: TasksMutations! } type OffsetInterval { - start: Int! - end: Int! + start: Int! + end: Int! } type Organization implements Account { - """ - Unique and stable identitfier of this organization account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this organization account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! } type PageBasedInfo { - """ - When paginating backwards, are there more items? - """ - hasPreviousPage: Boolean! - """ - When paginating forwards, are there more items? - """ - hasNextPage: Boolean! - """ - Index of the current page - """ - currentPage: Int! - """ - Approximate number of total pages assuming number of nodes per page - stays the same - """ - totalPages: Int + """ + When paginating backwards, are there more items? + """ + hasPreviousPage: Boolean! + """ + When paginating forwards, are there more items? + """ + hasNextPage: Boolean! + """ + Index of the current page + """ + currentPage: Int! + """ + Approximate number of total pages assuming number of nodes per page + stays the same + """ + totalPages: Int } union PrepStep = PrepStepDecompress | PrepStepPipe type PrepStepDecompress { - format: CompressionFormat! - subPath: String + format: CompressionFormat! + subPath: String } type PrepStepPipe { - command: [String!]! + command: [String!]! } type Query { - """ - Returns the version of the GQL API - """ - apiVersion: String! - """ - Dataset-related functionality group. 
- - Datasets are historical streams of events recorded under a cetrain - schema. - """ - datasets: Datasets! - """ - Account-related functionality group. - - Accounts can be individual users or organizations registered in the - system. This groups deals with their identities and permissions. - """ - accounts: Accounts! - """ - Task-related functionality group. - - Tasks are units of scheduling that can perform many functions like - ingesting new data, running dataset transformations, answering ad-hoc - queries etc. - """ - tasks: Tasks! - """ - Search-related functionality group. - """ - search: Search! - """ - Querying and data manipulations - """ - data: DataQueries! + """ + Returns the version of the GQL API + """ + apiVersion: String! + """ + Dataset-related functionality group. + + Datasets are historical streams of events recorded under a cetrain + schema. + """ + datasets: Datasets! + """ + Account-related functionality group. + + Accounts can be individual users or organizations registered in the + system. This groups deals with their identities and permissions. + """ + accounts: Accounts! + """ + Task-related functionality group. + + Tasks are units of scheduling that can perform many functions like + ingesting new data, running dataset transformations, answering ad-hoc + queries etc. + """ + tasks: Tasks! + """ + Search-related functionality group. + """ + search: Search! + """ + Querying and data manipulations + """ + data: DataQueries! } enum QueryDialect { - SQL_SPARK - SQL_FLINK - SQL_DATA_FUSION + SQL_SPARK + SQL_FLINK + SQL_DATA_FUSION } -union ReadStep = ReadStepCsv | ReadStepJsonLines | ReadStepGeoJson | ReadStepEsriShapefile | ReadStepParquet +union ReadStep = + ReadStepCsv + | ReadStepJsonLines + | ReadStepGeoJson + | ReadStepEsriShapefile + | ReadStepParquet type ReadStepCsv { - schema: [String!] - separator: String - encoding: String - quote: String - escape: String - comment: String - header: Boolean - enforceSchema: Boolean - inferSchema: Boolean - ignoreLeadingWhiteSpace: Boolean - ignoreTrailingWhiteSpace: Boolean - nullValue: String - emptyValue: String - nanValue: String - positiveInf: String - negativeInf: String - dateFormat: String - timestampFormat: String - multiLine: Boolean + schema: [String!] + separator: String + encoding: String + quote: String + escape: String + comment: String + header: Boolean + enforceSchema: Boolean + inferSchema: Boolean + ignoreLeadingWhiteSpace: Boolean + ignoreTrailingWhiteSpace: Boolean + nullValue: String + emptyValue: String + nanValue: String + positiveInf: String + negativeInf: String + dateFormat: String + timestampFormat: String + multiLine: Boolean } type ReadStepEsriShapefile { - schema: [String!] - subPath: String + schema: [String!] + subPath: String } type ReadStepGeoJson { - schema: [String!] + schema: [String!] } type ReadStepJsonLines { - schema: [String!] - dateFormat: String - encoding: String - multiLine: Boolean - primitivesAsString: Boolean - timestampFormat: String + schema: [String!] + dateFormat: String + encoding: String + multiLine: Boolean + primitivesAsString: Boolean + timestampFormat: String } type ReadStepParquet { - schema: [String!] + schema: [String!] } type RequestHeader { - name: String! - value: String! + name: String! + value: String! } type Search { - """ - Perform search across all resources - """ - query(query: String!, page: Int, perPage: Int): SearchResultConnection! 
+ """ + Perform search across all resources + """ + query(query: String!, page: Int, perPage: Int): SearchResultConnection! } union SearchResult = Dataset type SearchResultConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [SearchResult!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [SearchResultEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [SearchResult!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [SearchResultEdge!]! } type SearchResultEdge { - node: SearchResult! + node: SearchResult! } type Seed { - datasetId: DatasetID! - datasetKind: DatasetKind! + datasetId: DatasetID! + datasetKind: DatasetKind! } type SetAttachments { - attachments: Attachments! + attachments: Attachments! } type SetInfo { - description: String - keywords: [String!] + description: String + keywords: [String!] } type SetLicense { - shortName: String! - name: String! - spdxId: String - websiteUrl: String! + shortName: String! + name: String! + spdxId: String + websiteUrl: String! } type SetPollingSource { - fetch: FetchStep! - prepare: [PrepStep!] - read: ReadStep! - preprocess: Transform - merge: MergeStrategy! + fetch: FetchStep! + prepare: [PrepStep!] + read: ReadStep! + preprocess: Transform + merge: MergeStrategy! } type SetTransform { - inputs: [TransformInput!]! - transform: Transform! + inputs: [TransformInput!]! + transform: Transform! } type SetVocab { - systemTimeColumn: String - eventTimeColumn: String - offsetColumn: String + systemTimeColumn: String + eventTimeColumn: String + offsetColumn: String } type SetWatermark { - outputWatermark: DateTime! + outputWatermark: DateTime! } union SourceCaching = SourceCachingForever type SourceCachingForever { - dummy: String + dummy: String } enum SourceOrdering { - BY_EVENT_TIME - BY_NAME + BY_EVENT_TIME + BY_NAME } type SourceState { - kind: String! - source: String! - value: String! + kind: String! + source: String! + value: String! } type SqlQueryStep { - alias: String - query: String! + alias: String + query: String! } - type Task { - """ - Unique and stable identitfier of this task - """ - taskId: TaskID! - """ - Life-cycle status of a task - """ - status: TaskStatus! - """ - Whether the task was ordered to be cancelled - """ - cancellationRequested: Boolean! - """ - Describes a certain final outcome of the task once it reaches the - "finished" status - """ - outcome: TaskOutcome - """ - Time when task was originally created and placed in a queue - """ - createdAt: DateTime! - """ - Time when task transitioned into a running state - """ - ranAt: DateTime - """ - Time when cancellation of task was requested - """ - cancellationRequestedAt: DateTime - """ - Time when task has reached a final outcome - """ - finishedAt: DateTime + """ + Unique and stable identitfier of this task + """ + taskId: TaskID! + """ + Life-cycle status of a task + """ + status: TaskStatus! + """ + Whether the task was ordered to be cancelled + """ + cancellationRequested: Boolean! + """ + Describes a certain final outcome of the task once it reaches the + "finished" status + """ + outcome: TaskOutcome + """ + Time when task was originally created and placed in a queue + """ + createdAt: DateTime! 
+ """ + Time when task transitioned into a running state + """ + ranAt: DateTime + """ + Time when cancellation of task was requested + """ + cancellationRequestedAt: DateTime + """ + Time when task has reached a final outcome + """ + finishedAt: DateTime } type TaskConnection { - """ - A shorthand for `edges { node { ... } }` - """ - nodes: [Task!]! - """ - Approximate number of total nodes - """ - totalCount: Int! - """ - Page information - """ - pageInfo: PageBasedInfo! - edges: [TaskEdge!]! + """ + A shorthand for `edges { node { ... } }` + """ + nodes: [Task!]! + """ + Approximate number of total nodes + """ + totalCount: Int! + """ + Page information + """ + pageInfo: PageBasedInfo! + edges: [TaskEdge!]! } type TaskEdge { - node: Task! + node: Task! } scalar TaskID @@ -817,99 +862,108 @@ scalar TaskID Describes a certain final outcome of the task """ enum TaskOutcome { - """ - Task succeeded - """ - SUCCESS - """ - Task failed to complete - """ - FAILED - """ - Task was cancelled by a user - """ - CANCELLED + """ + Task succeeded + """ + SUCCESS + """ + Task failed to complete + """ + FAILED + """ + Task was cancelled by a user + """ + CANCELLED } """ Life-cycle status of a task """ enum TaskStatus { - """ - Task is waiting for capacity to be allocated to it - """ - QUEUED - """ - Task is being executed - """ - RUNNING - """ - Task has reached a certain final outcome (see [TaskOutcome]) - """ - FINISHED + """ + Task is waiting for capacity to be allocated to it + """ + QUEUED + """ + Task is being executed + """ + RUNNING + """ + Task has reached a certain final outcome (see [TaskOutcome]) + """ + FINISHED } type Tasks { - """ - Returns current state of a given task - """ - getTask(taskId: TaskID!): Task - """ - Returns states of tasks associated with a given dataset ordered by - creation time from newest to oldest - """ - listTasksByDataset(datasetId: DatasetID!, page: Int, perPage: Int): TaskConnection! + """ + Returns current state of a given task + """ + getTask(taskId: TaskID!): Task + """ + Returns states of tasks associated with a given dataset ordered by + creation time from newest to oldest + """ + listTasksByDataset( + datasetId: DatasetID! + page: Int + perPage: Int + ): TaskConnection! } type TasksMutations { - """ - Requests cancellation of the specified task - """ - cancelTask(taskId: TaskID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createUpdateDatasetTask(datasetId: DatasetID!): Task! - """ - Schedules a task to update the specified dataset by performing polling - ingest or a derivative transformation - """ - createProbeTask(datasetId: DatasetID, busyTimeMs: Int, endWithOutcome: TaskOutcome): Task! + """ + Requests cancellation of the specified task + """ + cancelTask(taskId: TaskID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createUpdateDatasetTask(datasetId: DatasetID!): Task! + """ + Schedules a task to update the specified dataset by performing polling + ingest or a derivative transformation + """ + createProbeTask( + datasetId: DatasetID + busyTimeMs: Int + endWithOutcome: TaskOutcome + ): Task! } type TemporalTable { - name: String! - primaryKey: [String!]! + name: String! + primaryKey: [String!]! } union Transform = TransformSql type TransformInput { - id: DatasetID - name: DatasetName! - dataset: Dataset! + id: DatasetID + name: DatasetName! 
+ datasetRef: DatasetRefAny + dataset: Dataset! } type TransformSql { - engine: String! - version: String - queries: [SqlQueryStep!]! - temporalTables: [TemporalTable!] + engine: String! + version: String + queries: [SqlQueryStep!]! + temporalTables: [TemporalTable!] } type User implements Account { - """ - Unique and stable identitfier of this user account - """ - id: AccountID! - """ - Symbolic account name - """ - name: String! + """ + Unique and stable identitfier of this user account + """ + id: AccountID! + """ + Symbolic account name + """ + name: String! } schema { - query: Query - mutation: Mutation + query: Query + mutation: Mutation } diff --git a/src/app/api/gql/fragments/fragment-dataset-transform.graphql b/src/app/api/gql/fragments/fragment-dataset-transform.graphql index 579975c0a..863188b12 100644 --- a/src/app/api/gql/fragments/fragment-dataset-transform.graphql +++ b/src/app/api/gql/fragments/fragment-dataset-transform.graphql @@ -4,6 +4,7 @@ fragment DatasetTransform on SetTransform { dataset { ...DatasetBasics } + datasetRef } transform { ...DatasetTransformContent diff --git a/src/app/api/kamu.graphql.interface.ts b/src/app/api/kamu.graphql.interface.ts index c8293fd5a..68dbcd762 100644 --- a/src/app/api/kamu.graphql.interface.ts +++ b/src/app/api/kamu.graphql.interface.ts @@ -24,6 +24,7 @@ export type Scalars = { AccountName: any; DatasetID: any; DatasetName: any; + DatasetRefAny: any; /** * Implement the DateTime scalar * @@ -170,6 +171,7 @@ export type CreateDatasetResultMissingInputs = export type CreateDatasetResultNameCollision = CreateDatasetFromSnapshotResult & CreateDatasetResult & { __typename?: "CreateDatasetResultNameCollision"; + accountName?: Maybe; datasetName: Scalars["DatasetName"]; message: Scalars["String"]; }; @@ -980,6 +982,7 @@ export type Transform = TransformSql; export type TransformInput = { __typename?: "TransformInput"; dataset: Dataset; + datasetRef?: Maybe; id?: Maybe; name: Scalars["DatasetName"]; }; @@ -1742,6 +1745,7 @@ export type DatasetTransformFragment = { inputs: Array<{ __typename?: "TransformInput"; name: any; + datasetRef?: any | null; dataset: { __typename?: "Dataset" } & DatasetBasicsFragment; }>; transform: { @@ -2158,6 +2162,7 @@ export const DatasetTransformFragmentDoc = gql` dataset { ...DatasetBasics } + datasetRef } transform { ...DatasetTransformContent diff --git a/src/app/common/tooltips/set-transform.text.ts b/src/app/common/tooltips/set-transform.text.ts index eb9e5a4c8..94718d813 100644 --- a/src/app/common/tooltips/set-transform.text.ts +++ b/src/app/common/tooltips/set-transform.text.ts @@ -10,4 +10,6 @@ export class SetTransformToolipsTexts { public static readonly DATASET_NAME = "Name of the dataset."; public static readonly DATASET_OWNER = "Owner of the dataset."; public static readonly DATASET_ALIAS = "Query alias of the dataset."; + public static readonly DATASET_REF = + "A local or remote dataset reference to use in dataset resolutions."; } diff --git a/src/app/dataset-block/metadata-block/components/event-details/components/set-transform-event/set-transform-event.source.ts b/src/app/dataset-block/metadata-block/components/event-details/components/set-transform-event/set-transform-event.source.ts index c9ac88bc2..b1bc42146 100644 --- a/src/app/dataset-block/metadata-block/components/event-details/components/set-transform-event/set-transform-event.source.ts +++ b/src/app/dataset-block/metadata-block/components/event-details/components/set-transform-event/set-transform-event.source.ts @@ -71,4 +71,12 @@ 
export const SET_TRANSFORM_SOURCE_DESCRIPTORS: EventRowDescriptorsByField = { separateRowForValue: false, dataTestId: "set-transform-dataset-alias", }, + + "SetTransform.Dataset.datasetRef": { + label: "Dataset reference:", + tooltip: SetTransformToolipsTexts.DATASET_REF, + presentationComponent: SimplePropertyComponent, + separateRowForValue: false, + dataTestId: "set-transform-dataset-dataset-ref", + }, }; diff --git a/src/app/dataset-block/metadata-block/components/event-details/dynamic-events/builders/set-transform-section.builder.ts b/src/app/dataset-block/metadata-block/components/event-details/dynamic-events/builders/set-transform-section.builder.ts index 8d93efb6f..0b4a1c724 100644 --- a/src/app/dataset-block/metadata-block/components/event-details/dynamic-events/builders/set-transform-section.builder.ts +++ b/src/app/dataset-block/metadata-block/components/event-details/dynamic-events/builders/set-transform-section.builder.ts @@ -35,10 +35,17 @@ export class SetTransformSectionBuilder extends EventSectionBuilder { const rows: EventRow[] = []; - Object.entries({ - ...item.dataset, - alias: item.name as string, - }).forEach(([key, value]) => { + const object = item.datasetRef + ? { + ...item.dataset, + alias: item.name as string, + datasetRef: item.datasetRef as string, + } + : { + ...item.dataset, + alias: item.name as string, + }; + Object.entries(object).forEach(([key, value]) => { if ( event.__typename && item.dataset.__typename && @@ -93,6 +100,7 @@ export class SetTransformSectionBuilder extends EventSectionBuilder, engine: string, queries: Omit[], - // owners: string[], ): Omit { return { - // inputs: this.transformInputsDatasets( - // this.parseInputDatasets(inputDatasets), - // owners, - // ), inputs: this.parseInputDatasets(inputDatasets), transform: { kind: "sql", diff --git a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts index 253fb28a5..fa63d9414 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/set-transform/set-transform.component.ts @@ -162,7 +162,6 @@ export class SetTransformComponent extends BaseComponent implements OnInit { this.inputDatasets, this.selectedEngine, this.queries, - // this.owners(), ), ); instance.datasetInfo = this.getDatasetInfoFromUrl(); @@ -180,7 +179,6 @@ export class SetTransformComponent extends BaseComponent implements OnInit { this.inputDatasets, this.selectedEngine, this.queries, - // this.owners(), ), ), ) diff --git a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.html b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.html index ff544a7d4..404957f89 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.html +++ b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.html @@ -27,7 +27,7 @@ (click)="saveEvent()" class="button-save" data-test-id="save-button" - [disabled]="!validStep || !validAllSteps" + [disabled]="!validStep" > Save diff --git 
a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.spec.ts b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.spec.ts index 2de8c70b1..34e1acb3a 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.spec.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.spec.ts @@ -45,7 +45,6 @@ describe("StepperNavigationComponent", () => { component.prevStep = SetPollingSourceSection.READ; component.validStep = true; component.nextStep = null; - component.validAllSteps = true; fixture.detectChanges(); emitClickOnElementByDataTestId(fixture, "save-button"); expect(changeStepEmitterSpy).toHaveBeenCalledWith(); diff --git a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.ts b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.ts index 6d43f4b99..5957d3a21 100644 --- a/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.ts +++ b/src/app/dataset-view/additional-components/metadata-component/components/stepper-navigation/stepper-navigation.component.ts @@ -18,7 +18,6 @@ export class StepperNavigationComponent { @Input() public nextStep: MaybeNull = null; @Input() public prevStep: MaybeNull = null; @Input() public validStep?: boolean; - @Input() public validAllSteps? = false; @Output() public changeStepEmitter = new EventEmitter(); @Output() public saveEventEmitter = new EventEmitter();
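
The row-building change in set-transform-section.builder.ts is the core of this patch: when a TransformInput carries the new optional datasetRef, it is merged into the object whose entries become event rows, so older SetTransform blocks without the field render exactly as before. A minimal standalone sketch of that conditional merge follows; TransformInputLike and all sample values are illustrative stand-ins, not the app's real generated types.

// Sketch of the per-input row-source construction (hypothetical types).
interface TransformInputLike {
    name: string; // query alias of the input dataset
    datasetRef?: string | null; // new optional local/remote reference
    dataset: { id: string; name: string };
}

function buildRowSource(item: TransformInputLike): Record<string, string> {
    const base = { ...item.dataset, alias: item.name };
    // datasetRef is spread in only when present, so blocks written before
    // the field existed keep producing the same rows as today.
    return item.datasetRef ? { ...base, datasetRef: item.datasetRef } : base;
}

// Example with made-up identifiers:
console.log(
    buildRowSource({
        name: "source",
        datasetRef: "kamu/example.dataset",
        dataset: { id: "did:odf:example", name: "example.dataset" },
    }),
);

The new descriptor keyed "SetTransform.Dataset.datasetRef" then supplies the label, tooltip, and data-test id for that extra key, which is why the builder only has to surface the raw value.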
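
On the read side, datasetRef travels through the DatasetTransform fragment, so any client query that selects currentTransform picks it up without further changes. Below is a sketch of such a query against the schema as updated above, assuming the Apollo-style gql tag used by the generated client; the operation name is illustrative.

import { gql } from "apollo-angular";

// Field names mirror resources/schema.graphql as updated in this patch.
export const TRANSFORM_INPUT_REFS = gql`
    query TransformInputRefs($datasetId: DatasetID!) {
        datasets {
            byId(datasetId: $datasetId) {
                metadata {
                    currentTransform {
                        inputs {
                            name
                            datasetRef
                        }
                    }
                }
            }
        }
    }
`;

Because TransformInput.datasetRef is declared nullable (DatasetRefAny, not DatasetRefAny!), consumers such as the row builder sketched above must keep treating it as optional.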