Skip to content

Commit

Permalink
Tool markdown reports.
Browse files Browse the repository at this point in the history
  • Loading branch information
jmchilton committed Oct 24, 2024
1 parent e6d1b30 commit ef948bc
Show file tree
Hide file tree
Showing 21 changed files with 341 additions and 19 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,14 @@ const { datasetPathDestination } = useDatasetPathDestination();
const props = defineProps<Props>();
const pathDestination = computed<PathDestination | null>(() =>
datasetPathDestination.value(props.historyDatasetId, props.path)
);
const pathDestination = computedAsync<PathDestination | null>(async () => {
return await datasetPathDestination.value(props.historyDatasetId, props.path)
}, null);
const imageUrl = computed(() => {
if (props.path === undefined || props.path === "undefined") {
return `${getAppRoot()}dataset/display?dataset_id=${props.historyDatasetId}`;
}
return pathDestination.value?.fileLink;
});
Expand Down
3 changes: 2 additions & 1 deletion client/src/components/Dataset/DatasetIndex/DatasetIndex.vue
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
<script setup lang="ts">
import { computedAsync } from "@vueuse/core";
import { computed } from "vue";
import type { DatasetExtraFiles } from "@/api/datasets";
Expand All @@ -13,7 +14,7 @@ const { datasetPathDestination } = useDatasetPathDestination();
const props = defineProps<Props>();
const pathDestination = computed<PathDestination | null>(() =>
const pathDestination = computedAsync<PathDestination | null>(() =>
datasetPathDestination.value(props.historyDatasetId, props.path)
);
Expand Down
3 changes: 2 additions & 1 deletion client/src/components/Dataset/DatasetLink/DatasetLink.vue
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
<script setup lang="ts">
import { computedAsync } from "@vueuse/core";
import { computed } from "vue";
import { hasDetails } from "@/api";
Expand All @@ -16,7 +17,7 @@ const { getDataset } = useDatasetStore();
const props = defineProps<Props>();
const pathDestination = computed<PathDestination | null>(() =>
const pathDestination = computedAsync<PathDestination | null>(() =>
datasetPathDestination.value(props.historyDatasetId, props.path)
);
Expand Down
6 changes: 5 additions & 1 deletion client/src/components/History/Content/ContentItem.vue
Original file line number Diff line number Diff line change
Expand Up @@ -189,8 +189,12 @@ const itemUrls = computed<ItemUrls>(() => {
: null,
};
}
let display = `/datasets/${id}/preview`;
if (props.item.extension == "tool_markdown") {
display = `/datasets/${id}/report`;
}
return {
display: `/datasets/${id}/preview`,
display: display,
edit: `/datasets/${id}/edit`,
showDetails: `/datasets/${id}/details`,
reportError: `/datasets/${id}/error`,
Expand Down
15 changes: 11 additions & 4 deletions client/src/components/Markdown/Markdown.vue
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@
Edit
<FontAwesomeIcon icon="edit" />
</b-button>
<h1 class="float-right align-middle mr-2 mt-1 h-md">Galaxy {{ markdownConfig.model_class }}</h1>
<h1 v-if="title" class="float-right align-middle mr-2 mt-1 h-md">Galaxy {{ markdownConfig.model_class }}</h1>
<span class="float-left font-weight-light">
<h1 class="text-break align-middle">
Title: {{ markdownConfig.title || markdownConfig.model_class }}
<h1 v-if="title" class="text-break align-middle">
Title: {{ title }}
</h1>
<h2 v-if="workflowVersions" class="text-break align-middle">
Workflow Checkpoint: {{ workflowVersions.version }}
Expand All @@ -34,7 +34,7 @@
</div>
<b-badge variant="info" class="w-100 rounded mb-3 white-space-normal">
<div class="float-left m-1 text-break">Generated with Galaxy {{ version }} on {{ time }}</div>
<div class="float-right m-1">Identifier: {{ markdownConfig.id }}</div>
<div v-if="showIdentifier" class="float-right m-1">Identifier: {{ markdownConfig.id }}</div>
</b-badge>
<div>
<b-alert v-if="markdownErrors.length > 0" variant="warning" show>
Expand Down Expand Up @@ -119,6 +119,10 @@ export default {
type: String,
default: null,
},
showIdentifier: {
type: Boolean,
default: true,
},
},
data() {
return {
Expand All @@ -137,6 +141,9 @@ export default {
effectiveExportLink() {
return this.enable_beta_markdown_export ? this.exportLink : null;
},
title() {
return this.markdownConfig.title || this.markdownConfig.model_class;
},
time() {
let generateTime = this.markdownConfig.generate_time;
if (generateTime) {
Expand Down
42 changes: 42 additions & 0 deletions client/src/components/Tool/ToolReport.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
<script setup lang="ts">
import { computed, ref } from "vue";
import { useConfig } from "@/composables/config";
import { urlData } from "@/utils/url";
import Markdown from "@/components/Markdown/Markdown.vue";
// Renders a "tool markdown" dataset as a Galaxy Markdown report.
// Routed from /datasets/:datasetId/report (see analysis router).
interface Props {
    // Encoded ID of the history dataset whose report should be rendered.
    datasetId: string;
}
const props = defineProps<Props>();
const dataUrl = computed(()=> {
    return `/api/datasets/${props.datasetId}/report`;
});
// Holds the report JSON fetched from the API; null until the request resolves.
const dataRef = ref<unknown>(null);
const { config, isConfigLoaded } = useConfig(true);
// Fetch once at setup time. NOTE(review): rejections are not handled here —
// a failed fetch leaves the component stuck on "Loading...."; consider a catch.
urlData({ url: dataUrl.value }).then((data) => {
    dataRef.value = data;
});
</script>

<template>
    <div>
        <!-- NOTE(review): download-endpoint is a "TODO" placeholder — confirm
             the intended export/download URL before relying on that feature. -->
        <Markdown
            v-if="isConfigLoaded && dataRef"
            :markdown-config="dataRef"
            :enable-beta-markdown-export="config.enable_beta_markdown_export"
            download-endpoint="TODO"
            :show-identifier="false"
            :read-only="true" />
        <div v-else>
            Loading....
        </div>
    </div>
</template>
9 changes: 3 additions & 6 deletions client/src/composables/datasetPathDestination.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@ export function useDatasetPathDestination() {
const cache = ref<{ [key: string]: PathDestinationMap }>({});

const datasetPathDestination = computed(() => {
return (dataset_id: string, path?: string) => {
return async(dataset_id: string, path?: string) => {
const targetPath = path ?? "undefined";
const pathDestination = cache.value[dataset_id]?.[targetPath];
let pathDestination = cache.value[dataset_id]?.[targetPath];
if (!pathDestination) {
getPathDestination(dataset_id, path);
pathDestination = await getPathDestination(dataset_id, path) ?? undefined;
}
return pathDestination ?? null;
};
Expand All @@ -36,7 +36,6 @@ export function useDatasetPathDestination() {
await datasetExtraFilesStore.fetchDatasetExtFilesByDatasetId({ id: dataset_id });
datasetExtraFiles = datasetExtraFilesStore.getDatasetExtraFiles(dataset_id);
}

if (datasetExtraFiles === null) {
return null;
}
Expand Down Expand Up @@ -66,9 +65,7 @@ export function useDatasetPathDestination() {
}
pathDestination.fileLink = getCompositeDatasetLink(dataset_id, datasetEntry.path);
}

set(cache.value, dataset_id, { [path]: pathDestination });

return pathDestination;
}

Expand Down
6 changes: 6 additions & 0 deletions client/src/entry/analysis/router.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import ToolLanding from "components/Landing/ToolLanding";
import WorkflowLanding from "components/Landing/WorkflowLanding";
import PageDisplay from "components/PageDisplay/PageDisplay";
import PageEditor from "components/PageEditor/PageEditor";
import ToolReport from "components/Tool/ToolReport";
import ToolSuccess from "components/Tool/ToolSuccess";
import ToolsList from "components/ToolsList/ToolsList";
import ToolsJson from "components/ToolsView/ToolsSchemaJson/ToolsJson";
Expand Down Expand Up @@ -239,6 +240,11 @@ export function getRouter(Galaxy) {
src: `/datasets/${route.params.datasetId}/display/?preview=True`,
}),
},
{
path: "datasets/:datasetId/report",
component: ToolReport,
props: true,
},
{
// legacy route, potentially used by 3rd parties
path: "datasets/:datasetId/show_params",
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/config/sample/datatypes_conf.xml.sample
Original file line number Diff line number Diff line change
Expand Up @@ -575,6 +575,7 @@
<!-- End RGenetics Datatypes -->
<datatype extension="ipynb" type="galaxy.datatypes.text:Ipynb" display_in_upload="true"/>
<datatype extension="json" type="galaxy.datatypes.text:Json" display_in_upload="true"/>
<datatype extension="tool_markdown" type="galaxy.datatypes.text:Text" display_in_upload="true"/>
<datatype extension="expression.json" type="galaxy.datatypes.text:ExpressionJson" display_in_upload="true"/>
<!-- graph datatypes -->
<datatype extension="xgmml" type="galaxy.datatypes.graph:Xgmml" display_in_upload="true"/>
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/datatypes/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -465,6 +465,7 @@ def _serve_file_download(self, headers, data, trans, to_ext, file_size, **kwd):
composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
composite_extensions.append("html") # for archiving composite datatypes
composite_extensions.append("data_manager_json") # for downloading bundles if bundled.
composite_extensions.append("tool_markdown")

if data.extension in composite_extensions:
return self._archive_composite_dataset(trans, data, headers, do_action=kwd.get("do_action", "zip"))
Expand Down
66 changes: 65 additions & 1 deletion lib/galaxy/managers/markdown_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -900,7 +900,7 @@ def _remap(container, line):
)
if container == "history_link":
return (f"history_link(history_id={invocation.history.id})\n", False)
if container == "invocation_time":
elif container == "invocation_time":
return (f"invocation_time(invocation_id={invocation.id})\n", False)
ref_object_type = None
output_match = re.search(OUTPUT_LABEL_PATTERN, line)
Expand Down Expand Up @@ -953,6 +953,70 @@ def find_non_empty_group(match):
return galaxy_markdown


def resolve_job_markdown(trans, job, job_markdown):
    """Resolve job objects to convert tool markdown to 'internal' representation.

    Replace references to abstract workflow parts with actual galaxy object IDs corresponding
    to the actual executed workflow. For instance:

    convert output=name -to- history_dataset_id=<id> | history_dataset_collection_id=<id>
    convert input=name -to- history_dataset_id=<id> | history_dataset_collection_id=<id>
    convert argument-less job directives to job
    """
    io_dicts = job.io_dicts()

    def _remap(container, line):
        # Argument-less directives resolve directly against the supplied job.
        if container == "history_link":
            return (f"history_link(history_id={job.history.id})\n", False)
        elif container == "tool_stdout":
            return (f"tool_stdout(job_id={job.id})\n", False)
        elif container == "tool_stderr":
            return (f"tool_stderr(job_id={job.id})\n", False)
        elif container == "job_parameters":
            return (f"job_parameters(job_id={job.id})\n", False)
        elif container == "job_metrics":
            return (f"job_metrics(job_id={job.id})\n", False)
        output_match = re.search(OUTPUT_LABEL_PATTERN, line)
        input_match = re.search(INPUT_LABEL_PATTERN, line)

        def find_non_empty_group(match):
            # The label patterns allow several quoting styles; only one capture
            # group is populated per match, return it (or None, explicitly).
            for group in match.groups():
                if group:
                    return group
            return None

        target_match: Optional[Match]
        ref_object: Optional[Any]
        if output_match:
            target_match = output_match
            name = find_non_empty_group(target_match)
            if name in io_dicts.out_data:
                ref_object = io_dicts.out_data[name]
            elif name in io_dicts.out_collections:
                ref_object = io_dicts.out_collections[name]
            else:
                # Previously raised the meaningless message "Unknown exception";
                # name the missing output so the failure is diagnosable.
                raise Exception(f"Unknown job output [{name}] referenced in tool markdown")
        elif input_match:
            target_match = input_match
            name = find_non_empty_group(target_match)
            # A KeyError here means the markdown references an input name the
            # job does not define; it propagates to the caller as-is.
            ref_object = io_dicts.inp_data[name]
        else:
            target_match = None
            ref_object = None
        if ref_object:
            assert target_match  # tell type system, this is set when ref_object is set
            # Datasets and collections embed as different directive argument names.
            if ref_object.history_content_type == "dataset":
                ref_object_type = "history_dataset"
            else:
                ref_object_type = "history_dataset_collection"
            line = line.replace(target_match.group(), f"{ref_object_type}_id={ref_object.id}")
        return (line, False)

    galaxy_markdown = _remap_galaxy_markdown_calls(_remap, job_markdown)
    return galaxy_markdown


def _remap_galaxy_markdown_containers(func, markdown):
new_markdown = markdown

Expand Down
7 changes: 7 additions & 0 deletions lib/galaxy/schema/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -3846,6 +3846,13 @@ class PageDetails(PageSummary):
model_config = ConfigDict(extra="allow")


class ToolReportForDataset(BaseModel):
    """API model for the JSON payload used to render a dataset's tool markdown report."""

    # Resolved Galaxy-flavored markdown content of the report.
    content: Optional[str] = ContentField
    # Galaxy version recorded when the markdown was readied for export, if any.
    generate_version: Optional[str] = GenerateVersionField
    # Generation timestamp recorded when the markdown was readied for export, if any.
    generate_time: Optional[str] = GenerateTimeField
    # Export-time extra attributes are splatted into this model, so allow extras.
    model_config = ConfigDict(extra="allow")


class PageSummaryList(RootModel):
root: List[PageSummary] = Field(
default=[],
Expand Down
12 changes: 12 additions & 0 deletions lib/galaxy/webapps/galaxy/api/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
AsyncTaskResultSummary,
DatasetAssociationRoles,
DatasetSourceType,
ToolReportForDataset,
)
from galaxy.util.zipstream import ZipstreamWrapper
from galaxy.webapps.base.api import GalaxyFileResponse
Expand Down Expand Up @@ -503,6 +504,17 @@ def compute_hash(
) -> AsyncTaskResultSummary:
return self.service.compute_hash(trans, dataset_id, payload, hda_ldda=hda_ldda)

@router.get(
    "/api/datasets/{dataset_id}/report",
    summary="Return JSON content Galaxy will use to render Markdown reports",
)
def report(
    self,
    dataset_id: HistoryDatasetIDPathParam,
    trans=DependsOnTrans,
) -> ToolReportForDataset:
    # Thin controller wrapper: access checks and markdown resolution are
    # delegated to the datasets service.
    return self.service.report(trans, dataset_id)

@router.put(
"/api/datasets/{dataset_id}/object_store_id",
summary="Update an object store ID for a dataset you own.",
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/webapps/galaxy/buildapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@ def app_pair(global_conf, load_app_kwds=None, wsgi_preflight=True, **kwargs):
webapp.add_client_route("/datasets/{dataset_id}/error")
webapp.add_client_route("/datasets/{dataset_id}/details")
webapp.add_client_route("/datasets/{dataset_id}/preview")
webapp.add_client_route("/datasets/{dataset_id}/report")
webapp.add_client_route("/datasets/{dataset_id}/show_params")
webapp.add_client_route("/collection/{collection_id}/edit")
webapp.add_client_route("/jobs/submission/success")
Expand Down
17 changes: 17 additions & 0 deletions lib/galaxy/webapps/galaxy/services/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,10 @@
HistoryContentsManager,
)
from galaxy.managers.lddas import LDDAManager
from galaxy.managers.markdown_util import (
ready_galaxy_markdown_for_export,
resolve_job_markdown,
)
from galaxy.model.base import transaction
from galaxy.objectstore.badges import BadgeDict
from galaxy.schema import (
Expand All @@ -70,6 +74,7 @@
DatasetSourceType,
EncodedDatasetSourceId,
Model,
ToolReportForDataset,
UpdateDatasetPermissionsPayload,
)
from galaxy.schema.tasks import ComputeDatasetHashTaskRequest
Expand Down Expand Up @@ -506,6 +511,18 @@ def compute_hash(
result = compute_dataset_hash.delay(request=request, task_user_id=getattr(trans.user, "id", None))
return async_task_summary(result)

def report(self, trans: ProvidesHistoryContext, dataset_id: DecodedDatabaseIdField) -> ToolReportForDataset:
    """Build the markdown report payload for a tool-produced dataset.

    Reads the dataset's raw tool markdown from disk, resolves references
    against the creating job, and readies the result for export so the
    client can render it.

    :raises: whatever ``get_accessible`` / ``ensure_dataset_on_disk`` raise
        for inaccessible or not-yet-materialized datasets.
    """
    dataset_instance = self.hda_manager.get_accessible(dataset_id, trans.user)
    self.hda_manager.ensure_dataset_on_disk(trans, dataset_instance)
    file_path = trans.app.object_store.get_filename(dataset_instance.dataset)
    # Use a context manager so the file handle is closed deterministically
    # (the original `open(file_path).read()` leaked the handle until GC).
    with open(file_path) as f:
        raw_content = f.read()
    internal_markdown = resolve_job_markdown(trans, dataset_instance.creating_job, raw_content)
    content, extra_attributes = ready_galaxy_markdown_for_export(trans, internal_markdown)
    return ToolReportForDataset(
        content=content,
        **extra_attributes,
    )

def drs_dataset_instance(self, object_id: str) -> Tuple[int, DatasetSourceType]:
if object_id.startswith("hda-"):
decoded_object_id = self.decode_id(object_id[len("hda-") :], kind="drs")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,5 +48,10 @@ const rows = computed(() => {
<template>
<loading-div v-if="loading" />
<error-banner error="Failed to load repositories" v-else-if="error"> </error-banner>
<repositories-grid :title="`Repositories for ${username}`" :rows="rows" :on-scroll="onScroll" />
<repositories-grid
:title="`Repositories for ${username}`"
:rows="rows"
:on-scroll="onScroll"
:allow-search="true"
/>
</template>
Loading

0 comments on commit ef948bc

Please sign in to comment.