From 8ee4176962d90ba49c227295c91f88e140c41bd0 Mon Sep 17 00:00:00 2001
From: rgaudin
Date: Wed, 9 Oct 2024 12:37:40 +0000
Subject: [PATCH] Use collection.json ordering when importing from WebDAV

---
 backend/api/routes/files.py        |  2 +-
 backend/api/routes/projects.py     | 66 +++++++++++++++++++++++++-----
 backend/tests/conftest.py          |  1 +
 frontend/src/constants.ts          |  2 +-
 frontend/src/views/ProjectView.vue |  3 +-
 5 files changed, 61 insertions(+), 13 deletions(-)

diff --git a/backend/api/routes/files.py b/backend/api/routes/files.py
index 5ab3032..5a9e5d7 100644
--- a/backend/api/routes/files.py
+++ b/backend/api/routes/files.py
@@ -236,7 +236,7 @@ async def create_file(
             path=str(fpath),
             type=mimetype,
             status=FileStatus.LOCAL.value,
-            order=1
+            order=1,
         )
         project_.files.append(new_file)
         indep_session.add(new_file)
diff --git a/backend/api/routes/projects.py b/backend/api/routes/projects.py
index b28398a..a60f2d9 100644
--- a/backend/api/routes/projects.py
+++ b/backend/api/routes/projects.py
@@ -179,6 +179,12 @@ def get(self, key: str, default=None):
         except KeyError:
             return default
 
+    def index_of(self, path: str) -> int:
+        try:
+            return self.files_indexes[path]
+        except KeyError:
+            return len(self.files_indexes) + 1
+
 
 async def read_remote_collection(url: str):
     resp = requests.get(url, timeout=constants.webdav_request_timeout_sec)
@@ -196,7 +202,6 @@ async def update_project_files_from_webdav(session: Session, project: Project):
 
     logger.debug(f"[project #{project.id}] refreshing from {project.webdav_path}")
 
-    now = datetime.datetime.now(tz=datetime.UTC)
     prefix = Path(project.webdav_path)
 
     # create a folder if this prefix does not exists
@@ -227,12 +232,33 @@ async def update_project_files_from_webdav(session: Session, project: Project):
     else:
         logger.debug(f"[project #{project.id}] collection: {len(collection)} files")
 
+    # first, update DB entries with data from webdav (modified_pn, size)
+    await _update_existing_entries(
+        session=session,
+        project=project,
+        entries={
+            path: entries[path]
+            for path in remote_paths
+            if path not in to_add and path not in to_remove
+        },
+    )
+    # then add new entries
+    await _add_new_entries(
+        project=project,
+        entries={path: entries[path] for path in to_add},
+        collection=collection,
+    )
+    # eventually clean up what's not in WebDAV anymore
+    await _delete_removed_entries(
+        session=session, project=project, paths_to_remove=to_remove
+    )
+
+
+async def _update_existing_entries(session: Session, project: Project, entries: dict):
+    # update existing Files without removing metadata
     for path, entry in entries.items():
-        if path in to_add or path in to_remove:
-            continue
-
-        logger.debug(f"[project #{project.id}] deleting {path}")
+        logger.debug(f"[project #{project.id}] updating {path}")
         stmt = select(File).filter_by(project_id=project.id).filter_by(path=str(path))
         file = session.execute(stmt).scalar_one()
         file.filesize = entry.size
@@ -241,15 +267,29 @@
         file.status = FileStatus.STORAGE.value
         session.add(file)
 
+
+async def _add_new_entries(
+    project: Project,
+    entries: dict,
+    collection: NautilusCollection | None,
+):
+    now = datetime.datetime.now(tz=datetime.UTC)
+    prefix = Path(project.webdav_path or "")
+
+    if collection:
+        entries = dict(sorted(entries.items(), key=lambda x: collection.index_of(x[0])))
+
     # add new files
     for path, entry in entries.items():
-        if path not in to_add:
-            continue
-
-        logger.debug(f"[project #{project.id}] adding {path}")
+        order = collection.index_of(path) if collection else 1
+        logger.debug(f"[project #{project.id}] adding {path} ## {order}")
         filepath = Path(entry.path).relative_to(prefix)
         filename = filepath.name
 
+        # dont add collection.json to the project
+        if collection and str(filepath) == "collection.json":
+            continue
+
         # TODO
         validate_project_quota(entry.size, project)
@@ -285,14 +325,20 @@
                 path=str(filepath),
                 type=entry.mimetype,
                 status=FileStatus.STORAGE.value,
+                order=order,
             )
             project_.files.append(new_file)
            indep_session.add(new_file)
             indep_session.flush()
             indep_session.refresh(new_file)
 
+
+async def _delete_removed_entries(
+    session: Session, project: Project, paths_to_remove: list[str]
+):
+
     # delete those that dont exist anymore
-    for path in to_remove:
+    for path in paths_to_remove:
         logger.debug(f"[project #{project.id}] deleting {path}")
         stmt = select(File).filter_by(path=path).filter_by(project_id=project.id)
         file = session.execute(stmt).scalar()
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
index e205fd8..bf578db 100644
--- a/backend/tests/conftest.py
+++ b/backend/tests/conftest.py
@@ -118,6 +118,7 @@ def file_id(project_id, test_file, test_file_hash):
         path=str(location.resolve()),
         type="image/png",
         status="LOCAL",
+        order=1
     )
     with Session.begin() as session:
         project = session.get(Project, project_id)
diff --git a/frontend/src/constants.ts b/frontend/src/constants.ts
index 1042f69..30b7850 100644
--- a/frontend/src/constants.ts
+++ b/frontend/src/constants.ts
@@ -117,7 +117,7 @@ export class NautilusFile implements File {
       file.hash,
       file.type,
       file.status,
-      file.order,
+      file.order
     )
   }
diff --git a/frontend/src/views/ProjectView.vue b/frontend/src/views/ProjectView.vue
index a46e6c6..a3dc5b3 100644
--- a/frontend/src/views/ProjectView.vue
+++ b/frontend/src/views/ProjectView.vue
@@ -267,7 +267,8 @@ async function uploadFiles(uploadFiles: FileList) {
       new Date().toISOString(),
       storeApp.constants.fakeHash,
       uploadFile.type,
-      FileStatus.UPLOADING
+      FileStatus.UPLOADING,
+      1
     )
     files.value.set(newFile.id, { file: newFile, uploadedSize: 0 })