From b3f43f51c9f8cbe4cda470059ce231bb07ffe7e1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=A3o=20Demenech?=
Date: Wed, 31 Jan 2024 16:10:54 -0300
Subject: [PATCH] Staging update (#247)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix dataset collaborator and issue delete authorization
* fix(map view): prevent client error when legend config is empty
* Feature/pending datasets (#198)
* Add pending_datasets table init and CRUD
* Add approval_status and draft to metadata
* Fix draft permissions
* [main.yml] Init. pendingdb tables
* Run WRI unit tests first
* Fix paths in unit tests script
* Run WRI unit tests first in src
* Fix OR in unit tests script
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
* Rm test.yml (#202)
* [xl]: approval ui implementation
* add approval permission
* Normalize filenames instead of using a uuid (#205)
* approval workflow implementation
* add test
* fix test
* update permission
* add issue tab indexing
* update approval workflow
* update documentation
* display filled metadata fields
* Prefect datapusher (#208)
* Prefect datapusher
* Progress
* Datapusher
* Use default id
* Progress
* Datapusher ci/cd (#207)
* [main.yml] Add CI/CD for datapusher
* [values.yaml.dev.template] Add github sha for datapusher
* Progress
* Progress
* Progress
* Progress
* Fix build
* Finished
* Remove rm command
* Fix tests
* Fix tests
* Wait on prefect
* Fix typo
* Trigger CI
* Change viewport
* Update tests
* Update github action
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Fix tests
* Check logs
* Fix tests
* Fix tests
* Datapusher token
* Fix tests
* Fix tests
* Fix tests
* Init extensions
* Fix tests
* Fix tests
* Change permissions of file
* Fix tests
* Fix test
* Trigger CI
* Remove api_token
* Fix tests
* Fix build
* Update datapusher
* Fix tests
* Fix unit tests
* Fix test.ini
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
* Return RW Error message (#210)
* Return RW Error message
* Fix build
* Trigger CI
* Fix file upload
* Fix datapusher and resource upload (#211)
* Improvements table view (#212)
* Improvements table view
* Datepicker on tabular view
* Default values (#213)
* Default values
* Fix pagination
* Enable other data formats (#215)
* Fix datafiles editing (#216)
* Feat/chart view connector url (#219)
* feat: basic chart creation functionality
* feat: chart view creation/visualization
* feat(chart view): fix self is not defined error, improve formats select, responsiveness fixes
* fix(chart view): dynamic import chart
* fix(chart view): dynamic import chart
* feat(chart view): public view options
* feat(chart view): implement pie charts
* feat(chart view): implement embedding
* fix(chart view): minor fixes to the editor
* fix(chart view): show all format options
* feat(chart view): refactor components structure
* feat(chart view): makes it possible to create chart views based on connector url
* fix(chart view): linting errors
* fix(chart view): chart creation button (#222)
* QA Fixes (#223)
* Show datapusher status (#225)
* Show datapusher status
* Status on tab for datapusher
* Add spinner to scheduled
* Fix/chart view ii (#226)
* feat(chart view): allow users to see dataset charts
* feat(chart view): dataset views are now embeddable
* feat(chart view): update resource and view update button label
* feat(chart view): show a tooltip with a message when
  chart view cannot be saved due to pending preview update
* fix(chart view): enable tooltips by default
* feat(chart view): start implementing chart view icons
* fix(chart view): fix page breaking when maps are present
* fix build
* trigger build
* temporarily remove test
* Fix xlsx datapusher (#227)
* Fix datastore query (#228)
* Fix/chart view ii (#229)
* feat(chart view): allow users to see dataset charts
* feat(chart view): dataset views are now embeddable
* feat(chart view): update resource and view update button label
* feat(chart view): show a tooltip with a message when chart view cannot be saved due to pending preview update
* fix(chart view): enable tooltips by default
* feat(chart view): start implementing chart view icons
* fix(chart view): fix page breaking when maps are present
* fix build
* trigger build
* temporarily remove test
* build: add map test again
* build: remove temporary test
* feat(chart view): maintain flag on dataset that indicates whether it has chart views or not
* fix(chart view): do not allow category, measure and dimension to be the same
* fix(map view): fix tests
* build: add map test again
* fix(map view): fix tests
* fix(tests): remove unnecessary file
* fix build
* feat(chart view): add tests
* feat(chart view): add docs
* fix: remove unnecessary header from rw api
* fix(chart view): proxy requests to rw api (#230)
* Tabular preview fixes (#231)
* Tabular preview fixes
* Add back ()
* Trigger CI
* Fix/chart view iii (#234)
* fix(chart view): add loading indicator for view create and update + fix success notification theme
* fix(chart view): fix plot height issue when plot is refreshed
* fix(chart view): tweak sorting behavior
* Docs datapusher tabular preview (#232)
* Boot up staging env (#21)
* Add staging files
* Enable s3filestore and disable init cont.
* Initial frontend (#23)
---------
Co-authored-by: Luccas Mateus
* [hotfix] Fix cert manager name for ingress
* [main.yml] Fix frontend build secrets
* [cost-splitting] Change the links according to the branch
* [cost-splitting] Update branch name
* Avoid overwriting cost splitting files of staging
* Fix url for cypress tests
* Sync dev to staging and enable frontend deployment (#47)
* Updating the storage report
* Cost splitting will now run on different buckets (#24)
  Depending on branch
* Updating the storage report
* Switch to cost splitting branch for s3filestore
* Updating the storage report
* Updating the storage report
* [Dockerfile] Enable s3filestore
* [values.yaml.dev.template] Enable s3filestore
* Normalize date string
* Updating the storage report
* Add all files
* Updating the storage report
* Fix typo
* Updating the storage report
* Get org title
* Updating the storage report
* Fix typo
* Updating the storage report
* Debug
* Updating the storage report
* Clear stuff
* Updating the storage report
* Updating the storage report
* Initial frontend (#25)
Co-authored-by: Luccas Mateus
* Rename the base directory as it includes frontend too now
* Use datapusher from the ecr
* Enable frontend and use images from ECR
* Enable frontend build and push
* [values.yaml.dev.temp] Add frontend ingress
* [main.yml] Fix CKAN_IMAGE for building
* [main.yml] Fix Trivy repo variable
* Add Site Title for frontend env
* Enable auth and fix frontend svc
* Run builds on dev and PRs to dev only
* Changing text color
* Fix cost splitting CI
* [images][l]: add images
* [package][s]: add headlessui
* [tailwind.config][m]: add custom themes
* [components][l]: implement shared and localize components
* [footer][m]: implement footer component
* [Header][s]: implement header component
* [_documents][s]: add font url to document level
* [Search][m]: implement search page
* [search-page][s]: remove div and use fragment
* [fix][l]: update styling
* [package][s]: add swiper
* [Carousel][m]: add swiper carousel
* [carouselnavbutton][m]: add carousel nav button
* [Carousel][l]: wrap highlight and recently sections inside carousel
* [Footer][s]: fix footer alignment issue
* [Header][s]: remove comment
* [Carousel][s]: remove state
* fix spacing to correct viewport
* Shell for search page
* Advanced search
* Fix build
* feat(odp-136): implement explore topics page UI
* fix(odp-136): eslint errors
* Little fixes
* Text-xs on small mobile
* Individual page for topic
* Copy button
* Acumin Pro used
* Add WRI extension and update CKAN package schema (#20)
* Sync dev with master (#19)
* Fix/broken build (#13)
* [cost-splitting-report.yml] Add permissions
* [values.yaml.dev.template] Add quotes to the bool env variables
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
* Update cost-splitting-report.yml
---------
Co-authored-by: Luccas Mateus
* Add wri extension and update package schema
* Add gitkeep for src directory
* Update gitignore
* Use symlink for ckanext-wri
* Fix tests
* [GitHub Actions] try ckan-dev instead of localhost
* [GitHub Actions] Add wait-on to docker step
* [GitHub Actions] Revert wait-on
* [GitHub Actions] Fix repo path
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension - fix path
* [GitHub Actions] Install wri extension - fix path and add APP_DIR
* [GitHub Actions] Install wri extension - try symlink in cwd
* [GitHub Actions] Install wri extension - remove symlink and
  update path
* [GitHub Actions] Install wri extension - add debug log
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension
* [GitHub Actions] Install wri extension - mv extension during action
* [GitHub Actions] Install wri extension - mv extension during action
* [GitHub Actions] Install wri extension - mv extension during action
* [GitHub Actions] Fix tests
* [GitHub Actions] Generate JWT tokens
* [GitHub Actions] Install openssl
* [GitHub Actions] Install openssl as root
* [GitHub Actions] Fix key generation
* [GitHub Actions] Fix key generation - remove pre-generated keys
* [GitHub Actions] Test adding plugins directly in test
* [GitHub Actions] Update scheming env variables
* Fix schema validators
* Fix schema validators
* Remove debugging output
* Dummy commit to run CI/CD
* Update docs
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
Co-authored-by: Luccas Mateus
* fix: minor typo on classname
* [Odp 147]: Team metadata page implementation [mockup implementation] (#35)
* [tailwindconfig][s]: add maxwidth
* add icon and image
* Team page implementation
* [TeamHeader][l]: update styling
* group component
* add activity icons
* [ActivityStream][l]: add activity stream components
* [DatasetList][l]: component to list all dataset
* [Teams][xl]: add teams page
* update teams page - move to a subpage
* update vertical space
* Little fixes
---------
Co-authored-by: Luccas Mateus
* build: trigger rebuild
* feat(odp-137): implement UI for explore teams page
* Enable scheming extension on test site
* build: dummy commit to trigger build
* Dummy commit to rerun CI/CD
* Another dummy commit to rerun CI/CD
* Another dummy commit to rerun CI/CD
* Trigger CI
* build: dummy commit to trigger build
* Temporarily remove unit tests
* Fix schema tests
* Fixes odp-134 - Misc (#39)
* fix alignment issues
* [Recent][s]: add shadows to bottom
* update styling
* Fix responsiveness
* Fix icons
* [TeamSearchResults][s]: add pagination
* fix: active link effect and login button spacing on mobile (#41)
* fix: active link effect and login button spacing on mobile
* Change route name to match url
---------
Co-authored-by: Luccas Mateus
* Fix share topic button (#43)
* [values.yaml.dev.template] Increase frontend resources (#44)
* fix styling and spacing (#45)
* [main.yml] Fix frontend build secrets
* [cost-splitting] Change the links according to the branch
* [cost-splitting] Update branch name
* Avoid overwriting cost splitting files of staging
* Fix url for cypress tests
---------
Co-authored-by: GitHub Action
Co-authored-by: Luccas Mateus
Co-authored-by: Michael Polidori
Co-authored-by: steveoni
Co-authored-by: João Demenech
Co-authored-by: Demenech
* [main.yml] Use same url for int tests as dev
* [codeql] Use staging branch
* Sync staging to dev (#105)
* Fix Staging Title
* Fix download link (#136) (#137)
* Fix download link
* Change update to patch
* [Bypassed][Dockerfile] Disable public signup
* Remove test.yml as not currently needed
* Merge Dev to Staging (#201)
* fix(map view): prevent client error when legend config is empty
* Feature/pending datasets (#198)
* Add pending_datasets table init and CRUD
* Add approval_status and draft to metadata
* Fix draft permissions
* [main.yml] Init.
  pendingdb tables
* Run WRI unit tests first
* Fix paths in unit tests script
* Run WRI unit tests first in src
* Fix OR in unit tests script
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
* Rm test.yml (#202)
---------
Co-authored-by: Demenech
Co-authored-by: João Demenech
Co-authored-by: Michael Polidori
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
* Datapusher docs
* Update docs
* Fix typo
* Add retry
* Fix test
* Fix tests
* Fix datapusher
* Fix types
* Revert "Merge branch 'staging' of https://github.com/wri/wri-odp into docs-datapusher-tabular-preview"
  This reverts commit 2d72215fcb9229ba76e64f6698c4bdcb54e3b987, reversing changes made to b5b90fa27c13619c7068572207c9057df69c12e9.
---------
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
Co-authored-by: MuhammadIsmailShahzad
Co-authored-by: GitHub Action
Co-authored-by: Michael Polidori
Co-authored-by: steveoni
Co-authored-by: João Demenech
Co-authored-by: Demenech
* update activity stream to fetch filter data via server request (#241)
* Make datapusher not conflict with multiple environments (#243)
* Make datapusher not conflict with multiple environments
* fix typo
* Trigger CI (#244)
* Trigger CI
* Trigger CI
* Add retries
* Fix: Create resource view for non-public datasets not working (#245)
* fix(chart view): auth header not being properly passed
* trigger build
* fix: fix conflict markers
---------
Co-authored-by: steveoni
Co-authored-by: Michael Polidori
Co-authored-by: Muhammad Ismail Shahzad <57398621+MuhammadIsmailShahzad@users.noreply.github.com>
Co-authored-by: Luccas Mateus
Co-authored-by: MuhammadIsmailShahzad
Co-authored-by: GitHub Action
---
 .github/workflows/main.yml | 9 ++
 ckan-backend-dev/.env.example | 1 +
 ckan-backend-dev/docker-compose.dev.yml | 1 +
 ckan-backend-dev/docker-compose.test.yml | 1 +
 .../ckanext/wri/logic/action/datapusher.py | 3 +-
 .../ckanext/wri/logic/action/delete.py | 2 +-
 .../ckanext/wri/logic/action/get.py | 2 +
 .../src/ckanext-wri/ckanext/wri/plugin.py | 1 +
 ckan-backend-dev/src/ckanext-wri/test.ini | 1 +
 datapusher/config.py | 2 +
 datapusher/main.py | 2 +-
 deployment/frontend/README.md | 2 -
 .../dashboard/activitystream/ActivityList.tsx | 126 +++++++++++-------
 .../activitystream/ActivitystreamHeader.tsx | 25 +++-
 deployment/frontend/src/pages/api/proxy.ts | 2 +-
 .../src/server/api/routers/activityStream.ts | 104 +++++++++------
 .../cypress/e2e/dataset_create_and_read.cy.js | 50 ++++---
 17 files changed, 220 insertions(+), 114 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ee2077769..2cbc3b40e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -69,6 +69,15 @@ jobs: --build-arg NEXT_PUBLIC_CKAN_URL=${{ secrets.STAGING_FRONTEND_CKAN_URL }} \ deployment/frontend docker push $REGISTRY/$FRONTEND_REPO:$IMAGE_TAG + - name: Build and push Datapusher image to ECR + env: + REGISTRY: ${{ steps.login-ecr.outputs.registry }} + DATAPUSHER_REPO: ${{ secrets.ECR_DATAPUSHER_REPO}} + IMAGE_TAG: ${{ github.sha }} + run: | + docker build -t $REGISTRY/$DATAPUSHER_REPO:$IMAGE_TAG \ + datapusher docker push $REGISTRY/$DATAPUSHER_REPO:$IMAGE_TAG - name: Set up Docker Containers env: CKAN_IMAGE: '${{ steps.login-ecr.outputs.registry }}/${{ secrets.ECR_CKAN_REPO }}:${{ github.sha }}' 
diff --git a/ckan-backend-dev/.env.example b/ckan-backend-dev/.env.example index b8399f41a..84aa81eba 100644
--- a/ckan-backend-dev/.env.example +++ b/ckan-backend-dev/.env.example @@ -114,6 +114,7 @@ CKANEXT__AUTH__AZURE_CLIENT_ID=ffffffff-ffff-ffff-ffff-ffffffffffff # custom auth CKANEXT__WRI__ODP_URL=http://frontend:3000 CKANEXT__WRI__PREFECT_URL=http://prefect:4200 +CKANEXT__WRI__DATAPUSHER_DEPLOYMENT_NAME=datapusher-dev # DataAPI #CKANEXT__DATA_API__HASURA_URL=http://hasura-svc:80 diff --git a/ckan-backend-dev/docker-compose.dev.yml b/ckan-backend-dev/docker-compose.dev.yml index dfa0393ee..d5a3ae412 100755 --- a/ckan-backend-dev/docker-compose.dev.yml +++ b/ckan-backend-dev/docker-compose.dev.yml @@ -107,6 +107,7 @@ services: restart: always environment: - PREFECT_API_URL=http://prefect:4200/api + - DEPLOYMENT_NAME=datapusher-dev - CKAN_DATASTORE_READ_URL=postgresql://ckandbuser:ckandbpassword@db/datastore - CKAN_DATASTORE_WRITE_URL=postgresql://ckandbuser:ckandbpassword@db/datastore depends_on: diff --git a/ckan-backend-dev/docker-compose.test.yml b/ckan-backend-dev/docker-compose.test.yml index 06b488a65..36a3a5be7 100755 --- a/ckan-backend-dev/docker-compose.test.yml +++ b/ckan-backend-dev/docker-compose.test.yml @@ -102,6 +102,7 @@ services: restart: always environment: - PREFECT_API_URL=http://prefect:4200/api + - DEPLOYMENT_NAME=datapusher-dev - CKAN_DATASTORE_READ_URL=postgresql://ckandbuser:ckandbpassword@db/datastore - CKAN_DATASTORE_WRITE_URL=postgresql://ckandbuser:ckandbpassword@db/datastore depends_on: diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/datapusher.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/datapusher.py index 66253681c..af125d137 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/datapusher.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/datapusher.py @@ -72,6 +72,7 @@ def datapusher_submit(context: Context, data_dict: dict[str, Any]): return False prefect_url: str = config.get("ckanext.wri.prefect_url") + deployment_name: str = config.get("ckanext.wri.datapusher_deployment_name") callback_url_base = config.get("ckan.datapusher.callback_url_base") or config.get( "ckan.site_url" @@ -159,7 +160,7 @@ def datapusher_submit(context: Context, data_dict: dict[str, Any]): api_token = p.toolkit.get_action('api_token_create')(None, {"user": "ckan_admin", "name": "datapusher"}).get('token') if force else api_token try: deployment = requests.get( - urljoin(prefect_url, "api/deployments/name/push-to-datastore/datapusher") + urljoin(prefect_url, f"api/deployments/name/push-to-datastore/{deployment_name}") ) deployment = deployment.json() deployment_id = deployment["id"] diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py index b36947f0c..aad5c706b 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/delete.py @@ -29,4 +29,4 @@ def pending_dataset_delete(context: Context, data_dict: DataDict): raise tk.ValidationError(_(f"Pending Dataset not found: {package_id}")) # was returining pending_dataset db object # change to return package_id or can none either way - return package_id \ No newline at end of file + return package_id diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py index b71f77922..0b89db293 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py +++ 
b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/logic/action/get.py @@ -480,6 +480,8 @@ def pending_diff_show(context: Context, data_dict: DataDict): try: pending_dataset = PendingDatasets.get(package_id=package_id) + log.error("===============pending dataset=================") + log.error(pending_dataset) if pending_dataset is not None: pending_dataset = pending_dataset.get("package_data") existing_dataset = get_action("package_show")(context, {"id": package_id}) diff --git a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py index 8f4e82b07..dc139cc39 100644 --- a/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py +++ b/ckan-backend-dev/src/ckanext-wri/ckanext/wri/plugin.py @@ -40,6 +40,7 @@ def configure(self, config): missing_config = "{0} is not configured. Please amend your .ini file." config_options = ( 'ckanext.wri.prefect_url', + 'ckanext.wri.datapusher_deployment_name', ) for option in config_options: if not config.get(option, None): diff --git a/ckan-backend-dev/src/ckanext-wri/test.ini b/ckan-backend-dev/src/ckanext-wri/test.ini index dfc95a3ff..ab93463e6 100644 --- a/ckan-backend-dev/src/ckanext-wri/test.ini +++ b/ckan-backend-dev/src/ckanext-wri/test.ini @@ -15,6 +15,7 @@ api_token.jwt.algorithm = RS256 api_token.jwt.encode.secret = file:/srv/app/jwtRS256.key api_token.jwt.decode.secret = file:/srv/app/jwtRS256.key.pub ckanext.wri.prefect_url = http://prefect:4200 +ckanext.wri.datapusher_deployment_name = datapusher # Logging configuration [loggers] diff --git a/datapusher/config.py b/datapusher/config.py index a13e6f37f..5310b7369 100644 --- a/datapusher/config.py +++ b/datapusher/config.py @@ -11,6 +11,7 @@ _DATABASE_URI = os.environ['CKAN_DATASTORE_WRITE_URL'] _WRITE_ENGINE_URL = os.environ['CKAN_DATASTORE_WRITE_URL'] +_DEPLOYMENT_NAME = os.environ['DEPLOYMENT_NAME'] _QSVDP_BIN = '/root/.cargo/bin/qsvdp' if os.environ.get('QSVPDP_BIN') is None else os.environ['QSVPDP_BIN'] _CKAN_URL = 'http://ckan-dev:5000' if os.environ.get('CKAN_URL') is None else os.environ['CKAN_URL'] _TYPES = "String", "Float", "Integer", "DateTime", "Date", "NULL" @@ -35,6 +36,7 @@ def _parse_bool(val: Union[str, bool]) -> bool: # pylint: disable=E1136 # DataPusherPlusConfig class with required fields, default values, type checking, and typecasting for int and bool values class DataPusherPlusConfig(MutableMapping): # ckan_service_provider settings + DEPLOYMENT_NAME: str = _DEPLOYMENT_NAME CKAN_URL: str = _CKAN_URL SQLALCHEMY_DATABASE_URI: str = _DATABASE_URI WRITE_ENGINE_URL: str = _WRITE_ENGINE_URL diff --git a/datapusher/main.py b/datapusher/main.py index a194eb239..69e453fbb 100644 --- a/datapusher/main.py +++ b/datapusher/main.py @@ -107,7 +107,7 @@ def push_to_datastore(resource_id, api_key): if __name__ == "__main__": datastore_deployment = push_to_datastore.to_deployment( - name="datapusher", + name=config.get('DEPLOYMENT_NAME'), parameters={"resource_id": "test_id", "api_key": "api_key"}, enforce_parameter_schema=False, is_schedule_active=False, diff --git a/deployment/frontend/README.md b/deployment/frontend/README.md index f45dcb24f..71f0fb2c1 100644 --- a/deployment/frontend/README.md +++ b/deployment/frontend/README.md @@ -27,5 +27,3 @@ You can check out the [create-t3-app GitHub repository](https://github.com/t3-os Follow our deployment guides for [Vercel](https://create.t3.gg/en/deployment/vercel), [Netlify](https://create.t3.gg/en/deployment/netlify) and [Docker](https://create.t3.gg/en/deployment/docker) for more 
information. - - diff --git a/deployment/frontend/src/components/dashboard/activitystream/ActivityList.tsx b/deployment/frontend/src/components/dashboard/activitystream/ActivityList.tsx index 8a4ac0a28..77c238c86 100644 --- a/deployment/frontend/src/components/dashboard/activitystream/ActivityList.tsx +++ b/deployment/frontend/src/components/dashboard/activitystream/ActivityList.tsx @@ -1,58 +1,92 @@ import React, { useState } from 'react' import ActivitystreamHeader from './ActivitystreamHeader' import ActivityStreamCard from '../../_shared/ActivityStreamCard' -import { api } from '@/utils/api'; -import Spinner from '@/components/_shared/Spinner'; -import type { SearchInput } from '@/schema/search.schema'; -import Pagination from '../_shared/Pagination'; -import { useQuery } from 'react-query'; -import { filterObjects, searchArrayForKeyword } from "@/utils/general"; +import { api } from '@/utils/api' +import Spinner from '@/components/_shared/Spinner' +import type { SearchInput } from '@/schema/search.schema' +import Pagination from '../_shared/Pagination' +import { useQuery } from 'react-query' +import { filterObjects, searchArrayForKeyword } from '@/utils/general' export default function ActivityList() { - const [query, setQuery] = useState({ search: '', fq: {}, page: { start: 0, rows: 10 } }) - const { data, isLoading } = api.dashboardActivity.listActivityStreamDashboard.useQuery({ search: '', page: { start: 0, rows: 10000 } }); + const [query, setQuery] = useState({ + search: '', + fq: {}, + page: { start: 0, rows: 10 }, + }) + const [serverQuery, setServerQuery] = useState({ + search: '', + fq: {}, + page: { start: 0, rows: 1000 }, + }) + const { data, isLoading } = + api.dashboardActivity.listActivityStreamDashboard.useQuery(serverQuery) - const processedActivity = useQuery(['processedActivitystream', data, query], () => { - if (!data) return { activity: [], count: 0 }; - const searchTerm = query.search.toLowerCase(); - const activity = data.activity; - let filteredActivity = activity; - if (searchTerm) { - filteredActivity = searchArrayForKeyword(activity, searchTerm); - } + const processedActivity = useQuery( + ['processedActivitystream', data, query], + () => { + if (!data) return { activity: [], count: 0 } + const searchTerm = query.search.toLowerCase() + const activity = data.activity + let filteredActivity = activity + if (searchTerm) { + filteredActivity = searchArrayForKeyword(activity, searchTerm) + } - const fq = query.fq!; - if (fq && Object.keys(fq).length > 0) { - console.log("in here") - filteredActivity = filterObjects(filteredActivity, fq); - } + const fq = query.fq! + if (fq && Object.keys(fq).length > 0) { + console.log('in here fg: ', fq) + filteredActivity = filterObjects(filteredActivity, fq) + } - const start = query.page.start; - const rows = query.page.rows; - const slicedData = filteredActivity.slice(start, start + rows); - return { activity: slicedData, count: filteredActivity.length }; - }, { - enabled: !!data, // Only run the query when data is available - }); + const start = query.page.start + const rows = query.page.rows + const slicedData = filteredActivity.slice(start, start + rows) + return { activity: slicedData, count: filteredActivity.length } + }, + { + enabled: !!data, // Only run the query when data is available + } + ) + return ( +
+ + } + /> - - return ( -
- } /> - - { - isLoading || processedActivity.isLoading ?
: ( - processedActivity.data?.activity === undefined || processedActivity.data?.activity.length === 0 ?
No data
: - processedActivity.data?.activity.map((items, index) => { - return ( -
- + {isLoading || processedActivity.isLoading ? ( +
+ +
+ ) : processedActivity.data?.activity === undefined || + processedActivity.data?.activity.length === 0 ? ( +
+ No data
- ) - }) - ) - } -
- ) + ) : ( + processedActivity.data?.activity.map((items, index) => { + return ( +
+ +
+ ) + }) + )} +
+ ) } diff --git a/deployment/frontend/src/components/dashboard/activitystream/ActivitystreamHeader.tsx b/deployment/frontend/src/components/dashboard/activitystream/ActivitystreamHeader.tsx index 5499f38b7..60341ffa8 100644 --- a/deployment/frontend/src/components/dashboard/activitystream/ActivitystreamHeader.tsx +++ b/deployment/frontend/src/components/dashboard/activitystream/ActivitystreamHeader.tsx @@ -14,7 +14,11 @@ import { Group } from '@portaljs/ckan' function LeftNode({ setQuery, query, + setServerQuery, + serverQuery, }: { + setServerQuery: React.Dispatch> + serverQuery: SearchInput setQuery: React.Dispatch> query: SearchInput }) { @@ -110,8 +114,8 @@ function LeftNode({ ) )} filtername="packageId" - setQuery={setQuery} - query={query} + setQuery={setServerQuery} + query={serverQuery} /> ) : ( '' @@ -126,8 +130,8 @@ function LeftNode({ ) )} filtername="orgId" - setQuery={setQuery} - query={query} + setQuery={setServerQuery} + query={serverQuery} /> ) : ( '' @@ -152,15 +156,26 @@ function LeftNode({ export default function ActivitystreamHeader({ setQuery, query, + setServerQuery, + serverQuery, Pagination, }: { setQuery: React.Dispatch> query: SearchInput + setServerQuery: React.Dispatch> + serverQuery: SearchInput Pagination?: React.ReactNode }) { return ( } + leftNode={ + + } rightStyle="sm:mt-4" Pagination={Pagination} /> diff --git a/deployment/frontend/src/pages/api/proxy.ts b/deployment/frontend/src/pages/api/proxy.ts index 759d1b6af..6a151e140 100644 --- a/deployment/frontend/src/pages/api/proxy.ts +++ b/deployment/frontend/src/pages/api/proxy.ts @@ -8,7 +8,7 @@ export default async function handler( const { url, headers } = body - const response = await fetch(url, headers) + const response = await fetch(url, { headers }) return res.json(await response.json()) } diff --git a/deployment/frontend/src/server/api/routers/activityStream.ts b/deployment/frontend/src/server/api/routers/activityStream.ts index d807df1af..ca2a9039d 100644 --- a/deployment/frontend/src/server/api/routers/activityStream.ts +++ b/deployment/frontend/src/server/api/routers/activityStream.ts @@ -1,48 +1,70 @@ -import { z } from "zod"; -import { - createTRPCRouter, - protectedProcedure -} from "@/server/api/trpc"; -import { env } from "@/env.mjs"; -import type { Activity, ActivityDisplay, CkanResponse, User } from "@/schema/ckan.schema"; -import { getUser, activityDetails } from "@/utils/apiUtils"; -import { searchArrayForKeyword } from "@/utils/general"; -import { searchSchema } from "@/schema/search.schema"; -import { filterObjects } from "@/utils/general"; +import { z } from 'zod' +import { createTRPCRouter, protectedProcedure } from '@/server/api/trpc' +import { env } from '@/env.mjs' +import type { + Activity, + ActivityDisplay, + CkanResponse, + User, +} from '@/schema/ckan.schema' +import { getUser, activityDetails } from '@/utils/apiUtils' +import { searchArrayForKeyword } from '@/utils/general' +import { searchSchema } from '@/schema/search.schema' +import { filterObjects } from '@/utils/general' export const activityStreamRouter = createTRPCRouter({ - listActivityStreamDashboard: protectedProcedure - .input(searchSchema) - .query(async ({ input, ctx }) => { - const response = await fetch(`${env.CKAN_URL}/api/3/action/dashboard_activity_list`, - { - headers: { - "Authorization": ctx.session.user.apikey, - } - }) + listActivityStreamDashboard: protectedProcedure + .input(searchSchema) + .query(async ({ input, ctx }) => { + let url = `${env.CKAN_URL}/api/3/action/dashboard_activity_list` - const 
data = (await response.json()) as CkanResponse; - const activities = await Promise.all(data.result.map(async (activity: Activity) => { - let user_data = await getUser({ userId: activity.user_id, apiKey: ctx.session.user.apikey }); - user_data = user_data === undefined ? null : user_data; - const actitvityDetails = activityDetails(activity); - actitvityDetails.description = `${user_data?.name} ${actitvityDetails.description}` - return actitvityDetails; - })); + if (input.fq) { + if ('package_id' in input.fq) { + url = `${env.CKAN_URL}/api/3/action/package_activity_list?id=${input.fq['package_id']}` + } else if ('orgId' in input.fq) { + url = `${env.CKAN_URL}/api/3/action/organization_activity_list?id=${input.fq['orgId']}` + } + } + const response = await fetch(url, { + headers: { + Authorization: ctx.session.user.apikey, + }, + }) - let result = activities; - if (input.search) { - result = searchArrayForKeyword(activities, input.search); - } + const data = (await response.json()) as CkanResponse + const activities = await Promise.all( + data.result.map(async (activity: Activity) => { + let user_data = await getUser({ + userId: activity.user_id, + apiKey: ctx.session.user.apikey, + }) + user_data = user_data === undefined ? null : user_data + const actitvityDetails = activityDetails(activity) + actitvityDetails.description = `${user_data?.name} ${actitvityDetails.description}` + return actitvityDetails + }) + ) - if (input.fq && activities) { - result = filterObjects(activities, input.fq); - } + let result = activities + if (input.search) { + result = searchArrayForKeyword( + activities, + input.search + ) + } - return { - activity: result ? result.slice(input.page.start, input.page.start + input.page.rows) : [], - count: result.length, - }; - }), + if (input.fq && activities) { + result = filterObjects(activities, input.fq) + } -}); \ No newline at end of file + return { + activity: result + ? 
result.slice( + input.page.start, + input.page.start + input.page.rows + ) + : [], + count: result.length, + } + }), +}) diff --git a/e2e-tests/cypress/e2e/dataset_create_and_read.cy.js b/e2e-tests/cypress/e2e/dataset_create_and_read.cy.js index 20f02c81d..2dff702ed 100644 --- a/e2e-tests/cypress/e2e/dataset_create_and_read.cy.js +++ b/e2e-tests/cypress/e2e/dataset_create_and_read.cy.js @@ -110,12 +110,21 @@ describe("Create dataset", () => { }, ); - it("Should show the members", () => { - cy.addPackageCollaboratorApi(user, dataset, "editor"); - cy.visit("/datasets/" + dataset); - cy.contains("Collaborators").click(); - cy.contains(user); - }); + it( + "Should show the members", + { + retries: { + runMode: 5, + openMode: 0, + }, + }, + () => { + cy.addPackageCollaboratorApi(user, dataset, "editor"); + cy.visit("/datasets/" + dataset); + cy.contains("Collaborators").click(); + cy.contains(user); + }, + ); it("Edit metadata", () => { cy.visit("/dashboard/datasets/" + dataset + "/edit"); @@ -161,16 +170,25 @@ describe("Create dataset", () => { }, ); - it("Should show the new member", () => { - cy.visit("/datasets/" + dataset); - cy.contains("Collaborators").click(); - cy.contains(user_2); - cy.logout(); - cy.login(user_2, "test_user_2"); - cy.visit("/dashboard/notifications"); - cy.contains(ckanUserName); - cy.contains(" added you as a collaborator (member) for the dataset"); - }); + it( + "Should show the new member", + { + retries: { + runMode: 5, + openMode: 0, + }, + }, + () => { + cy.visit("/datasets/" + dataset); + cy.contains("Collaborators").click(); + cy.contains(user_2); + cy.logout(); + cy.login(user_2, "test_user_2"); + cy.visit("/dashboard/notifications"); + cy.contains(ckanUserName); + cy.contains(" added you as a collaborator (member) for the dataset"); + }, + ); after(() => {
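
Note on the per-environment datapusher wiring introduced above: the new ckanext.wri.datapusher_deployment_name option (and the DEPLOYMENT_NAME env var on the datapusher container, e.g. datapusher-dev in the dev compose files) is what datapusher_submit now interpolates into the Prefect deployment lookup, so each environment resolves its own deployment instead of sharing the previously hard-coded "datapusher" name. A minimal sketch, not part of the patch, of that lookup in isolation; it assumes the Prefect server at prefect_url exposes the api/deployments/name/<flow>/<deployment> route used in the diff and that the flow is named push-to-datastore, and the helper name is hypothetical:

    # Sketch: resolve the per-environment Prefect deployment id the same way
    # datapusher_submit does after this patch. Assumes the route shown in the
    # diff (api/deployments/name/push-to-datastore/<deployment_name>) exists
    # on the Prefect server and returns JSON containing an "id" field.
    from urllib.parse import urljoin

    import requests


    def resolve_datapusher_deployment_id(prefect_url: str, deployment_name: str) -> str:
        # deployment_name should match both DEPLOYMENT_NAME on the datapusher
        # container and ckanext.wri.datapusher_deployment_name in the CKAN .ini,
        # e.g. "datapusher-dev" for the dev stack.
        response = requests.get(
            urljoin(prefect_url, f"api/deployments/name/push-to-datastore/{deployment_name}")
        )
        response.raise_for_status()
        return response.json()["id"]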